idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
16,200
def _partial_precolor ( G , chi_ub ) : v = next ( iter ( G ) ) clique = [ v ] for u in G [ v ] : if all ( w in G [ u ] for w in clique ) : clique . append ( u ) partial_coloring = { v : c for c , v in enumerate ( clique ) } chi_lb = len ( partial_coloring ) possible_colors = { v : set ( range ( chi_ub ) ) for v in G if v not in partial_coloring } for v , color in iteritems ( partial_coloring ) : for u in G [ v ] : if u in possible_colors : possible_colors [ u ] . discard ( color ) return partial_coloring , possible_colors , chi_lb
In order to reduce the number of variables in the QUBO we want to color as many nodes as possible without affecting the min vertex coloring . Without loss of generality we can choose a single maximal clique and color each node in it uniquely .
16,201
def is_cycle(G):
    """Return True if G is a single cycle that visits every node."""
    # walk the cycle starting from an arbitrary edge
    previous, current = next(iter(G.edges))
    origin = previous
    visited = 1
    while current != origin:
        nbrs = G[current]
        # every node on a cycle has exactly two neighbors
        if len(nbrs) != 2:
            return False
        first, second = nbrs
        # step to whichever neighbor we did not just come from
        step = second if first == previous else first
        previous, current = current, step
        visited += 1
    # the walk must have covered the entire graph
    return visited == len(G)
Determines whether the given graph is a cycle or circle graph .
16,202
def is_vertex_coloring(G, coloring):
    """Return True when no edge of G joins two same-colored nodes."""
    for u, v in G.edges:
        if coloring[u] == coloring[v]:
            return False
    return True
Determines whether the given coloring is a vertex coloring of graph G .
16,203
def maximal_matching(G, sampler=None, **sampler_args):
    """Find an approximate maximal matching of G by sampling a QUBO.

    Combines a maximality penalty (magnitude B) with a matching penalty
    (magnitude A) into a single QUBO and takes the sampler's first sample.

    Parameters
    ----------
    G : graph with .degree() and .edges
    sampler : object exposing sample_qubo
    **sampler_args : forwarded to sampler.sample_qubo

    Returns
    -------
    set of edges of G whose QUBO variable was switched on
    """
    # maximum degree sets the relative penalty scaling
    delta = max(G.degree(node) for node in G)
    A = 1.
    if delta == 2:
        B = .75
    else:
        # NOTE(review): delta < 2 would make B negative — presumably
        # callers only supply graphs with delta >= 2; confirm
        B = .75 * A / (delta - 2.)

    # each undirected edge becomes one QUBO variable
    edge_mapping = _edge_mapping(G)

    # maximality terms plus matching terms, accumulated into one QUBO
    Q = _maximal_matching_qubo(G, edge_mapping, magnitude=B)
    Qm = _matching_qubo(G, edge_mapping, magnitude=A)
    for edge, bias in Qm.items():
        if edge not in Q:
            Q[edge] = bias
        else:
            Q[edge] += bias

    # use the sampler's first (lowest-energy) sample
    response = sampler.sample_qubo(Q, **sampler_args)
    sample = next(iter(response))

    return set(edge for edge in G.edges if sample[edge_mapping[edge]] > 0)
Finds an approximate maximal matching .
16,204
def is_maximal_matching(G, matching):
    """Return True when `matching` is a matching of G and no edge can be added."""
    covered = set().union(*matching)

    # a proper matching touches two distinct nodes per edge, none shared
    if len(covered) != 2 * len(matching):
        return False

    # maximality: every edge of G must already touch a covered node
    return all(u in covered or v in covered for u, v in G.edges)
Determines whether the given set of edges is a maximal matching .
16,205
def _maximal_matching_qubo ( G , edge_mapping , magnitude = 1. ) : Q = { } for ( u , v ) in G . edges : for edge in G . edges ( u ) : x = edge_mapping [ edge ] if ( x , x ) not in Q : Q [ ( x , x ) ] = - 1 * magnitude else : Q [ ( x , x ) ] -= magnitude for edge in G . edges ( v ) : x = edge_mapping [ edge ] if ( x , x ) not in Q : Q [ ( x , x ) ] = - 1 * magnitude else : Q [ ( x , x ) ] -= magnitude for e0 in G . edges ( v ) : x0 = edge_mapping [ e0 ] for e1 in G . edges ( u ) : x1 = edge_mapping [ e1 ] if x0 < x1 : if ( x0 , x1 ) not in Q : Q [ ( x0 , x1 ) ] = magnitude else : Q [ ( x0 , x1 ) ] += magnitude else : if ( x1 , x0 ) not in Q : Q [ ( x1 , x0 ) ] = magnitude else : Q [ ( x1 , x0 ) ] += magnitude return Q
Generates a QUBO that, when combined with one as generated by _matching_qubo, induces a maximal matching on the given graph G. The variables in the QUBO are the edges, as given by edge_mapping.
16,206
def _matching_qubo ( G , edge_mapping , magnitude = 1. ) : Q = { } for node in G : for edge0 , edge1 in itertools . combinations ( G . edges ( node ) , 2 ) : v0 = edge_mapping [ edge0 ] v1 = edge_mapping [ edge1 ] Q [ ( v0 , v1 ) ] = magnitude return Q
Generates a QUBO that induces a matching on the given graph G. The variables in the QUBO are the edges, as given by edge_mapping.
16,207
def canonical_chimera_labeling(G, t=None):
    """Map the labels of G to Chimera (row, col, u, k) indices.

    Walks the lattice tile by tile: rows first down a column, then hops to
    the top of the next column.

    Parameters
    ----------
    G : Chimera-structured graph or binary quadratic model (has .adj)
    t : shore size of each tile; inferred from the edge count when None
    """
    adj = G.adj

    if t is None:
        # BQMs expose quadratic terms instead of edges
        if hasattr(G, 'edges'):
            num_edges = len(G.edges)
        else:
            num_edges = len(G.quadratic)
        t = _chimera_shore_size(adj, num_edges)

    chimera_indices = {}

    row = col = 0

    # start from a minimum-degree node (a corner tile)
    root = min(adj, key=lambda v: len(adj[v]))
    horiz, verti = rooted_tile(adj, root, t)

    while len(chimera_indices) < len(adj):
        new_indices = {}

        if row == 0:
            # first row: assign horizontal shore indices in discovery order
            for si, v in enumerate(horiz):
                new_indices[v] = (row, col, 0, si)
        else:
            # inherit the shore index from the unique already-labeled
            # neighbor in the previous row
            for v in horiz:
                north = [u for u in adj[v] if u in chimera_indices]
                assert len(north) == 1
                i, j, u, si = chimera_indices[north[0]]
                assert i == row - 1 and j == col and u == 0
                new_indices[v] = (row, col, 0, si)

        if col == 0:
            # first column: assign vertical shore indices in discovery order
            for si, v in enumerate(verti):
                new_indices[v] = (row, col, 1, si)
        else:
            # inherit the shore index from the unique already-labeled
            # neighbor in the previous column
            for v in verti:
                east = [u for u in adj[v] if u in chimera_indices]
                assert len(east) == 1
                i, j, u, si = chimera_indices[east[0]]
                assert i == row and j == col - 1 and u == 1
                new_indices[v] = (row, col, 1, si)

        chimera_indices.update(new_indices)

        # advance to the next tile down the current column
        root_neighbours = [v for v in adj[root] if v not in chimera_indices]
        if len(root_neighbours) == 1:
            root = root_neighbours[0]

            horiz, verti = rooted_tile(adj, root, t)

            row += 1
        else:
            assert not root_neighbours  # this column is exhausted

            # hop to the top of the next column via the (0, col, 1, 0) node
            vert_root = [v for v in chimera_indices
                         if chimera_indices[v] == (0, col, 1, 0)][0]

            vert_root_neighbours = [v for v in adj[vert_root]
                                    if v not in chimera_indices]

            if vert_root_neighbours:
                # note the horiz/verti swap when entering a new column
                verti, horiz = rooted_tile(adj, vert_root_neighbours[0], t)
                root = next(iter(horiz))

                row = 0
                col += 1

    return chimera_indices
Returns a mapping from the labels of G to chimera - indexed labeling .
16,208
def maximum_weighted_independent_set(G, weight=None, sampler=None, lagrange=2.0, **sampler_args):
    """Sample an approximate maximum-weight independent set of G."""
    qubo = maximum_weighted_independent_set_qubo(G, weight, lagrange)

    response = sampler.sample_qubo(qubo, **sampler_args)
    best = next(iter(response))

    # nodes whose variable was switched on form the set
    return [node for node in best if best[node] > 0]
Returns an approximate maximum weighted independent set .
16,209
def maximum_independent_set(G, sampler=None, lagrange=2.0, **sampler_args):
    """Approximate maximum independent set: the unweighted special case."""
    result = maximum_weighted_independent_set(G, None, sampler, lagrange,
                                              **sampler_args)
    return result
Returns an approximate maximum independent set .
16,210
def maximum_weighted_independent_set_qubo(G, weight=None, lagrange=2.0):
    """QUBO whose ground states are the maximum-weight independent sets of G."""
    if not G:
        return {}

    # node costs (default 1), normalized by the largest cost
    cost = dict(G.nodes(data=weight, default=1))
    scale = max(cost.values())

    Q = {}
    for node in G:
        # negative linear bias (clamped at zero) rewards including the node
        Q[(node, node)] = min(-cost[node] / scale, 0.0)
    for edge in G.edges:
        # Lagrange penalty forbids selecting adjacent nodes
        Q[edge] = lagrange
    return Q
Return the QUBO with ground states corresponding to a maximum weighted independent set .
16,211
def min_weighted_vertex_cover(G, weight=None, sampler=None, **sampler_args):
    """Approximate minimum weighted vertex cover: the complement of an
    approximate maximum-weight independent set."""
    independent = set(
        maximum_weighted_independent_set(G, weight, sampler, **sampler_args))
    return [v for v in G if v not in independent]
Returns an approximate minimum weighted vertex cover .
16,212
def is_vertex_cover(G, vertex_cover):
    """Return True when every edge of G has an endpoint in `vertex_cover`."""
    cover = set(vertex_cover)
    for u, v in G.edges:
        if u not in cover and v not in cover:
            return False
    return True
Determines whether the given set of vertices is a vertex cover of graph G .
16,213
def pegasus_layout(G, scale=1., center=None, dim=2, crosses=False):
    """Position the nodes of a Pegasus graph for plotting.

    Parameters
    ----------
    G : graph produced by dwave_networkx.pegasus_graph
    scale, center, dim : usual layout controls
    crosses : draw qubits in a "crosses" arrangement
        (not used for nice-labeled graphs)

    Returns
    -------
    dict mapping each node to its position array
    """
    if not isinstance(G, nx.Graph) or G.graph.get("family") != "pegasus":
        raise ValueError("G must be generated by dwave_networkx.pegasus_graph")

    if G.graph.get('labels') == 'nice':
        # nice labels map onto a Chimera-like grid of size 3*(rows-1)
        m = 3 * (G.graph['rows'] - 1)
        c_coords = chimera_node_placer_2d(m, m, 4, scale=scale, center=center, dim=dim)

        def xy_coords(t, y, x, u, k):
            # translate a nice (t, y, x, u, k) label into chimera grid terms
            return c_coords(3 * y + 2 - t, 3 * x + t, u, k)
        pos = {v: xy_coords(*v) for v in G.nodes()}
    else:
        xy_coords = pegasus_node_placer_2d(G, scale, center, dim, crosses=crosses)

        if G.graph.get('labels') == 'coordinate':
            # nodes are already (u, w, k, z) coordinate tuples
            pos = {v: xy_coords(*v) for v in G.nodes()}
        elif G.graph.get('data'):
            # integer labels, with coordinates stored per node
            pos = {v: xy_coords(*dat['pegasus_index']) for v, dat in G.nodes(data=True)}
        else:
            # integer labels without stored data: convert via pegasus_coordinates
            m = G.graph.get('rows')
            coord = pegasus_coordinates(m)
            pos = {v: xy_coords(*coord.tuple(v)) for v in G.nodes()}

    return pos
Positions the nodes of graph G in a Pegasus topology .
16,214
def pegasus_node_placer_2d(G, scale=1., center=None, dim=2, crosses=False):
    """Return a function mapping Pegasus coordinates (u, w, k, z) to positions."""
    import numpy as np

    m = G.graph.get('rows')
    h_offsets = G.graph.get("horizontal_offsets")
    v_offsets = G.graph.get("vertical_offsets")
    tile_width = G.graph.get("tile")
    tile_center = tile_width / 2 - .5

    # normalize so the full lattice spans roughly `scale` units
    scale /= m * tile_width

    center = np.zeros(dim) if center is None else np.asarray(center)

    paddims = dim - 2
    if paddims < 0:
        raise ValueError("layout must have at least two dimensions")
    if len(center) != dim:
        raise ValueError("length of center coordinates must match dimension of layout")

    # optional shift that draws cells in a "crosses" arrangement
    cross_shift = 2. if crosses else 0.

    def _xy_coords(u, w, k, z):
        # small perturbation separates odd/even k visually
        p = -.1 if k % 2 else .1

        if u:
            xy = np.array([z * tile_width + h_offsets[k] + tile_center,
                           -tile_width * w - k - p + cross_shift])
        else:
            xy = np.array([tile_width * w + k + p + cross_shift,
                           -z * tile_width - v_offsets[k] - tile_center])

        # pad with zeros for layouts of more than two dimensions
        return np.hstack((xy * scale, np.zeros(paddims))) + center

    return _xy_coords
Generates a function that converts Pegasus indices to x y coordinates for a plot .
16,215
def draw_pegasus(G, crosses=False, **kwargs):
    """Draw G using the Pegasus layout; kwargs pass to draw_qubit_graph."""
    layout = pegasus_layout(G, crosses=crosses)
    draw_qubit_graph(G, layout, **kwargs)
Draws graph G in a Pegasus topology .
16,216
def draw_pegasus_embedding(G, *args, **kwargs):
    """Draw an embedding over a Pegasus-layout rendering of G."""
    # `crosses` belongs to the layout, not to the embedding drawer
    crosses = kwargs.pop("crosses", False)
    layout = pegasus_layout(G, crosses=crosses)
    draw_embedding(G, layout, *args, **kwargs)
Draws an embedding onto the pegasus graph G according to layout .
16,217
def binary_quadratic_model_sampler(which_args):
    """Decorator that validates the sampler argument(s) of a function.

    Parameters
    ----------
    which_args : int or iterable of ints
        Positions of the sampler arguments. A None sampler is replaced by
        the registered default; any other sampler must expose callable
        sample_qubo and sample_ising methods.
    """
    @decorator
    def _binary_quadratic_model_sampler(f, *args, **kw):
        # normalize to an iterable of argument positions
        if isinstance(which_args, int):
            iter_args = (which_args,)
        else:
            iter_args = iter(which_args)

        # copy so defaults can be substituted in place
        new_args = [arg for arg in args]
        for idx in iter_args:
            sampler = args[idx]

            if sampler is None:
                # substitute the globally registered default sampler
                default_sampler = dnx.get_default_sampler()
                if default_sampler is None:
                    raise dnx.DWaveNetworkXMissingSampler('no default sampler set')
                new_args[idx] = default_sampler
                continue

            # duck-type check for the two required sampling methods
            if not hasattr(sampler, "sample_qubo") or not callable(sampler.sample_qubo):
                raise TypeError("expected sampler to have a 'sample_qubo' method")
            if not hasattr(sampler, "sample_ising") or not callable(sampler.sample_ising):
                raise TypeError("expected sampler to have a 'sample_ising' method")

        return f(*new_args, **kw)
    return _binary_quadratic_model_sampler
Decorator to validate sampler arguments .
16,218
def duration_to_number(duration, units='seconds'):
    """Coerce a duration into a plain number.

    Numbers pass through unchanged; timedeltas are converted to seconds.

    Raises
    ------
    ValueError : for infinite durations
    NotImplementedError : for units other than 'seconds'
    TypeError : for any other type
    """
    if isinstance(duration, (int, float)):
        # bug fix: float('inf') is an instance of float, so the old
        # infinity guard (placed after this branch) was unreachable --
        # check for infinity before returning. (py2 `long` remnant dropped;
        # int covers all integers on py3.)
        if duration == inf or duration == -inf:
            msg = "Can't convert infinite duration to number"
            raise ValueError(msg)
        return duration
    elif isinstance(duration, (datetime.timedelta,)):
        if units == 'seconds':
            return duration.total_seconds()
        else:
            msg = 'unit "%s" is not supported' % units
            raise NotImplementedError(msg)
    else:
        msg = 'duration is an unknown type (%s)' % duration
        raise TypeError(msg)
If duration is already a numeric type then just return duration . If duration is a timedelta return a duration in seconds .
16,219
def convert_args_to_list(args):
    """Normalize call arguments into a list of [start, end] pairs.

    Accepts: an empty sequence; one argument that is itself an iterable of
    pairs; several pair tuples/lists; or exactly two scalars forming one
    pair. Raises TypeError otherwise.
    """
    list_of_pairs = []
    if len(args) == 0:
        return []

    if any(isinstance(arg, (list, tuple)) for arg in args):
        if len(args) == 1 and \
                any(isinstance(arg, (list, tuple)) for arg in args[0]):
            # a single argument that is itself a sequence of pairs
            for item in args[0]:
                list_of_pairs.append(list(item))
        else:
            # several pair arguments
            for item in args:
                list_of_pairs.append(list(item))
    else:
        if len(args) == 2:
            # two scalars -> a single pair
            list_of_pairs.append(list(args))
        else:
            # bug fix: the message previously had no placeholder, so the
            # offending arguments were silently dropped from it
            msg = "The argument type is invalid: {}.".format(args)
            raise TypeError(msg)

    return list_of_pairs
Convert all iterable pairs of inputs into a list of list
16,220
def variance(self):
    """Variance of the distribution, ignoring None values."""
    cleaned = self._discard_value(None)
    if not cleaned.total():
        return 0.0
    mu = cleaned.mean()
    # weighted sum of squared deviations from the mean
    second_moment = sum(
        count * (value - mu) ** 2 for value, count in iteritems(cleaned)
    )
    return second_moment / float(cleaned.total())
Variance of the distribution .
16,221
def normalized(self):
    """Return a copy of the histogram scaled so the counts sum to one."""
    total = self.total()
    result = Histogram()
    for value, count in iteritems(self):
        try:
            result[value] = count / float(total)
        except UnorderableElements:
            # fall back to hash-keyed storage when values can't be ordered
            result = Histogram.from_dict(dict(result), key=hash)
            result[value] = count / float(total)
    return result
Return a normalized version of the histogram where the values sum to one .
16,222
def _quantile_function(self, alpha=0.5, smallest_count=None):
    """Return a function mapping q in [0, 1] to the q-th quantile value.

    Builds an inverse CDF from the histogram. `alpha`, scaled by the
    smallest count, smooths the steps so interpolation between distinct
    values is possible.
    """
    total = float(self.total())

    # the smoothing offset is bounded by the smallest observed count
    smallest_observed_count = min(itervalues(self))
    if smallest_count is None:
        smallest_count = smallest_observed_count
    else:
        smallest_count = min(smallest_count, smallest_observed_count)

    beta = alpha * smallest_count

    debug_plot = []
    cumulative_sum = 0.0
    inverse = sortedcontainers.SortedDict()
    for value, count in iteritems(self):
        debug_plot.append((cumulative_sum / total, value))
        # two knots per value: just inside the start and end of its step
        inverse[(cumulative_sum + beta) / total] = value
        cumulative_sum += count
        inverse[(cumulative_sum - beta) / total] = value
        debug_plot.append((cumulative_sum / total, value))

    # supported range of the inverse CDF; q outside it is clamped
    q_min = inverse.iloc[0]
    q_max = inverse.iloc[-1]

    def function(q):
        if q < 0.0 or q > 1.0:
            msg = 'invalid quantile %s, need `0 <= q <= 1`' % q
            raise ValueError(msg)
        elif q < q_min:
            q = q_min
        elif q > q_max:
            q = q_max

        if beta > 0:
            if q in inverse:
                result = inverse[q]
            else:
                # linear interpolation between the two surrounding knots
                previous_index = inverse.bisect_left(q) - 1
                x1 = inverse.iloc[previous_index]
                x2 = inverse.iloc[previous_index + 1]
                y1 = inverse[x1]
                y2 = inverse[x2]
                result = (y2 - y1) * (q - x1) / float(x2 - x1) + y1
        else:
            if q in inverse:
                # exactly on a knot: average the two neighboring values
                previous_index = inverse.bisect_left(q) - 1
                x1 = inverse.iloc[previous_index]
                x2 = inverse.iloc[previous_index + 1]
                y1 = inverse[x1]
                y2 = inverse[x2]
                result = 0.5 * (y1 + y2)
            else:
                # no smoothing: take the value of the step below q
                previous_index = inverse.bisect_left(q) - 1
                x1 = inverse.iloc[previous_index]
                result = inverse[x1]

        return float(result)

    return function
Return a function that returns the quantile values for this histogram .
16,223
def get(self, time, interpolate='previous'):
    """Value of the series at `time`, interpolating between measurements.

    Raises ValueError when `interpolate` names no registered getter.
    """
    if interpolate not in self.getter_functions:
        msg = (
            "unknown value '{}' for interpolate, "
            "valid values are in [{}]"
        ).format(interpolate, ', '.join(self.getter_functions))
        raise ValueError(msg)
    return self.getter_functions[interpolate](time)
Get the value of the time series even in - between measured values .
16,224
def set(self, time, value, compact=False):
    """Record `value` at `time`; with compact=True, skip redundant writes."""
    if compact and len(self) and self.get(time) == value:
        # compact mode: the series already yields this value at `time`
        return
    self._d[time] = value
Set the value for the time series. If compact is True, only set the value if it's different from what it would be anyway.
16,225
def set_interval(self, start, end, value, compact=False):
    """Assign `value` over [start, end), restoring the prior value at `end`.

    With compact=True redundant writes are suppressed.
    """
    for index, (t0, _t1, old_value) in enumerate(self.iterperiods(start, end)):
        if index:
            # interior measurement points are covered by `value`; drop them
            del self[t0]
        else:
            self.set(t0, value, compact)
    # restore the value that was previously in effect at `end`
    self.set(end, old_value, compact)
Set the value for the time series on an interval. If compact is True, only set the value if it's different from what it would be anyway.
16,226
def exists(self):
    """Boolean series: False wherever the value is None, True elsewhere."""
    result = TimeSeries(default=self.default is not None)
    for t, v in self:
        result[t] = v is not None
    return result
returns False when the timeseries has a None value True otherwise
16,227
def iterintervals(self, n=2):
    """Yield tuples of n consecutive measurement points."""
    # make n independent iterators and advance the k-th one k steps
    streams = tee(iter(self), n)
    for offset, stream in enumerate(streams):
        for _ in range(offset):
            next(stream)
    # zip stops at the shortest (most advanced) stream
    for window in zip(*streams):
        yield window
Iterate over groups of n consecutive measurement points in the time series .
16,228
def sample(self, sampling_period, start=None, end=None, interpolate='previous'):
    """Evaluate the series at regular instants from start through end."""
    start, end, mask = self._check_boundaries(start, end)
    sampling_period = self._check_regularization(start, end, sampling_period)

    samples = []
    current_time = start
    while current_time <= end:
        samples.append(
            (current_time, self.get(current_time, interpolate=interpolate)))
        current_time += sampling_period
    return samples
Sampling at regular time periods .
16,229
def moving_average(self, sampling_period, window_size=None, start=None, end=None, placement='center', pandas=False):
    """Sample a moving average of the series at regular instants.

    Parameters
    ----------
    sampling_period : spacing between sample instants
    window_size : averaging window width (defaults to sampling_period)
    start, end : sampling range (validated by _check_boundaries)
    placement : 'center', 'left' or 'right' window alignment
    pandas : return a pandas.Series instead of a list of pairs

    Returns
    -------
    list of (time, mean) pairs, or a pandas.Series when pandas=True
    """
    start, end, mask = self._check_boundaries(start, end)

    # default window equals the sampling period
    if window_size is None:
        window_size = sampling_period

    sampling_period = self._check_regularization(start, end, sampling_period)

    # window widths as floats; converted to timedeltas for datetime domains
    full_window = window_size * 1.
    half_window = full_window / 2.
    if (isinstance(start, datetime.datetime) and
            not isinstance(full_window, datetime.timedelta)):
        half_window = datetime.timedelta(seconds=half_window)
        full_window = datetime.timedelta(seconds=full_window)

    result = []
    current_time = start
    while current_time <= end:
        # position the averaging window relative to the sample instant
        if placement == 'center':
            window_start = current_time - half_window
            window_end = current_time + half_window
        elif placement == 'left':
            window_start = current_time
            window_end = current_time + full_window
        elif placement == 'right':
            window_start = current_time - full_window
            window_end = current_time
        else:
            msg = 'unknown placement "{}"'.format(placement)
            raise ValueError(msg)

        # mean is None where the window has no defined value
        try:
            mean = self.mean(window_start, window_end)
        except TypeError as e:
            if 'NoneType' in str(e):
                mean = None
            else:
                raise e

        result.append((current_time, mean))
        current_time += sampling_period

    if pandas:
        try:
            import pandas as pd
        except ImportError:
            msg = "can't have pandas=True if pandas is not installed"
            raise ImportError(msg)

        result = pd.Series(
            [v for t, v in result],
            index=[t for t, v in result],
        )

    return result
Averaging over regular intervals
16,230
def mean(self, start=None, end=None, mask=None):
    """Time-weighted average of the series over [start, end] where mask holds."""
    dist = self.distribution(start=start, end=end, mask=mask)
    return dist.mean()
This calculated the average value of the time series over the given time range from start to end when mask is truthy .
16,231
def distribution(self, start=None, end=None, normalized=True, mask=None):
    """Histogram of how long each value was held over the masked range."""
    start, end, mask = self._check_boundaries(start, end, mask=mask)
    counter = histogram.Histogram()
    for mask_start, mask_end, _ in mask.iterperiods(value=True):
        for t0, t1, value in self.iterperiods(mask_start, mask_end):
            # weight each value by how long it persisted
            duration = utils.duration_to_number(
                t1 - t0,
                units='seconds',
            )
            try:
                counter[value] += duration
            except histogram.UnorderableElements:
                # switch to hash-keyed storage for unorderable values
                counter = histogram.Histogram.from_dict(
                    dict(counter), key=hash)
                counter[value] += duration
    return counter.normalized() if normalized else counter
Calculate the distribution of values over the given time range from start to end .
16,232
def n_points(self, start=-inf, end=+inf, mask=None, include_start=True, include_end=False, normalized=False):
    """Count measurement points in [start, end], subject to the mask."""
    if not self.n_measurements():
        return 0
    start, end, mask = self._check_boundaries(start, end, mask=mask)

    count = 0
    for period_start, period_end, _ in mask.iterperiods(value=True):
        # bisect_right includes a point exactly at the boundary
        upper = (self._d.bisect_right(period_end) if include_end
                 else self._d.bisect_left(period_end))
        lower = (self._d.bisect_left(period_start) if include_start
                 else self._d.bisect_right(period_start))
        count += upper - lower

    if normalized:
        count /= float(self.n_measurements())

    return count
Calculate the number of points over the given time range from start to end .
16,233
def _check_time_series(self, other):
    """Raise a TypeError with a helpful message unless `other` is a TimeSeries."""
    if not isinstance(other, TimeSeries):
        # fixed message typo: "types(s)" -> "type(s)"
        msg = "unsupported operand type(s) for +: %s and %s" % \
            (type(self), type(other))
        raise TypeError(msg)
Function used to check the type of the argument and raise an informative error message if it's not a TimeSeries.
16,234
def operation(self, other, function, **kwargs):
    """Apply `function` elementwise against another TimeSeries or a constant."""
    result = TimeSeries(**kwargs)
    if isinstance(other, TimeSeries):
        # evaluate at every measurement time of either series
        for t, v in self:
            result[t] = function(v, other[t])
        for t, v in other:
            result[t] = function(self[t], v)
    else:
        # scalar second operand
        for t, v in self:
            result[t] = function(v, other)
    return result
Calculate elementwise operation either between this TimeSeries and another one i . e .
16,235
def to_bool(self, invert=False):
    """Map every value to its truth value (or its negation with invert=True)."""
    if invert:
        def function(x, y):
            return not x
    else:
        def function(x, y):
            return bool(x)
    return self.operation(None, function)
Return the truth value of each element .
16,236
def read_all(pattern='data/lightbulb-*.csv'):
    """Load every CSV matching `pattern` as a compacted TimeSeries."""
    series = []
    for filename in glob.iglob(pattern):
        print('reading', filename, file=sys.stderr)
        ts = traces.TimeSeries.from_csv(
            filename,
            time_column=0,
            time_transform=parse_iso_datetime,
            value_column=1,
            value_transform=int,
            default=0,
        )
        # drop redundant repeated values
        ts.compact()
        series.append(ts)
    return series
Read all of the CSVs in a directory matching the filename pattern as TimeSeries .
16,237
def read_dependencies(filename):
    """Parse a requirements file, skipping comments and -r includes."""
    filepath = os.path.join('requirements', filename)
    dependencies = []
    with open(filepath, 'r') as stream:
        for line in stream:
            # strip trailing comments and surrounding whitespace
            package = line.strip().split('#')[0].strip()
            if not package:
                continue
            # ignore "-r other-file.txt" include directives
            if package.split(' ')[0] == '-r':
                continue
            dependencies.append(package)
    return dependencies
Read in the dependencies from the virtualenv requirements file .
16,238
def process(self, plugin, instance=None, action=None):
    """Run one (plugin, instance, action) step and return a formatted result."""
    # resolve serialized ids back to the live objects
    plugin_obj = self.__plugins[plugin["id"]]
    instance_obj = None if instance is None else self.__instances[instance["id"]]

    result = pyblish.plugin.process(
        plugin=plugin_obj,
        context=self._context,
        instance=instance_obj,
        action=action,
    )

    return formatting.format_result(result)
Given JSON objects from client perform actual processing
16,239
def _dispatch ( self , method , params ) : self . _count += 1 func = getattr ( self , method ) try : return func ( * params ) except Exception as e : traceback . print_exc ( ) raise e
Customise exception handling
16,240
def emit(self, signal, kwargs):
    """Forward `signal` to pyblish.api, replacing serialized references
    (context / instance id / plugin id) with their live objects."""
    if "context" in kwargs:
        kwargs["context"] = self._context
    for key in ("instance", "plugin"):
        if key not in kwargs:
            continue
        lookup = self.__instances if key == "instance" else self.__plugins
        kwargs[key] = lookup[kwargs[key]]
    pyblish.api.emit(signal, **kwargs)
Trigger registered callbacks
16,241
def resolve_fragment(self, document, fragment):
    """Resolve a JSON pointer `fragment` against `document`."""
    fragment = fragment.lstrip(u"/")
    parts = unquote(fragment).split(u"/") if fragment else []

    for part in parts:
        # JSON-pointer escapes, in this order: ~1 -> "/", then ~0 -> "~"
        part = part.replace(u"~1", u"/").replace(u"~0", u"~")

        if isinstance(document, Sequence):
            # array indices arrive as strings
            try:
                part = int(part)
            except ValueError:
                pass

        try:
            document = document[part]
        except (TypeError, LookupError):
            raise RefResolutionError(
                "Unresolvable JSON pointer: %r" % fragment
            )

    return document
Resolve a fragment within the referenced document .
16,242
def generate_safemode_windows():
    """Write a run.bat that launches pyblish-qml in safe mode on Windows.

    Requires pyblish, pyblish_qml and PyQt5 to be importable so their
    install locations can be baked into the batch file.
    """
    try:
        import pyblish
        import pyblish_qml
        import PyQt5
    except ImportError:
        # cannot build the batch file without the library locations
        return sys.stderr.write(
            "Run this in a terminal with access to "
            "the Pyblish libraries and PyQt5.\n")

    # NOTE(review): the batch-file template appears to have been lost here;
    # `r` is an unbound name (presumably once a raw triple-quoted string,
    # r"""...""") — confirm against the original source
    template = r

    values = {}
    for lib in (pyblish, pyblish_qml, PyQt5):
        # two dirnames up: the site-packages root containing the package
        values[lib.__name__] = os.path.dirname(os.path.dirname(lib.__file__))

    values["python"] = os.path.dirname(sys.executable)

    with open("run.bat", "w") as f:
        print("Writing %s" % template.format(**values))
        f.write(template.format(**values))
Produce batch file to run QML in safe - mode
16,243
def find_python():
    """Locate a Python interpreter: explicit state, env override, then PATH."""
    python = _state.get("pythonExecutable")

    if not python:
        # the env var may hold several candidate paths
        env_paths = os.getenv(
            "PYBLISH_QML_PYTHON_EXECUTABLE", "").split(os.pathsep)
        python = next(
            (exe for exe in env_paths if os.path.isfile(exe)), None)

    if not python:
        python = which("python") or which("python3")

    if not python or not os.path.isfile(python):
        raise ValueError("Could not locate Python executable.")

    return python
Search for Python automatically
16,244
def find_pyqt5(python):
    """Locate the PyQt5 installation, trying state, env, then the interpreter."""
    pyqt5 = _state.get("pyqt5") or os.getenv("PYBLISH_QML_PYQT5")
    if pyqt5:
        return pyqt5

    # ask the target interpreter where its PyQt5 package lives
    try:
        path = subprocess.check_output([
            python, "-c",
            "import PyQt5, sys;"
            "sys.stdout.write(PyQt5.__file__)"
        ], universal_newlines=True)
    except subprocess.CalledProcessError:
        return pyqt5

    return os.path.dirname(os.path.dirname(path))
Search for PyQt5 automatically
16,245
def which(program):
    """Return the absolute path of `program` found on PATH, else None."""
    def _is_executable(path):
        return os.path.isfile(path) and os.access(path, os.X_OK)

    # PATHEXT supplies candidate suffixes on Windows; empty elsewhere
    extensions = os.getenv("PATHEXT", "").split(os.pathsep)
    for directory in os.environ["PATH"].split(os.pathsep):
        for extension in extensions:
            candidate = os.path.join(directory.strip('"'),
                                     program + extension.lower())
            if _is_executable(candidate):
                return candidate
    return None
Locate program in PATH
16,246
def listen(self):
    """Listen to the child's stdout on a background thread (or inline
    when modal), and start the keep-alive pulse.

    JSON lines carrying the popen.request header are dispatched to the
    service and answered over the child's stdin; anything else is
    forwarded to our own stdout.
    """
    def _listen():
        HEADER = "pyblish-qml:popen.request"

        for line in iter(self.popen.stdout.readline, b""):

            if six.PY3:
                line = line.decode("utf8")

            try:
                response = json.loads(line)
            except Exception:
                # not JSON: pass the raw output straight through
                sys.stdout.write(line)
            else:
                if (hasattr(response, "get") and
                        response.get("header") == HEADER):
                    payload = response["payload"]
                    args = payload["args"]

                    func_name = payload["name"]

                    # optional wrapper lets hosts control dispatching
                    wrapper = _state.get("dispatchWrapper",
                                         default_wrapper)

                    func = getattr(self.service, func_name)
                    result = wrapper(func, *args)

                    # reply to the child over its stdin
                    data = json.dumps({
                        "header": "pyblish-qml:popen.response",
                        "payload": result
                    })

                    if six.PY3:
                        data = data.encode("ascii")

                    self.popen.stdin.write(data + b"\n")
                    self.popen.stdin.flush()

                else:
                    # valid JSON, but not our protocol: forward it
                    sys.stdout.write(line)

    if not self.listening:
        self._start_pulse()

        if self.modal:
            _listen()
        else:
            thread = threading.Thread(target=_listen)
            thread.daemon = True
            thread.start()

        self.listening = True
Listen to both stdout and stderr
16,247
def _start_pulse(self):
    """Emit a heartbeat to the child process every five seconds."""
    def _pulse():
        started = time.time()

        while True:
            message = json.dumps({"header": "pyblish-qml:server.pulse"})

            if six.PY3:
                message = message.encode("ascii")

            try:
                self.popen.stdin.write(message + b"\n")
                self.popen.stdin.flush()
            except IOError:
                # child is gone; stop pulsing
                break

            # align ticks to a 5-second cadence regardless of write latency
            time.sleep(5.0 - ((time.time() - started) % 5.0))

    thread = threading.Thread(target=_pulse)
    thread.daemon = True
    thread.start()
Send pulse to child process
16,248
def main(demo=False, aschild=False, targets=[]):
    """Start pyblish-qml, either as the child GUI process or as the server."""
    if aschild:
        # child mode: run the Qt application directly
        print("Starting pyblish-qml")

        compat.main()

        app = Application(APP_PATH, targets)
        app.listen()

        print("Done, don't forget to call `show()`")

        return app.exec_()

    # server mode: launch a (mock) service and block until it finishes
    print("Starting pyblish-qml server..")

    service = ipc.service.MockService() if demo else ipc.service.Service()

    server = ipc.server.Server(service, targets=targets)

    proxy = ipc.server.Proxy(server)
    proxy.show(settings.to_dict())

    server.listen()
    server.wait()
Start the Qt - runtime and show the window
16,249
def event(self, event):
    """Intercept window close: allow it only when ready, or when Shift forces it."""
    if event.type() == QtCore.QEvent.Close:
        modifiers = self.app.queryKeyboardModifiers()
        shift_pressed = QtCore.Qt.ShiftModifier & modifiers
        states = self.app.controller.states

        if shift_pressed:
            # Shift held: close unconditionally
            print("Force quitted..")
            self.app.controller.host.emit("pyblishQmlCloseForced")
            event.accept()

        elif any(state in states for state in ("ready", "finished")):
            self.app.controller.host.emit("pyblishQmlClose")
            event.accept()

        else:
            print("Not ready, hold SHIFT to force an exit")
            event.ignore()

    return super(Window, self).event(event)
Allow GUI to be closed upon holding Shift
16,250
def inFocus(self):
    """Ask the window manager to keep the GUI above other windows."""
    flags = self.window.flags()
    self.window.setFlags(flags | QtCore.Qt.WindowStaysOnTopHint)
Set GUI on - top flag
16,251
def listen(self):
    """Consume messages from the host and re-emit them as local signals."""
    # host verb -> local signal attribute
    signal_map = {
        "show": "shown",
        "hide": "hidden",
        "quit": "quitted",
        "publish": "published",
        "validate": "validated",
        "rise": "risen",
        "inFocus": "inFocused",
        "outFocus": "outFocused",
    }

    def _listen():
        while True:
            line = self.host.channels["parent"].get()
            payload = json.loads(line)["payload"]

            signal = signal_map.get(payload["name"])
            if signal is None:
                print("'{name}' was unavailable.".format(**payload))
                continue

            try:
                getattr(self, signal).emit(*payload.get("args", []))
            except Exception:
                # never let a bad signal kill the listener thread
                traceback.print_exc()

    thread = threading.Thread(target=_listen)
    thread.daemon = True
    thread.start()
Listen on incoming messages from host
16,252
def defer(target, args=None, kwargs=None, callback=None):
    """Run `target` on a worker thread, with an optional completion callback."""
    worker = _defer(target, args, kwargs, callback)
    # drop the bookkeeping entry once the thread finishes
    worker.finished.connect(lambda: _defer_cleanup(worker))
    worker.start()
    _defer_threads.append(worker)
    return worker
Perform operation in thread with callback
16,253
def schedule(func, time, channel="default"):
    """Run `func` after `time` ms, cancelling any pending job on `channel`."""
    try:
        # cancel whatever was previously queued on this channel
        _jobs[channel].stop()
    except (AttributeError, KeyError):
        pass

    timer = QtCore.QTimer()
    timer.setSingleShot(True)
    timer.timeout.connect(func)
    timer.start(time)

    # keep a reference so the timer isn't garbage-collected early
    _jobs[channel] = timer
Run func at a later time in a dedicated channel
16,254
def format_text(text):
    """Collapse whitespace runs within paragraphs (keeping the blank-line
    paragraph breaks) and wrap bare URLs in anchor tags."""
    cleaned = "\n\n".join(
        " ".join(chunk.split()) for chunk in text.split("\n\n")
    ).rstrip("\n")

    # match http(s) URLs that are not already inside a tag
    pattern = r"(https?:\/\/(?:w{1,3}.)?[^\s]*?(?:\.[a-z]+)+)"
    pattern += r"(?![^<]*?(?:<\/\w+>|\/?>))"
    if re.search(pattern, cleaned):
        html = r"<a href='\1'><font color='FF00CC'>\1</font></a>"
        cleaned = re.sub(pattern, html, cleaned)

    return cleaned
Remove newlines but preserve paragraphs
16,255
def SlotSentinel(*args):
    """Decorator factory wrapping QtCore.pyqtSlot so slot exceptions are
    printed instead of propagating into Qt.

    NOTE(review): the inner wrapper calls func(*args) only — keyword
    arguments are accepted but silently dropped; confirm this is intended.
    NOTE(review): when decorating directly (first argument is a function),
    this returns slotdecorator itself rather than wrapping the function —
    verify that call path against usage.
    """
    # support bare usage with no signature arguments
    if len(args) == 0 or isinstance(args[0], types.FunctionType):
        args = []

    @QtCore.pyqtSlot(*args)
    def slotdecorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                func(*args)
            except Exception:
                traceback.print_exc()

        return wrapper

    return slotdecorator
Provides exception handling for all slots
16,256
def _byteify(data):
    """Recursively encode unicode strings to UTF-8 bytes."""
    if isinstance(data, six.text_type):
        return data.encode("utf-8")
    if isinstance(data, list):
        return [_byteify(element) for element in data]
    if isinstance(data, dict):
        return {_byteify(k): _byteify(v) for k, v in data.items()}
    # numbers, bools, None, bytes etc. pass through untouched
    return data
Convert unicode to bytes
16,257
def process(self, plugin, context, instance=None, action=None):
    """Serialize the plug-in (and instance) and forward a process request.

    NOTE(review): `context` is accepted but not forwarded — presumably
    kept for interface parity with the host; confirm.
    """
    serialized_plugin = plugin.to_json()
    serialized_instance = None if instance is None else instance.to_json()
    return self._dispatch(
        "process", args=[serialized_plugin, serialized_instance, action])
Transmit a process request to host
16,258
def _self_destruct(self):
    """Schedule a hard exit in 15 seconds; a parent heartbeat cancels it."""
    def _die():
        os._exit(0)

    self._kill = threading.Timer(15, _die)
    self._kill.start()
Auto quit exec if parent process failed
16,259
def _listen(self):
    """Listen for JSON messages on stdin and route them by header.

    Responses and parent messages are queued for consumers; server
    pulses reset the self-destruct watchdog. Runs on a daemon thread.
    """

    def _listen():
        # NOTE(review): sentinel b"" assumes a bytes-mode stdin; a Python 3
        # text-mode stdin returns "" at EOF so this loop would not stop
        # there -- confirm how the parent opens the pipe.
        for line in iter(sys.stdin.readline, b""):
            try:
                response = json.loads(line)
            except Exception as e:
                # Malformed input is fatal on the listener thread.
                raise e
            else:
                if response.get("header") == "pyblish-qml:popen.response":
                    self.channels["response"].put(line)
                elif response.get("header") == "pyblish-qml:popen.parent":
                    self.channels["parent"].put(line)
                elif response.get("header") == "pyblish-qml:server.pulse":
                    # Heartbeat from parent; restart the watchdog timer.
                    self._kill.cancel()
                    self._self_destruct()
                else:
                    raise Exception("Unhandled message "
                                    "passed to Popen, '%s'" % line)

    thread = threading.Thread(target=_listen)
    thread.daemon = True
    thread.start()
Listen for messages passed from parent
16,260
def _dispatch(self, func, args=None):
    """Send a request to the parent process and block for its reply.

    Arguments:
        func (str): Name of the remote function to call.
        args (list, optional): Positional arguments for the call.

    Returns:
        The "payload" value of the parent's response message.
    """
    data = json.dumps({
        "header": "pyblish-qml:popen.request",
        "payload": {
            "name": func,
            "args": args or list(),
        }
    })

    # A pending message would mean an earlier reply was never consumed.
    assert self.channels["response"].empty(), (
        "There were pending messages in the response channel")

    sys.stdout.write(data + "\n")
    sys.stdout.flush()

    try:
        # Blocks until the parent responds.
        message = self.channels["response"].get()

        if six.PY3:
            response = json.loads(message)
        else:
            # Python 2: convert unicode back to bytes for downstream use.
            response = _byteify(json.loads(message, object_hook=_byteify))

    except TypeError as e:
        raise e

    else:
        assert response["header"] == "pyblish-qml:popen.response", response
        return response["payload"]
Send message to parent process
16,261
def from_json(cls, plugin):
    """Build a PluginProxy class from a serialised plug-in dictionary.

    Arguments:
        plugin (dict): Serialised plug-in; must contain "name" and a
            "process" entry whose "args" list holds the argument names.

    Returns:
        A new subclass of `cls` carrying the plug-in's attributes, with
        placeholder `process` and `repair` methods matching the original
        signature and the source dictionary stored on `__orig__`.
    """
    name = plugin["name"] + "Proxy"
    cls = type(name, (cls,), plugin)

    # Build placeholder methods with the serialised argument signature.
    # Fix: exec() into an explicit namespace. In Python 3 exec() cannot
    # rebind function locals, so the original pre-declared
    # `process = None` / `repair = None` were never overwritten and the
    # proxy methods silently ended up as None.
    namespace = {}
    for member in ("process", "repair"):
        args = ", ".join(plugin["process"]["args"])
        func = "def {name}({args}): pass".format(name=member, args=args)
        exec(func, namespace)

    cls.process = namespace["process"]
    cls.repair = namespace["repair"]
    cls.__orig__ = plugin

    return cls
Build PluginProxy object from incoming dictionary
16,262
def Item(**kwargs):
    """Factory function for QAbstractListModel items.

    Creates a one-off subclass of AbstractItem whose class attributes are
    the supplied keyword arguments, instantiates it, and assigns each
    value onto the instance so property bindings see the data.
    """
    parent = kwargs.pop("parent", None)
    cls = type("Item", (AbstractItem,), kwargs.copy())

    self = cls(parent)
    # Keep the raw dict for serialisation back to the host.
    self.json = kwargs

    for key, value in kwargs.items():
        if hasattr(self, key):
            # Name is shadowed by a declared property; write through its
            # prefixed backing attribute instead (prefix from PropertyType).
            key = PropertyType.prefix + key
        setattr(self, key, value)

    return self
Factory function for QAbstractListModel items
16,263
def add_item(self, item):
    """Add a new item to the model.

    Wraps the dict `item` into an Item instance inside a
    begin/endInsertRows pair so attached views update, and re-emits
    the item's change signal as the model's dataChanged.

    Returns:
        The created Item.
    """
    self.beginInsertRows(QtCore.QModelIndex(),
                         self.rowCount(),
                         self.rowCount())

    item["parent"] = self
    item = Item(**item)
    self.items.append(item)
    self.endInsertRows()

    # Per-item changes are forwarded as model dataChanged signals.
    item.__datachanged__.connect(self._dataChanged)

    return item
Add new item to model
16,264
def remove_item(self, item):
    """Remove `item` from the model, notifying attached views."""
    row = self.items.index(item)
    self.beginRemoveRows(QtCore.QModelIndex(), row, row)
    del self.items[row]
    self.endRemoveRows()
Remove item from model
16,265
def _dataChanged(self, item):
    """Explicitly emit dataChanged for the row holding `item`.

    Items signal their own changes; views only listen to the model, so
    the per-item signal is translated into a model-level one here.
    """
    index = self.items.index(item)
    qindex = self.createIndex(index, 0)
    self.dataChanged.emit(qindex, qindex)
Explicitly emit dataChanged upon item changing
16,266
def add_plugin(self, plugin):
    """Append a plug-in to the model.

    Arguments:
        plugin (dict): Serialised plug-in.
    """
    item = {}
    item.update(defaults["common"])
    item.update(defaults["plugin"])

    for member in ["pre11",
                   "name",
                   "label",
                   "optional",
                   "category",
                   "actions",
                   "id",
                   "order",
                   "doc",
                   "type",
                   "module",
                   "match",
                   "hasRepair",
                   "families",
                   "contextEnabled",
                   "instanceEnabled",
                   "__instanceEnabled__",
                   "path"]:
        item[member] = plugin[member]

    item["familiesConcatenated"] = ", ".join(plugin["families"])

    # Linkify bare URLs in the docstring, skipping ones already in markup.
    pattern = r"(https?:\/\/(?:w{1,3}.)?[^\s]*?(?:\.[a-z]+)+)"
    pattern += r"(?![^<]*?(?:<\/\w+>|\/?>))"

    if item["doc"] and re.search(pattern, item["doc"]):
        html = r"<a href='\1'><font color='FF00CC'>\1</font></a>"
        item["doc"] = re.sub(pattern, html, item["doc"])

    item["itemType"] = "plugin"
    item["hasCompatible"] = True
    item["isToggled"] = plugin.get("active", True)

    # Group plug-ins by their verb rather than their class type.
    item["verb"] = {
        "Selector": "Collect",
        "Collector": "Collect",
        "Validator": "Validate",
        "Extractor": "Extract",
        "Integrator": "Integrate",
        "Conformer": "Integrate",
    }.get(item["type"], "Other")

    # Actions that always apply get their icon shown immediately.
    for action in item["actions"]:
        if action["on"] == "all":
            item["actionsIconVisible"] = True

    self.add_section(item["verb"])

    item = self.add_item(item)
    self.plugins.append(item)
Append plugin to model
16,267
def add_instance(self, instance):
    """Append an instance to the model.

    Arguments:
        instance (dict): Serialised instance, including its "data" dict.
    """
    assert isinstance(instance, dict)

    item = defaults["common"].copy()
    item.update(defaults["instance"])
    item.update(instance["data"])
    item.update(instance)

    item["itemType"] = "instance"
    item["isToggled"] = instance["data"].get("publish", True)
    item["hasCompatible"] = True
    # Instances are grouped by category, falling back to their family.
    item["category"] = item["category"] or item["family"]

    self.add_section(item["category"])

    # Primary family plus any additional families, joined for display.
    families = [instance["data"]["family"]]
    families.extend(instance["data"].get("families", []))
    item["familiesConcatenated"] += ", ".join(families)

    item = self.add_item(item)
    self.instances.append(item)
Append instance to model
16,268
def remove_instance(self, item):
    """Remove an instance from the model.

    Drops `item` from the instance registry, then from the item list.
    """
    self.instances.remove(item)
    self.remove_item(item)
Remove instance from model
16,269
def add_section(self, name):
    """Append a section named `name`, reusing an existing one if present.

    Returns:
        The existing or newly created section item.
    """
    assert isinstance(name, str)

    # Sections are unique by name.
    existing = next((s for s in self.sections if s.name == name), None)
    if existing is not None:
        return existing

    item = defaults["common"].copy()
    item.update({"name": name, "itemType": "section"})

    item = self.add_item(item)
    self.sections.append(item)

    return item
Append section to model
16,270
def add_context(self, context, label=None):
    """Append the context item to the model.

    Arguments:
        context (dict): Serialised context.
        label (str, optional): Explicit display label; falls back to the
            context's own "label" data, then to settings.ContextLabel.
    """
    assert isinstance(context, dict)

    item = defaults["common"].copy()
    item.update(defaults["instance"])
    item.update(context)

    item["family"] = None
    # Fix: the `label` argument was previously accepted but ignored.
    item["label"] = (label or
                     context["data"].get("label") or
                     settings.ContextLabel)
    item["itemType"] = "instance"
    item["isToggled"] = True
    item["optional"] = False
    item["hasCompatible"] = True

    item = self.add_item(item)
    self.instances.append(item)
Append context to model
16,271
def update_with_result(self, result):
    """Update the item-model with a processing result from the host.

    Arguments:
        result (dict): Must hold "plugin", "instance", "records" and
            "duration"; "error" is optional.
    """
    assert isinstance(result, dict), "%s is not a dictionary" % result

    for type in ("instance", "plugin"):
        id = (result[type] or {}).get("id")

        # A result without an id refers to the context itself, which is
        # always the first instance item.
        is_context = not id
        if is_context:
            item = self.instances[0]
        else:
            item = self.items.get(id)
            if item is None:
                continue

        item.isProcessing = False
        item.currentProgress = 1
        item.processed = True
        # A single warning record flags the whole item.
        item.hasWarning = item.hasWarning or any([
            record["levelno"] == logging.WARNING
            for record in result["records"]
        ])

        if result.get("error"):
            item.hasError = True
            item.amountFailed += 1
        else:
            item.succeeded = True
            item.amountPassed += 1

        item.duration += result["duration"]
        item.finishedAt = time.time()

        # Reveal the actions icon once any action's trigger is met.
        if item.itemType == "plugin" and not item.actionsIconVisible:
            actions = list(item.actions)

            # Discard actions whose condition did not come true.
            for action in list(actions):
                if action["on"] == "failed" and not item.hasError:
                    actions.remove(action)
                if action["on"] == "succeeded" and not item.succeeded:
                    actions.remove(action)
                if action["on"] == "processed" and not item.processed:
                    actions.remove(action)

            if actions:
                item.actionsIconVisible = True

        # Fallback target when the item belongs to no known section.
        class DummySection(object):
            hasWarning = False
            hasError = False
            succeeded = False

        section_item = DummySection()
        for section in self.sections:
            if item.itemType == "plugin" and section.name == item.verb:
                section_item = section
            if (item.itemType == "instance" and
                    section.name == item.category):
                section_item = section

        # Sections aggregate the status of their members.
        section_item.hasWarning = (
            section_item.hasWarning or item.hasWarning
        )
        section_item.hasError = section_item.hasError or item.hasError
        section_item.succeeded = section_item.succeeded or item.succeeded
        section_item.isProcessing = False
Update item-model with result from host
16,272
def reset_status(self):
    """Reset the processing state and progress bar of every item."""
    for each in self.items:
        each.isProcessing = False
        each.currentProgress = 0
Reset progress bars
16,273
def add_exclusion(self, role, value):
    """Hide items whose `role` equals `value` from this proxy."""
    self._add_rule(self.excludes, role, value)
Exclude item if role equals value
16,274
def add_inclusion(self, role, value):
    """Show only items whose `role` equals `value` in this proxy."""
    self._add_rule(self.includes, role, value)
Include item if role equals value
16,275
def format_records(records):
    """Serialise each LogRecord in `records` via format_record()."""
    return [format_record(record_) for record_ in records]
Serialise multiple records
16,276
def format_record(record):
    """Serialise a logging.LogRecord instance into a plain dict.

    The raw "msg" attribute is exposed as a string under "message";
    optional schema validation is enabled via the PYBLISH_SAFE
    environment variable.
    """
    keys = ("threadName", "name", "thread", "created", "process",
            "processName", "args", "module", "filename", "levelno",
            "exc_text", "pathname", "lineno", "msg", "exc_info",
            "funcName", "relativeCreated", "levelname", "msecs")

    serialised = {key: getattr(record, key, None) for key in keys}
    serialised["message"] = str(serialised.pop("msg"))

    if os.getenv("PYBLISH_SAFE"):
        schema.validate(serialised, "record")

    return serialised
Serialise LogRecord instance
16,277
def format_plugins(plugins):
    """Serialise each plug-in in `plugins` via format_plugin()."""
    return [format_plugin(plugin_) for plugin_ in plugins]
Serialise multiple plug-ins
16,278
def iterator(plugins, context):
    """Yield (plugin, instance) pairs for processing.

    Iteration stops as soon as the registered logic test reports a
    reason to halt (e.g. a failed validation order).

    Yields:
        (plugin, instance): `instance` is None for plug-ins that process
            the context as a whole (__instanceEnabled__ is False).
    """
    test = pyblish.logic.registered_test()
    state = {
        "nextOrder": None,
        "ordersWithError": set()
    }

    for plugin in plugins:
        state["nextOrder"] = plugin.order

        message = test(**state)
        if message:
            # Fix (PEP 479): raising StopIteration inside a generator
            # becomes a RuntimeError on Python 3.7+; end with `return`.
            return

        instances = pyblish.api.instances_by_plugin(context, plugin)
        if plugin.__instanceEnabled__:
            for instance in instances:
                yield plugin, instance
        else:
            yield plugin, None
An iterator for plug - in and instance pairs
16,279
def getPluginActions(self, index):
    """Return actions from the plug-in at `index`.

    Arguments:
        index (int): Proxy-model row of the plug-in.

    Returns:
        list: Action dicts relevant to the plug-in's current state,
            with empty categories and separators pruned.
    """
    # Map the proxy row back to the source model row.
    index = self.data["proxies"]["plugin"].mapToSource(
        self.data["proxies"]["plugin"].index(
            index, 0, QtCore.QModelIndex())).row()
    item = self.data["models"]["item"].items[index]

    # Inject the source row so the frontend can route the action back.
    actions = [dict(action, **{"index": index}) for action in item.actions]

    # Drop actions whose trigger condition does not match current state.
    for action in list(actions):
        if action["on"] == "failed" and not item.hasError:
            actions.remove(action)
        if action["on"] == "succeeded" and not item.succeeded:
            actions.remove(action)
        if action["on"] == "processed" and not item.processed:
            actions.remove(action)
        if action["on"] == "notProcessed" and item.processed:
            actions.remove(action)

    # Discard categories/separators not directly followed by an action.
    remaining_actions = list()
    index = 0
    try:
        action = actions[index]
    except IndexError:
        pass
    else:
        while action:
            try:
                action = actions[index]
            except IndexError:
                break

            isempty = False

            if action["__type__"] in ("category", "separator"):
                try:
                    next_ = actions[index + 1]
                    if next_["__type__"] != "action":
                        isempty = True
                except IndexError:
                    isempty = True

            if not isempty:
                remaining_actions.append(action)

            index += 1

    return remaining_actions
Return actions from plug - in at index
16,280
def exclude(self, target, operation, role, value):
    """Apply an exclusion rule on one of the proxy models.

    Arguments:
        target (str): One of "result", "instance" or "plugin".
        operation (str): Either "add" or "remove".
        role (str): Role the rule applies to.
        value: Value to match against.

    Raises:
        TypeError: On an unrecognised `operation`.
    """
    proxies = self.data["proxies"]
    proxy = {
        "result": proxies["result"],
        "instance": proxies["instance"],
        "plugin": proxies["plugin"]
    }[target]

    try:
        method = {
            "add": proxy.add_exclusion,
            "remove": proxy.remove_exclusion,
        }[operation]
    except KeyError:
        raise TypeError("operation must be either `add` or `remove`")

    method(role, value)
Exclude a role of value at target
16,281
def __item_data(self, model, index):
    """Return the item at `index` of `model` as a plain dict."""
    item = model.items[index]
    return {
        "name": item.name,
        "data": item.data,
        "doc": getattr(item, "doc", None),
        "path": getattr(item, "path", None),
    }
Return item data as dict
16,282
def comment_sync(self, comment):
    """Push `comment` to the host and notify its subscribers."""
    host = self.host
    host.update(key="comment", value=comment)
    host.emit("commented", comment=comment)
Update comments to host and notify subscribers
16,283
def on_commenting(self, comment):
    """Handle the user entering a comment.

    Debounced through util.schedule so rapid keystrokes collapse into
    a single update to the host.
    """

    def update():
        # Write the comment into the cached context and local state,
        # then broadcast to host and listeners.
        context = self.host.cached_context
        context.data["comment"] = comment
        self.data["comment"] = comment

        self.comment_sync(comment)
        self.commented.emit()

    # Restart the 100 ms timer on every keystroke.
    util.schedule(update, 100, channel="commenting")
The user is entering a comment
16,284
def publish(self):
    """Start asynchronous publishing.

    Gathers the toggled plug-ins (collectors excluded, since collection
    has already happened) and instances, then hands them to run() via a
    deferred worker.
    """

    def get_data():
        # Runs on a worker thread.
        model = self.data["models"]["item"]

        # Resolve model items back to the host's cached objects by id.
        host_plugins = dict((p.id, p) for p in self.host.cached_discover)
        host_context = dict((i.id, i) for i in self.host.cached_context)

        plugins = list()
        instances = list()

        for plugin in models.ItemIterator(model.plugins):
            # Collectors have already run; skip them.
            if pyblish.lib.inrange(
                    number=plugin.order,
                    base=pyblish.api.Collector.order):
                continue
            plugins.append(host_plugins[plugin.id])

        for instance in models.ItemIterator(model.instances):
            instances.append(host_context[instance.id])

        return plugins, instances

    def on_data_received(args):
        self.run(*args, callback=on_finished)

    def on_finished():
        self.host.emit("published", context=None)

    util.defer(get_data, callback=on_data_received)
Start asynchronous publishing
16,285
def from_dict(settings):
    """Apply settings from a dictionary.

    Each key/value pair is set as an attribute on `self`.

    NOTE(review): `self` is not a parameter here; this presumably relies
    on a module-level `self` (e.g. `self = sys.modules[__name__]`) --
    confirm against the enclosing module.
    """
    assert isinstance(settings, dict), "`settings` must be of type dict"

    for key, value in settings.items():
        setattr(self, key, value)
Apply settings from dictionary
16,286
def create_autospec(spec, spec_set=False, instance=False, _parent=None,
                    _name=None, **kwargs):
    """Create a mock object using another object as a spec. Attributes on
    the mock will use the corresponding attribute on the `spec` object as
    their spec.

    If `spec_set` is True then attempting to set an attribute not on the
    spec object will raise an AttributeError.

    If `instance` is True the mock behaves like an *instance* of the class
    given as spec, so it is not callable unless instances of the class are.

    `_parent` and `_name` are internal, used when recursing for children.
    """
    if _is_list(spec):
        # Can't spec a list instance's contents; spec its type instead.
        spec = type(spec)

    is_type = isinstance(spec, ClassTypes)

    _kwargs = {'spec': spec}
    if spec_set:
        _kwargs = {'spec_set': spec}
    elif spec is None:
        # None is a valid spec, but means "don't restrict attributes".
        _kwargs = {}
    _kwargs.update(kwargs)

    Klass = MagicMock
    if type(spec) in DescriptorTypes:
        # Descriptors get no spec: their returned type is unknown.
        _kwargs = {}
    elif not _callable(spec):
        Klass = NonCallableMagicMock
    elif is_type and instance and not _instance_callable(spec):
        Klass = NonCallableMagicMock

    _new_name = _name
    if _parent is None:
        # For a top-level object no _new_name should be set.
        _new_name = ''

    mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name,
                 name=_name, **_kwargs)

    if isinstance(spec, FunctionTypes):
        # Only happens at the top level; functions are not recursed into.
        mock = _set_signature(mock, spec)
    else:
        _check_signature(spec, mock, is_type, instance)

    if _parent is not None and not instance:
        _parent._mock_children[_name] = mock

    if is_type and not instance and 'return_value' not in kwargs:
        # Calling the specced class yields an autospecced instance.
        mock.return_value = create_autospec(spec, spec_set, instance=True,
                                            _name='()', _parent=mock)

    for entry in dir(spec):
        if _is_magic(entry):
            # MagicMock configures magic methods itself.
            continue

        if isinstance(spec, FunctionTypes) and entry in FunctionAttributes:
            # Allow the mock to still look like a function.
            continue

        # getattr can raise for arbitrary descriptors/properties.
        try:
            original = getattr(spec, entry)
        except AttributeError:
            continue

        kwargs = {'spec': original}
        if spec_set:
            kwargs = {'spec_set': original}

        if not isinstance(original, FunctionTypes):
            # Non-function attributes are specced lazily on first access.
            new = _SpecState(original, spec_set, mock, entry, instance)
            mock._mock_children[entry] = new
        else:
            parent = mock
            if isinstance(spec, FunctionTypes):
                parent = mock.mock

            new = MagicMock(parent=parent, name=entry, _new_name=entry,
                            _new_parent=parent, **kwargs)
            mock._mock_children[entry] = new
            skipfirst = _must_skip(spec, entry, is_type)
            _check_signature(original, new, skipfirst=skipfirst)

        if isinstance(new, FunctionTypes):
            setattr(mock, entry, new)

    return mock
Create a mock object using another object as a spec . Attributes on the mock will use the corresponding attribute on the spec object as their spec .
16,287
def attach_mock(self, mock, attribute):
    """Attach `mock` as `attribute` of this mock.

    The mock is severed from any previous parent first; the setattr
    below re-parents it so its calls are recorded in this mock's
    method_calls and mock_calls.
    """
    for name, value in (("_mock_parent", None),
                        ("_mock_new_parent", None),
                        ("_mock_name", ''),
                        ("_mock_new_name", None)):
        setattr(mock, name, value)

    setattr(self, attribute, mock)
Attach a mock as an attribute of this one replacing its name and parent . Calls to the attached mock will be recorded in the method_calls and mock_calls attributes of this one .
16,288
def configure_mock(self, **kwargs):
    """Set attributes on the mock through keyword arguments.

    Dotted names traverse child mocks; shallower names are applied
    first so parents exist before their children are configured.
    """
    by_depth = sorted(kwargs.items(),
                      key=lambda entry: entry[0].count('.'))
    for name, value in by_depth:
        parts = name.split('.')
        target = self
        for part in parts[:-1]:
            target = getattr(target, part)
        setattr(target, parts[-1], value)
Set attributes on the mock through keyword arguments .
16,289
def _get_child_mock(self, **kw):
    """Create the child mocks for attributes and return value.

    By default child mocks will be the same type as the parent.
    Subclasses of Mock may want to override this to customize the way
    child mocks are made.
    """
    _type = type(self)
    if not issubclass(_type, CallableMixin):
        # Non-callable parents still produce callable children.
        if issubclass(_type, NonCallableMagicMock):
            klass = MagicMock
        elif issubclass(_type, NonCallableMock):
            klass = Mock
    else:
        # Callable parent: reuse its class minus the callable mixin,
        # which by construction is next in the MRO.
        klass = _type.__mro__[1]
    return klass(**kw)
Create the child mocks for attributes and return value . By default child mocks will be the same type as the parent . Subclasses of Mock may want to override this to customize the way child mocks are made .
16,290
def mock_add_spec(self, spec, spec_set=False):
    """Add a spec to a mock. `spec` can either be an object or a class;
    only attributes present on the spec can then be fetched from the mock.

    If `spec_set` is True, setting attributes not on the spec raises
    AttributeError.
    """
    self._mock_add_spec(spec, spec_set)
    # Re-evaluate which magic methods the mock should now support.
    self._mock_set_magics()
Add a spec to a mock . spec can either be an object or a list of strings . Only attributes on the spec can be fetched as attributes from the mock .
16,291
def call_list(self):
    """Return all intermediate calls leading up to, and including,
    this call, oldest first.
    """
    chain = []
    node = self
    while node is not None:
        # Synthetic parents created purely for name resolution are
        # skipped (from_kall is False on them).
        if node.from_kall:
            chain.append(node)
        node = node.parent
    return _CallList(reversed(chain))
For a call object that represents multiple calls call_list returns a list of all the intermediate calls as well as the final call .
16,292
def register_dispatch_wrapper(wrapper):
    """Register a dispatch wrapper for servers.

    The wrapper must take exactly one positional argument (the function
    to dispatch) plus *args and **kwargs, e.g.:

        def wrapper(func, *args, **kwargs):
            return func(*args, **kwargs)

    Raises:
        TypeError: If the wrapper's signature does not match.
    """
    # Fix: inspect.getargspec() was removed in Python 3.11; prefer
    # getfullargspec() (whose `varkw` replaces `keywords`), keeping the
    # legacy call as a fallback for Python 2.
    try:
        signature = inspect.getfullargspec(wrapper)
        varkw = signature.varkw
    except AttributeError:
        signature = inspect.getargspec(wrapper)
        varkw = signature.keywords

    if any([len(signature.args) != 1,
            signature.varargs is None,
            varkw is None]):
        raise TypeError("Wrapper signature mismatch")

    def _wrapper(func, *args, **kwargs):
        try:
            return wrapper(func, *args, **kwargs)
        except Exception as e:
            # Kill the server on unhandled host exceptions, then re-raise.
            _state["currentServer"].stop()
            traceback.print_exc()
            raise e

    _state["dispatchWrapper"] = _wrapper
Register a dispatch wrapper for servers
16,293
def install(modal):
    """Perform first-time install, replacing any prior installation.

    Arguments:
        modal (bool): When True the GUI blocks the host, so host calls
            need no marshalling to a separate thread.
    """
    if _state.get("installed"):
        sys.stdout.write("Already installed, uninstalling..\n")
        uninstall()

    # The threaded wrapper is only needed for non-modal operation.
    use_threaded_wrapper = not modal

    install_callbacks()
    install_host(use_threaded_wrapper)

    _state["installed"] = True
Perform first time install
16,294
def show(parent=None, targets=None, modal=None,
         auto_publish=False, auto_validate=False):
    """Attempt to show the GUI, starting a QML server when needed.

    Arguments:
        parent: Unused; kept for signature compatibility.
        targets (list, optional): Publishing targets for the server.
            Fix: was a mutable default argument (`targets=[]`); None is
            now the sentinel and an empty list is created per call.
        modal (bool, optional): Block interaction with the host; falls
            back to the PYBLISH_QML_MODAL environment variable.
        auto_publish (bool, optional): Start publishing immediately.
        auto_validate (bool, optional): Start validation immediately.

    Returns:
        The running server, or the result of host.desplash() on failure.
    """
    if targets is None:
        targets = []

    if modal is None:
        modal = bool(os.environ.get("PYBLISH_QML_MODAL", False))

    install(modal)

    show_settings = settings.to_dict()
    show_settings['autoPublish'] = auto_publish
    show_settings['autoValidate'] = auto_validate

    # Reuse an already-running GUI when possible.
    if _state.get("currentServer"):
        server = _state["currentServer"]
        proxy = ipc.server.Proxy(server)

        try:
            proxy.show(show_settings)
            return server

        except IOError:
            # The running instance has already been closed.
            _state.pop("currentServer")

    if not host.is_headless():
        host.splash()

    try:
        service = ipc.service.Service()
        server = ipc.server.Server(service, targets=targets, modal=modal)
    except Exception:
        # Make sure any splash screen is removed on failure.
        traceback.print_exc()
        return host.desplash()

    proxy = ipc.server.Proxy(server)
    proxy.show(show_settings)

    # Store reference to server for future calls.
    _state["currentServer"] = server

    log.info("Success. QML server available as "
             "pyblish_qml.api.current_server()")

    server.listen()

    return server
Attempt to show GUI
16,295
def install_host(use_threaded_wrapper):
    """Install required components into supported hosts.

    Each known host integration is tried in turn; the first whose
    module imports successfully wins.
    """
    installers = (_install_maya,
                  _install_houdini,
                  _install_nuke,
                  _install_nukeassist,
                  _install_hiero,
                  _install_nukestudio,
                  _install_blender)

    for install in installers:
        try:
            install(use_threaded_wrapper)
        except ImportError:
            # Not running inside this host; try the next one.
            continue
        else:
            break
Install required components into supported hosts
16,296
def _remove_googleapiclient():
    """Strip googleapiclient entries from PYTHONPATH.

    Called from the Maya 2018 setup path, where a bundled
    googleapiclient would otherwise shadow the one needed here.
    """
    keyword = "googleapiclient"

    # Fix: use .get() so a missing PYTHONPATH no longer raises KeyError.
    python_paths = os.environ.get("PYTHONPATH", "").split(os.pathsep)
    paths = [path for path in python_paths if keyword not in path]
    os.environ["PYTHONPATH"] = os.pathsep.join(paths)
Check if the compatibility must be maintained
16,297
def _install_maya(use_threaded_wrapper):
    """Helper function for Autodesk Maya support."""
    from maya import utils, cmds

    def threaded_wrapper(func, *args, **kwargs):
        # Marshal the call onto Maya's main thread and return its result.
        return utils.executeInMainThreadWithResult(func, *args, **kwargs)

    sys.stdout.write("Setting up Pyblish QML in Maya\n")

    if cmds.about(version=True) == "2018":
        # Maya 2018's PYTHONPATH carries a conflicting googleapiclient.
        _remove_googleapiclient()

    _common_setup("Maya", threaded_wrapper, use_threaded_wrapper)
Helper function to Autodesk Maya support
16,298
def _install_houdini(use_threaded_wrapper):
    """Helper function for SideFX Houdini support."""
    import hdefereval

    def threaded_wrapper(func, *args, **kwargs):
        # Marshal the call onto Houdini's main thread, returning the result.
        return hdefereval.executeInMainThreadWithResult(
            func, *args, **kwargs)

    _common_setup("Houdini", threaded_wrapper, use_threaded_wrapper)
Helper function to SideFx Houdini support
16,299
def _install_nuke(use_threaded_wrapper):
    """Helper function for The Foundry Nuke support."""
    import nuke

    # The Nuke binary also launches Hiero, Nuke Studio and NukeAssist;
    # raise ImportError so the dedicated installers handle those modes.
    not_nuke_launch = (
        "--hiero" in nuke.rawArgs or
        "--studio" in nuke.rawArgs or
        "--nukeassist" in nuke.rawArgs
    )
    if not_nuke_launch:
        raise ImportError

    def threaded_wrapper(func, *args, **kwargs):
        # NOTE: Nuke takes args/kwargs as two positional containers,
        # unlike the Maya/Houdini equivalents.
        return nuke.executeInMainThreadWithResult(func, args, kwargs)

    _common_setup("Nuke", threaded_wrapper, use_threaded_wrapper)
Helper function to The Foundry Nuke support