idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
17,100
def doc_browse(self, args, range=None):
    """Ask the server for the documentation URI of the symbol at the cursor."""
    self.log.debug('browse: in')
    # Mark this call so the response handler opens a browser.
    self.call_options[self.call_id] = {"browse": True}
    self.send_at_position("DocUri", False, "point")
Browse doc of whatever at cursor .
17,101
def rename(self, new_name, range=None):
    """Request the server rename the symbol under the cursor to *new_name*.

    Prompts for a name when none is given, and saves the buffer first so
    the server sees the current file contents.
    """
    self.log.debug('rename: in')
    if not new_name:
        new_name = self.editor.ask_input("Rename to:")
    self.editor.write(noautocmd=True)
    begin, end = self.editor.word_under_cursor_pos()
    current_file = self.editor.path()
    self.editor.raw_message(current_file)
    params = {
        "typehint": "RenameRefactorDesc",
        "newName": new_name,
        "start": self.get_position(begin[0], begin[1]),
        # End offset is exclusive, hence the +1.
        "end": self.get_position(end[0], end[1]) + 1,
        "file": current_file,
    }
    self.send_refactor_request("RefactorReq", params, {"interactive": False})
Request a rename to the server .
17,102
def symbol_search(self, search_terms):
    """Search the server for public symbols matching *search_terms*."""
    self.log.debug('symbol_search: in')
    if not search_terms:
        self.editor.message('symbol_search_symbol_required')
        return
    self.send_request({
        "typehint": "PublicSymbolSearchReq",
        "keywords": search_terms,
        "maxResults": 25,
    })
Search for symbols matching a set of keywords
17,103
def send_refactor_request(self, ref_type, ref_params, ref_options):
    """Send a refactoring request to the ENSIME server.

    Records the target file under a fresh procId so the response can be
    matched back to it.
    """
    request = {
        "typehint": ref_type,
        "procId": self.refactor_id,
        "params": ref_params,
    }
    self.refactorings[self.refactor_id] = ref_params["file"]
    self.refactor_id += 1
    request.update(ref_options)
    self.send_request(request)
Send a refactor request to the Ensime server .
17,104
def apply_refactor(self, call_id, payload):
    """Apply a refactoring diff from the server, if its type is supported."""
    supported = ["Rename", "InlineLocal", "AddImport", "OrganizeImports"]
    if payload["refactorType"]["typehint"] in supported:
        diff_filepath = payload["diff"]
        path = self.editor.path()
        target = os.path.join(self.tmp_diff_folder, os.path.basename(path))
        cmd = [
            "patch",
            "--reject-file={}.rej".format(target),
            "--prefix={}".format(self.tmp_diff_folder),
            path,
            diff_filepath,
        ]
        # patch(1) exits non-zero when the diff does not apply cleanly.
        failed = Popen(cmd, stdout=PIPE, stderr=PIPE).wait()
        if failed:
            self.editor.message("failed_refactoring")
        # Reload the buffer so the patched file contents are shown.
        self.editor.edit(self.editor.path())
        self.editor.doautocmd('BufReadPre', 'BufRead', 'BufEnter')
Apply a refactor depending on its type .
17,105
def buffer_leave(self, filename):
    """Clear error highlights when the user leaves a buffer."""
    self.log.debug('buffer_leave: %s', filename)
    self.editor.clean_errors()
User is changing buffers.
17,106
def type_check(self, filename):
    """Re-typecheck the current file (triggered on buffer save)."""
    self.log.debug('type_check: in')
    self.editor.clean_errors()
    # NOTE(review): sends the editor's current path, not *filename* — confirm intended.
    self.send_request(
        {"typehint": "TypecheckFilesReq", "files": [self.editor.path()]})
Update type checking when user saves buffer .
17,107
def unqueue(self, timeout=10, should_wait=False):
    """Drain the queue of ensime responses and dispatch each payload.

    Polls for up to *timeout* seconds; when *should_wait* is true, keeps
    waiting for a first real reply even while the queue is empty.
    """
    start, now = time.time(), time.time()
    wait = self.queue.empty() and should_wait
    while (not self.queue.empty() or wait) and (now - start) < timeout:
        if wait and self.queue.empty():
            time.sleep(0.25)
            now = time.time()
        else:
            result = self.queue.get(False)
            self.log.debug('unqueue: result received\n%s', result)
            if result and result != "nil":
                # Got a real reply: stop waiting and reset the timeout window.
                wait = None
                start, now = time.time(), time.time()
                _json = json.loads(result)
                call_id = _json.get("callId")
                if _json["payload"]:
                    self.handle_incoming_response(call_id, _json["payload"])
            else:
                self.log.debug('unqueue: nil or None received')
    if (now - start) >= timeout:
        self.log.warning('unqueue: no reply from server for %ss', timeout)
Unqueue all the received ensime responses for a given file .
17,108
def tick(self, filename):
    """Attempt a connection (up to 10 tries) and flush queued messages."""
    if self.connection_attempts < 10:
        self.setup(True, False)
        self.connection_attempts += 1
    self.unqueue_and_display(filename)
Try to connect and display messages in queue .
17,109
def vim_enter(self, filename):
    """Initialise the EnsimeClient when Vim starts; greet on success."""
    if self.setup(True, False):
        self.editor.message("start_message")
Set up EnsimeClient when vim enters .
17,110
def complete_func(self, findstart, base):
    """Vim omni-completion entry point (see :h complete-functions).

    Called first with findstart=1 to locate the completion start column,
    then with findstart=0 to return the candidate list.
    """
    self.log.debug('complete_func: in %s %s', findstart, base)

    def detect_row_column_start():
        # Walk left from the cursor to the nearest token delimiter.
        row, col = self.editor.cursor()
        start = col
        line = self.editor.getline()
        while start > 0 and line[start - 1] not in " .,([{":
            start -= 1
        return row, col, start if start else 1

    if str(findstart) == "1":
        row, col, startcol = detect_row_column_start()
        # Fire the async completion request; results arrive via the queue.
        self.complete(row, col)
        self.completion_started = True
        return startcol
    else:
        result = []
        if self.completion_started:
            # Block briefly until the server's suggestions arrive.
            self.unqueue(timeout=self.completion_timeout, should_wait=True)
            suggestions = self.suggestions or []
            self.log.debug('complete_func: suggestions in')
            for m in suggestions:
                result.append(m)
            self.suggestions = None
            self.completion_started = False
        return result
Handle omni completion .
17,111
def handle_debug_break(self, call_id, payload):
    """Handle a DebugBreakEvent: report the stop location, remember the thread."""
    stopped_line = payload['line']
    root = self.launcher.config['root-dir']
    rel_path = os.path.relpath(payload['file'], root)
    self.editor.raw_message(
        feedback['notify_break'].format(stopped_line, rel_path))
    self.debug_thread_id = payload["threadId"]
Handle responses DebugBreakEvent .
17,112
def handle_debug_backtrace(self, call_id, payload):
    """Handle a DebugBacktrace response.

    Dumps the stack frames as JSON into a temp file and opens it in a
    read-only scratch split.
    """
    frames = payload["frames"]
    fd, path = tempfile.mkstemp('.json', text=True, dir=self.tmp_diff_folder)
    # Write and close *before* handing the file to the editor; the original
    # closed after split_window, so buffered JSON could still be unflushed
    # when the split read the file.
    with os.fdopen(fd, 'w') as tmpfile:
        tmpfile.write(json.dumps(frames, indent=2))
    opts = {'readonly': True, 'bufhidden': 'wipe',
            'buflisted': False, 'swapfile': False}
    self.editor.split_window(path, size=20, bufopts=opts)
Handle responses DebugBacktrace .
17,113
def _remove_legacy_bootstrap ( ) : home = os . environ [ 'HOME' ] old_base_dir = os . path . join ( home , '.config' , 'classpath_project_ensime' ) if os . path . isdir ( old_base_dir ) : shutil . rmtree ( old_base_dir , ignore_errors = True )
Remove bootstrap projects from the old path; they'd be really stale by now.
17,114
def _start_process(self, classpath):
    """Spawn an ENSIME server process for the given *classpath*.

    Returns an EnsimeProcess wrapping the child process; raises
    InvalidJavaPathError when the configured JVM is missing or not
    executable.
    """
    cache_dir = self.config['cache-dir']
    java_flags = self.config['java-flags']
    iswindows = os.name == 'nt'
    Util.mkdir_p(cache_dir)
    java = os.path.join(self.config['java-home'], 'bin',
                        'java.exe' if iswindows else 'java')
    # Validate the JVM before opening any file handles; the original opened
    # the log/devnull handles first and leaked them on these raises.
    if not os.path.exists(java):
        raise InvalidJavaPathError(errno.ENOENT, 'No such file or directory', java)
    elif not os.access(java, os.X_OK):
        raise InvalidJavaPathError(errno.EACCES, 'Permission denied', java)

    log_path = os.path.join(cache_dir, "server.log")
    log = open(log_path, "w")
    null = open(os.devnull, "r")
    args = (
        [java, "-cp", (';' if iswindows else ':').join(classpath)] +
        [a for a in java_flags if a] +
        ["-Densime.config={}".format(self.config.filepath),
         "org.ensime.server.Server"]
    )
    try:
        process = subprocess.Popen(args, stdin=null, stdout=log,
                                   stderr=subprocess.STDOUT)
    except Exception:
        # Don't leak the handles if the server fails to launch.
        log.close()
        null.close()
        raise
    pid_path = os.path.join(cache_dir, "server.pid")
    Util.write_file(pid_path, str(process.pid))

    def on_stop():
        # Cleanup callback run when the server process is stopped.
        log.close()
        null.close()
        with catch(Exception):
            os.remove(pid_path)

    return EnsimeProcess(cache_dir, process, log_path, on_stop)
Given a classpath prepared for running ENSIME spawns a server process in a way that is otherwise agnostic to how the strategy installs ENSIME .
17,115
def install(self):
    """Install the ENSIME server via a bootstrap sbt project and generate
    its classpath file.

    Returns True on success, None when the sbt run reported a failure.
    """
    project_dir = os.path.dirname(self.classpath_file)
    # NOTE(review): sbt_plugin is whatever Util.mkdir_p returns and is later
    # .format()-ed as a template — confirm mkdir_p's contract.
    sbt_plugin = Util.mkdir_p(project_dir)
    Util.mkdir_p(os.path.join(project_dir, "project"))
    Util.write_file(os.path.join(project_dir, "build.sbt"), self.build_sbt())
    Util.write_file(os.path.join(project_dir, "project", "build.properties"),
                    "sbt.version={}".format(self.SBT_VERSION))
    Util.write_file(os.path.join(project_dir, "project", "plugins.sbt"),
                    sbt_plugin.format(*self.SBT_COURSIER_COORDS))
    cd_cmd = "cd {}".format(project_dir)
    sbt_cmd = "sbt -Dsbt.log.noformat=true -batch saveClasspath"
    if int(self.vim.eval("has('nvim')")):
        import tempfile
        import re
        tmp_dir = tempfile.gettempdir()
        flag_file = "{}/ensime-vim-classpath.flag".format(tmp_dir)
        self.vim.command("echo 'Waiting for generation of classpath...'")
        # fish has no $? / && — use its own status variable and `and`.
        if re.match(".+fish$", self.vim.eval("&shell")):
            sbt_cmd += "; echo $status > {}".format(flag_file)
            self.vim.command("terminal {}; and {}".format(cd_cmd, sbt_cmd))
        else:
            sbt_cmd += "; echo $? > {}".format(flag_file)
            self.vim.command("terminal ({} && {})".format(cd_cmd, sbt_cmd))
        # Busy-wait until the terminal job drops its exit-status flag file.
        waiting_for_flag = True
        while waiting_for_flag:
            waiting_for_flag = not os.path.isfile(flag_file)
            if not waiting_for_flag:
                with open(flag_file, "r") as f:
                    rtcode = f.readline()
                os.remove(flag_file)
                if rtcode and int(rtcode) != 0:
                    self.vim.command("echo 'Something wrong happened, check the "
                                     "execution log...'")
                    return None
            else:
                time.sleep(0.2)
    else:
        self.vim.command("!({} && {})".format(cd_cmd, sbt_cmd))
    success = self.reorder_classpath(self.classpath_file)
    if not success:
        self.vim.command("echo 'Classpath ordering failed.'")
    return True
Installs ENSIME server with a bootstrap sbt project and generates its classpath .
17,116
def reorder_classpath(self, classpath_file):
    """Rewrite *classpath_file* so any 'monkeys' jar comes first.

    Returns True on success, False if the file could not be processed.
    """
    success = False
    with catch((IOError, OSError)):
        with open(classpath_file, "r") as f:
            classpath = f.readline()
        if classpath:
            reordered = []
            for entry in classpath.split(":"):
                # insert(0, ...) pushes each monkeys jar to the front
                # (multiple monkeys entries thus end up reversed).
                if "monkeys" in entry:
                    reordered.insert(0, entry)
                else:
                    reordered.append(entry)
            with open(classpath_file, "w") as f:
                f.write(":".join(reordered))
        success = True
    return success
Reorder classpath and put monkeys - jar in the first place .
17,117
def find_from(path):
    """Search upward from *path* for a .ensime file; return its path or None."""
    current = os.path.realpath(path)
    root = os.path.abspath('/')
    while True:
        candidate = os.path.join(current, '.ensime')
        if os.path.isfile(candidate):
            return candidate
        if current == root:
            return None
        current = os.path.dirname(current)
Find path of an . ensime config searching recursively upward from path .
17,118
def parse(path):
    """Parse a .ensime config file (S-expressions) into a nested dict."""

    def paired(iterable):
        # (a b c d) -> (a, b), (c, d)
        cursor = iter(iterable)
        return zip(cursor, cursor)

    def unwrap_if_sexp_symbol(datum):
        return datum.value() if isinstance(datum, sexpdata.Symbol) else datum

    def sexp2dict(sexps):
        out = {}
        for key, value in paired(sexps):
            key = str(unwrap_if_sexp_symbol(key)).lstrip(':')
            if isinstance(value, list) and value:
                if isinstance(value[0], list):
                    # List of alists -> list of dicts.
                    out[key] = [sexp2dict(val) for val in value]
                elif isinstance(value[0], sexpdata.Symbol):
                    # Nested alist -> dict.
                    out[key] = sexp2dict(value)
                else:
                    out[key] = value
            else:
                out[key] = value
        return out

    return sexp2dict(sexpdata.loads(Util.read_file(path)))
Parse an . ensime config file from S - expressions .
17,119
def from_acl_response(acl_response):
    """Build an Acl object from an API JSON response.

    Raises ValueError when the response has no 'read' ACL.
    """
    if 'read' not in acl_response:
        raise ValueError('Response does not contain read ACL')
    return Acl(AclType.from_acl_response(acl_response['read']))
Takes JSON response from API and converts to ACL object
17,120
def create(self, acl=None):
    """Create this directory via the API; *acl* optionally sets permissions.

    Raises DataApiError when the API call does not return HTTP 200.
    """
    parent, name = getParentAndBase(self.path)
    json = {'name': name}
    if acl is not None:
        json['acl'] = acl.to_api_param()
    response = self.client.postJsonHelper(
        DataDirectory._getUrl(parent), json, False)
    if response.status_code != 200:
        raise DataApiError("Directory creation failed: " + str(response.content))
Creates a directory optionally include Acl argument to set permissions
17,121
def get_permissions(self):
    """Return this directory's Acl, or None for special collections.

    Raises DataApiError when the API call does not return HTTP 200.
    """
    response = self.client.getHelper(self.url, acl='true')
    if response.status_code != 200:
        raise DataApiError('Unable to get permissions:' + str(response.content))
    content = response.json()
    if 'acl' in content:
        return Acl.from_acl_response(content['acl'])
    return None
Returns permissions for this directory, or None if it's a special collection such as .session or .algo
17,122
def _eight_byte_real ( value ) : if value == 0 : return b'\x00\x00\x00\x00\x00\x00\x00\x00' if value < 0 : byte1 = 0x80 value = - value else : byte1 = 0x00 fexp = numpy . log2 ( value ) / 4 exponent = int ( numpy . ceil ( fexp ) ) if fexp == exponent : exponent += 1 mantissa = int ( value * 16. ** ( 14 - exponent ) ) byte1 += exponent + 64 byte2 = ( mantissa // 281474976710656 ) short3 = ( mantissa % 281474976710656 ) // 4294967296 long4 = mantissa % 4294967296 return struct . pack ( ">HHL" , byte1 * 256 + byte2 , short3 , long4 )
Convert a number into the GDSII 8 byte real format .
17,123
def _eight_byte_real_to_float ( value ) : short1 , short2 , long3 = struct . unpack ( '>HHL' , value ) exponent = ( short1 & 0x7f00 ) // 256 - 64 mantissa = ( ( ( short1 & 0x00ff ) * 65536 + short2 ) * 4294967296 + long3 ) / 72057594037927936.0 if short1 & 0x8000 : return - mantissa * 16. ** exponent return mantissa * 16. ** exponent
Convert a number from GDSII 8 byte real format to float .
17,124
def slice(objects, position, axis, precision=1e-3, layer=0, datatype=0):
    """Slice polygons and polygon sets at given positions along an axis.

    Returns one PolygonSet per slab between consecutive cut positions.
    """
    if not isinstance(layer, list):
        layer = [layer]
    if not isinstance(objects, list):
        objects = [objects]
    pos = sorted(position) if isinstance(position, list) else [position]
    result = [[] for _ in range(len(pos) + 1)]
    polygons = []
    for obj in objects:
        if isinstance(obj, PolygonSet):
            polygons.extend(obj.polygons)
        elif isinstance(obj, (CellReference, CellArray)):
            polygons.extend(obj.get_polygons())
        else:
            polygons.append(obj)
    scaling = 1 / precision
    for pol in polygons:
        for slab, pieces in zip(result, clipper._chop(pol, pos, axis, scaling)):
            slab.extend(pieces)
    # Layers cycle when fewer layers than slabs were given.
    return [PolygonSet(slab, layer[i % len(layer)], datatype)
            for i, slab in enumerate(result)]
Slice polygons and polygon sets at given positions along an axis .
17,125
def offset(polygons, distance, join='miter', tolerance=2, precision=0.001,
           join_first=False, max_points=199, layer=0, datatype=0):
    """Shrink or expand a polygon or polygon set by *distance*.

    Returns a fractured PolygonSet, or None when the result is empty.
    """
    poly = []
    if isinstance(polygons, PolygonSet):
        poly.extend(polygons.polygons)
    elif isinstance(polygons, (CellReference, CellArray)):
        poly.extend(polygons.get_polygons())
    else:
        for obj in polygons:
            if isinstance(obj, PolygonSet):
                poly.extend(obj.polygons)
            elif isinstance(obj, (CellReference, CellArray)):
                poly.extend(obj.get_polygons())
            else:
                poly.append(obj)
    result = clipper.offset(poly, distance, join, tolerance, 1 / precision,
                            1 if join_first else 0)
    if len(result) == 0:
        return None
    return PolygonSet(result, layer, datatype,
                      verbose=False).fracture(max_points, precision)
Shrink or expand a polygon or polygon set .
17,126
def fast_boolean(operandA, operandB, operation, precision=0.001,
                 max_points=199, layer=0, datatype=0):
    """Execute a boolean operation between two polygons or polygon sets.

    Returns a fractured PolygonSet, or None when the result is empty.
    """
    polyA = []
    polyB = []
    for poly, obj in zip((polyA, polyB), (operandA, operandB)):
        if isinstance(obj, PolygonSet):
            poly.extend(obj.polygons)
        elif isinstance(obj, (CellReference, CellArray)):
            poly.extend(obj.get_polygons())
        elif obj is not None:
            for inobj in obj:
                if isinstance(inobj, PolygonSet):
                    poly.extend(inobj.polygons)
                elif isinstance(inobj, (CellReference, CellArray)):
                    poly.extend(inobj.get_polygons())
                else:
                    poly.append(inobj)
    # The clipper needs a non-empty second operand; borrow one polygon.
    if len(polyB) == 0:
        polyB.append(polyA.pop())
    result = clipper.clip(polyA, polyB, operation, 1 / precision)
    if len(result) == 0:
        return None
    return PolygonSet(result, layer, datatype,
                      verbose=False).fracture(max_points, precision)
Execute any boolean operation between 2 polygons or polygon sets .
17,127
def inside(points, polygons, short_circuit='any', precision=0.001):
    """Test whether each point (or point group) lies within *polygons*."""
    poly = []
    if isinstance(polygons, PolygonSet):
        poly.extend(polygons.polygons)
    elif isinstance(polygons, (CellReference, CellArray)):
        poly.extend(polygons.get_polygons())
    else:
        for obj in polygons:
            if isinstance(obj, PolygonSet):
                poly.extend(obj.polygons)
            elif isinstance(obj, (CellReference, CellArray)):
                poly.extend(obj.get_polygons())
            else:
                poly.append(obj)
    if hasattr(points[0][0], '__iter__'):
        # Groups of points: short-circuit per group ('any' or 'all').
        pts = points
        sc = 1 if short_circuit == 'any' else -1
    else:
        # A single list of points: test each point individually.
        pts = (points,)
        sc = 0
    return clipper.inside(pts, poly, sc, 1 / precision)
Test whether each of the points is within the given set of polygons .
17,128
def copy(obj, dx, dy):
    """Return a deep copy of *obj* translated by (dx, dy)."""
    duplicate = libCopy.deepcopy(obj)
    duplicate.translate(dx, dy)
    return duplicate
Creates a copy of obj and translates the new object to a new location .
17,129
def write_gds(outfile, cells=None, name='library', unit=1.0e-6,
              precision=1.0e-9):
    """Write the current GDSII library to *outfile*.

    Sets the module-level current_library's name, unit and precision
    before delegating to its write_gds method.
    """
    current_library.name = name
    current_library.unit = unit
    current_library.precision = precision
    current_library.write_gds(outfile, cells)
Write the current GDSII library to a file .
17,130
def gdsii_hash(filename, engine=None):
    """Calculate a hash value for a GDSII file.

    Structure bodies are collected between BGNSTR and ENDSTR records,
    skipping the 24-byte timestamps, and hashed in sorted order so the
    result is independent of write time and structure order.
    *engine* defaults to a fresh hashlib.sha1.
    """
    with open(filename, 'rb') as fin:
        data = fin.read()
    contents = []
    start = pos = 0
    while pos < len(data):
        size, rec = struct.unpack('>HH', data[pos:pos + 4])
        if rec == 0x0502:        # BGNSTR: body begins after the timestamps
            start = pos + 28
        elif rec == 0x0700:      # ENDSTR: capture the structure body
            contents.append(data[start:pos])
        pos += size
    h = hashlib.sha1() if engine is None else engine
    for x in sorted(contents):
        h.update(x)
    return h.hexdigest()
Calculate a hash value for a GDSII file.
17,131
def get_bounding_box(self):
    """Return ((xmin, ymin), (xmax, ymax)) of all polygons, or None if empty."""
    if len(self.polygons) == 0:
        return None
    xmin = min(pts[:, 0].min() for pts in self.polygons)
    ymin = min(pts[:, 1].min() for pts in self.polygons)
    xmax = max(pts[:, 0].max() for pts in self.polygons)
    ymax = max(pts[:, 1].max() for pts in self.polygons)
    return numpy.array(((xmin, ymin), (xmax, ymax)))
Returns the bounding box of the polygons .
17,132
def scale(self, scalex, scaley=None, center=(0, 0)):
    """Scale the polygons about *center*; returns self for chaining.

    A single factor is used for both axes when *scaley* is omitted.
    """
    c0 = numpy.array(center)
    factor = scalex if scaley is None else numpy.array((scalex, scaley))
    self.polygons = [(pts - c0) * factor + c0 for pts in self.polygons]
    return self
Scale this object .
17,133
def to_gds(self, multiplier):
    """Serialize these polygons as GDSII BOUNDARY records (bytes).

    Raises ValueError for polygons over the 4094-vertex GDSII limit.
    """
    data = []
    for ii in range(len(self.polygons)):
        polygon = self.polygons[ii]
        if len(polygon) > 4094:
            raise ValueError("[GDSPY] Polygons with more than 4094 are "
                             "not supported by the GDSII format.")
        # BOUNDARY, LAYER, DATATYPE and XY record headers.
        data.append(struct.pack('>10h', 4, 0x0800, 6, 0x0D02,
                                self.layers[ii], 6, 0x0E02,
                                self.datatypes[ii],
                                12 + 8 * len(polygon), 0x1003))
        data.extend(struct.pack('>2l', int(round(point[0] * multiplier)),
                                int(round(point[1] * multiplier)))
                    for point in polygon)
        # Close the ring by repeating the first vertex, then ENDEL.
        data.append(struct.pack('>2l2h',
                                int(round(polygon[0][0] * multiplier)),
                                int(round(polygon[0][1] * multiplier)),
                                4, 0x1100))
    return b''.join(data)
Convert this object to a series of GDSII elements .
17,134
def fracture(self, max_points=199, precision=1e-3):
    """Split polygons with more than *max_points* vertices, in place.

    Oversized polygons are repeatedly chopped along their wider bounding
    dimension until every piece fits. Returns self.
    """
    if max_points > 4:
        ii = 0
        while ii < len(self.polygons):
            if len(self.polygons[ii]) > max_points:
                pts0 = sorted(self.polygons[ii][:, 0])
                pts1 = sorted(self.polygons[ii][:, 1])
                ncuts = len(pts0) // max_points
                # Cut along the axis with the larger extent.
                if pts0[-1] - pts0[0] > pts1[-1] - pts1[0]:
                    cuts = [pts0[int(i * len(pts0) / (ncuts + 1.0) + 0.5)]
                            for i in range(1, ncuts + 1)]
                    chopped = clipper._chop(self.polygons[ii], cuts, 0,
                                            1 / precision)
                else:
                    cuts = [pts1[int(i * len(pts1) / (ncuts + 1.0) + 0.5)]
                            for i in range(1, ncuts + 1)]
                    chopped = clipper._chop(self.polygons[ii], cuts, 1,
                                            1 / precision)
                # Replace the oversized polygon with its pieces; ii is not
                # advanced so the new pieces are re-checked next passes.
                self.polygons.pop(ii)
                layer = self.layers.pop(ii)
                datatype = self.datatypes.pop(ii)
                self.polygons.extend(
                    numpy.array(x)
                    for x in itertools.chain.from_iterable(chopped))
                npols = sum(len(c) for c in chopped)
                self.layers.extend(layer for _ in range(npols))
                self.datatypes.extend(datatype for _ in range(npols))
            else:
                ii += 1
    return self
Slice these polygons in the horizontal and vertical directions so that each resulting piece has at most max_points . This operation occurs in place .
17,135
def translate(self, dx, dy):
    """Shift every polygon by (dx, dy); returns self for chaining."""
    shift = numpy.array((dx, dy))
    self.polygons = [pts + shift for pts in self.polygons]
    return self
Move the polygons from one place to another
17,136
def to_gds(self, multiplier):
    """Serialize this label as a GDSII TEXT element (bytes)."""
    text = self.text
    if len(text) % 2 != 0:
        text = text + '\0'   # GDSII strings are padded to even length
    data = struct.pack('>11h', 4, 0x0C00, 6, 0x0D02, self.layer, 6, 0x1602,
                       self.texttype, 6, 0x1701, self.anchor)
    if (self.rotation is not None) or (self.magnification is not None) \
            or self.x_reflection:
        # STRANS record plus optional MAG/ANGLE values.
        word = 0x8000 if self.x_reflection else 0
        values = b''
        if self.magnification is not None:
            values += struct.pack('>2h', 12, 0x1B05) \
                + _eight_byte_real(self.magnification)
        if self.rotation is not None:
            values += struct.pack('>2h', 12, 0x1C05) \
                + _eight_byte_real(self.rotation)
        data += struct.pack('>2hH', 6, 0x1A01, word) + values
    return data + struct.pack('>2h2l2h', 12, 0x1003,
                              int(round(self.position[0] * multiplier)),
                              int(round(self.position[1] * multiplier)),
                              4 + len(text), 0x1906) \
        + text.encode('ascii') + struct.pack('>2h', 4, 0x1100)
Convert this label to a GDSII structure .
17,137
def translate(self, dx, dy):
    """Move this label by (dx, dy); returns self for chaining."""
    x, y = self.position
    self.position = numpy.array((x + dx, y + dy))
    return self
Move the text from one place to another
17,138
def to_gds(self, multiplier, timestamp=None):
    """Serialize this cell as a GDSII BGNSTR..ENDSTR structure (bytes).

    *timestamp* defaults to the current time (used for both creation and
    modification fields).
    """
    now = datetime.datetime.today() if timestamp is None else timestamp
    name = self.name
    if len(name) % 2 != 0:
        name = name + '\0'   # GDSII strings are padded to even length
    header = struct.pack('>16h', 28, 0x0502, now.year, now.month, now.day,
                         now.hour, now.minute, now.second, now.year,
                         now.month, now.day, now.hour, now.minute,
                         now.second, 4 + len(name), 0x0606)
    elements = b''.join(element.to_gds(multiplier)
                        for element in self.elements)
    labels = b''.join(label.to_gds(multiplier) for label in self.labels)
    return header + name.encode('ascii') + elements + labels \
        + struct.pack('>2h', 4, 0x0700)
Convert this cell to a GDSII structure .
17,139
def copy(self, name, exclude_from_current=False, deep_copy=False):
    """Return a copy of this cell under a new *name*.

    With *deep_copy*, elements and labels are deep-copied and the
    bounding-box caches of all dependencies are invalidated; otherwise
    the element/label lists are shallow-copied.
    """
    new_cell = Cell(name, exclude_from_current)
    if deep_copy:
        new_cell.elements = libCopy.deepcopy(self.elements)
        new_cell.labels = libCopy.deepcopy(self.labels)
        for ref in new_cell.get_dependencies(True):
            if ref._bb_valid:
                ref._bb_valid = False
    else:
        new_cell.elements = list(self.elements)
        new_cell.labels = list(self.labels)
    return new_cell
Creates a copy of this cell .
17,140
def add(self, element):
    """Add an element (or a list of elements) to this cell; returns self.

    Labels go to self.labels, everything else to self.elements; the
    bounding-box cache is invalidated.
    """
    items = element if isinstance(element, list) else [element]
    for item in items:
        if isinstance(item, Label):
            self.labels.append(item)
        else:
            self.elements.append(item)
    self._bb_valid = False
    return self
Add a new element or list of elements to this cell .
17,141
def remove_polygons(self, test):
    """Remove polygons for which test(points, layer, datatype) is true.

    PolygonSets left empty are dropped from the cell. Returns self.
    """
    emptied = []
    for element in self.elements:
        if not isinstance(element, PolygonSet):
            continue
        ii = 0
        while ii < len(element.polygons):
            if test(element.polygons[ii], element.layers[ii],
                    element.datatypes[ii]):
                element.polygons.pop(ii)
                element.layers.pop(ii)
                element.datatypes.pop(ii)
            else:
                ii += 1
        if len(element.polygons) == 0:
            emptied.append(element)
    for element in emptied:
        self.elements.remove(element)
    return self
Remove polygons from this cell .
17,142
def remove_labels(self, test):
    """Remove labels for which test(label) is true; returns self."""
    # Slice-assign to mutate the existing list in place, as callers may
    # hold a reference to self.labels.
    self.labels[:] = [lbl for lbl in self.labels if not test(lbl)]
    return self
Remove labels from this cell .
17,143
def area ( self , by_spec = False ) : if by_spec : cell_area = { } for element in self . elements : element_area = element . area ( True ) for ll in element_area . keys ( ) : if ll in cell_area : cell_area [ ll ] += element_area [ ll ] else : cell_area [ ll ] = element_area [ ll ] else : cell_area = 0 for element in self . elements : cell_area += element . area ( ) return cell_area
Calculate the total area of the elements on this cell including cell references and arrays .
17,144
def get_layers(self):
    """Return the set of layer numbers used in this cell (recursive)."""
    layers = set()
    for element in self.elements:
        if isinstance(element, PolygonSet):
            layers.update(element.layers)
        elif isinstance(element, (CellReference, CellArray)):
            layers.update(element.ref_cell.get_layers())
    layers.update(label.layer for label in self.labels)
    return layers
Returns a set of layers in this cell .
17,145
def get_datatypes(self):
    """Return the set of datatypes used in this cell (recursive)."""
    datatypes = set()
    for element in self.elements:
        if isinstance(element, PolygonSet):
            datatypes.update(element.datatypes)
        elif isinstance(element, (CellReference, CellArray)):
            datatypes.update(element.ref_cell.get_datatypes())
    return datatypes
Returns a set of datatypes in this cell .
17,146
def get_bounding_box(self):
    """Return ((xmin, ymin), (xmax, ymax)) for this cell, or None if empty.

    Results are cached in the module-level _bounding_boxes dict and only
    recomputed when this cell or any dependency was invalidated.
    """
    if len(self.elements) == 0:
        return None
    if not (self._bb_valid and
            all(ref._bb_valid for ref in self.get_dependencies(True))):
        # Start from an inverted, effectively infinite box and shrink it.
        bb = numpy.array(((1e300, 1e300), (-1e300, -1e300)))
        all_polygons = []
        for element in self.elements:
            if isinstance(element, PolygonSet):
                all_polygons.extend(element.polygons)
            elif isinstance(element, CellReference) or isinstance(
                    element, CellArray):
                element_bb = element.get_bounding_box()
                if element_bb is not None:
                    bb[0, 0] = min(bb[0, 0], element_bb[0, 0])
                    bb[0, 1] = min(bb[0, 1], element_bb[0, 1])
                    bb[1, 0] = max(bb[1, 0], element_bb[1, 0])
                    bb[1, 1] = max(bb[1, 1], element_bb[1, 1])
        if len(all_polygons) > 0:
            all_points = numpy.concatenate(all_polygons).transpose()
            bb[0, 0] = min(bb[0, 0], all_points[0].min())
            bb[0, 1] = min(bb[0, 1], all_points[1].min())
            bb[1, 0] = max(bb[1, 0], all_points[0].max())
            bb[1, 1] = max(bb[1, 1], all_points[1].max())
        self._bb_valid = True
        _bounding_boxes[self] = bb
    return _bounding_boxes[self]
Returns the bounding box for this cell .
17,147
def get_polygons(self, by_spec=False, depth=None):
    """Return the polygons in this cell, recursing into references.

    by_spec: group results into a dict keyed by (layer, datatype).
    depth: recursion limit; once exhausted (negative), the cell is
    represented by a single bounding-box rectangle.
    """
    if depth is not None and depth < 0:
        # Depth exhausted: stand in the whole cell with its bounding box.
        bb = self.get_bounding_box()
        if bb is None:
            return {} if by_spec else []
        pts = [numpy.array([(bb[0, 0], bb[0, 1]), (bb[0, 0], bb[1, 1]),
                            (bb[1, 0], bb[1, 1]), (bb[1, 0], bb[0, 1])])]
        polygons = {self.name: pts} if by_spec else pts
    else:
        if by_spec:
            polygons = {}
            for element in self.elements:
                if isinstance(element, PolygonSet):
                    for ii in range(len(element.polygons)):
                        key = (element.layers[ii], element.datatypes[ii])
                        if key in polygons:
                            polygons[key].append(
                                numpy.array(element.polygons[ii]))
                        else:
                            polygons[key] = [
                                numpy.array(element.polygons[ii])]
                else:
                    # Reference element: recurse with decremented depth.
                    cell_polygons = element.get_polygons(
                        True, None if depth is None else depth - 1)
                    for kk in cell_polygons.keys():
                        if kk in polygons:
                            polygons[kk].extend(cell_polygons[kk])
                        else:
                            polygons[kk] = cell_polygons[kk]
        else:
            polygons = []
            for element in self.elements:
                if isinstance(element, PolygonSet):
                    for points in element.polygons:
                        polygons.append(numpy.array(points))
                else:
                    polygons.extend(element.get_polygons(
                        depth=None if depth is None else depth - 1))
    return polygons
Returns a list of polygons in this cell .
17,148
def get_labels(self, depth=None):
    """Return copies of all labels in this cell, recursing into references."""
    labels = libCopy.deepcopy(self.labels)
    if depth is None or depth > 0:
        next_depth = None if depth is None else depth - 1
        for element in self.elements:
            if isinstance(element, (CellReference, CellArray)):
                labels.extend(element.get_labels(next_depth))
    return labels
Returns a list with a copy of the labels in this cell .
17,149
def get_dependencies(self, recursive=False):
    """Return the set of cells referenced by this cell."""
    dependencies = set()
    for element in self.elements:
        if isinstance(element, (CellReference, CellArray)):
            if recursive:
                dependencies.update(
                    element.ref_cell.get_dependencies(True))
            dependencies.add(element.ref_cell)
    return dependencies
Returns a list of the cells included in this cell as references .
17,150
def flatten(self, single_layer=None, single_datatype=None,
            single_texttype=None):
    """Flatten all references in this cell into plain polygons and labels.

    Optionally force all output onto one layer/datatype/texttype.
    Returns self.
    """
    self.labels = self.get_labels()
    if single_layer is not None:
        for lbl in self.labels:
            lbl.layer = single_layer
    if single_texttype is not None:
        for lbl in self.labels:
            lbl.texttype = single_texttype
    if single_layer is None or single_datatype is None:
        poly_dic = self.get_polygons(True)
        self.elements = []
        if single_layer is None and single_datatype is None:
            # Keep each (layer, datatype) pair as its own PolygonSet.
            for ld in poly_dic.keys():
                self.add(PolygonSet(poly_dic[ld], *ld, verbose=False))
        elif single_layer is None:
            for ld in poly_dic.keys():
                self.add(PolygonSet(poly_dic[ld], ld[0], single_datatype,
                                    verbose=False))
        else:
            for ld in poly_dic.keys():
                self.add(PolygonSet(poly_dic[ld], single_layer, ld[1],
                                    verbose=False))
    else:
        polygons = self.get_polygons()
        self.elements = []
        self.add(PolygonSet(polygons, single_layer, single_datatype,
                            verbose=False))
    return self
Flatten all CellReference and CellArray elements in this cell into real polygons and labels instead of references .
17,151
def area(self, by_spec=False):
    """Area of the referenced cell, scaled by magnification squared."""
    if not isinstance(self.ref_cell, Cell):
        return dict() if by_spec else 0
    if self.magnification is None:
        return self.ref_cell.area(by_spec)
    factor = self.magnification ** 2
    if by_spec:
        cell_area = self.ref_cell.area(True)
        for kk in cell_area.keys():
            cell_area[kk] *= factor
        return cell_area
    return self.ref_cell.area() * factor
Calculate the total area of the referenced cell with the magnification factor included .
17,152
def get_bounding_box(self):
    """Return the bounding box for this reference.

    Returns
    -------
    out : 2x2 numpy array or None
        ``((min_x, min_y), (max_x, max_y))``, or None when the reference
        is dangling or the referenced cell has no geometry.
    """
    if not isinstance(self.ref_cell, Cell):
        return None
    # Cache key: the reference object itself when it applies no transform
    # (note: x_reflection is compared with `is None`, so a False value
    # also selects the tuple key), otherwise a tuple of the transform.
    if (self.rotation is None and self.magnification is None and self.x_reflection is None):
        key = self
    else:
        key = (self.ref_cell, self.rotation, self.magnification, self.x_reflection)
    deps = self.ref_cell.get_dependencies(True)
    # Recompute unless the whole dependency tree's cached boxes are still
    # valid AND this transform's box is already cached.
    if not (self.ref_cell._bb_valid and all(ref._bb_valid for ref in deps) and key in _bounding_boxes):
        # Refresh caches bottom-up so the referenced cell's box is valid.
        for ref in deps:
            ref.get_bounding_box()
        self.ref_cell.get_bounding_box()
        # Temporarily drop the origin so the cached box is origin-free;
        # the translation is re-applied at return time.
        tmp = self.origin
        self.origin = None
        polygons = self.get_polygons()
        self.origin = tmp
        if len(polygons) == 0:
            bb = None
        else:
            all_points = numpy.concatenate(polygons).transpose()
            bb = numpy.array(((all_points[0].min(), all_points[1].min()), (all_points[0].max(), all_points[1].max())))
        _bounding_boxes[key] = bb
    else:
        bb = _bounding_boxes[key]
    if self.origin is None or bb is None:
        return bb
    else:
        # Translate the cached (origin-free) box to this reference's origin.
        return bb + numpy.array(((self.origin[0], self.origin[1]), (self.origin[0], self.origin[1])))
Returns the bounding box for this reference .
17,153
def translate(self, dx, dy):
    """Move this reference by the given displacement.

    Parameters
    ----------
    dx : number
        Displacement along the x axis.
    dy : number
        Displacement along the y axis.

    Returns
    -------
    out : this object (for chaining).
    """
    x0, y0 = self.origin
    self.origin = (x0 + dx, y0 + dy)
    return self
Move the reference from one place to another
17,154
def add(self, cell, overwrite_duplicate=False):
    """Add one or more cells to the library.

    Parameters
    ----------
    cell : ``Cell`` or iterable of ``Cell``
        Cell(s) to register under their own names.
    overwrite_duplicate : bool
        If True, silently replace an existing different cell with the
        same name instead of raising.

    Returns
    -------
    out : this library (for chaining).

    Raises
    ------
    ValueError
        When a *different* cell with the same name is already registered
        and ``overwrite_duplicate`` is False.
    """
    def _insert(candidate):
        # Re-adding the same object under its own name is always allowed.
        if (not overwrite_duplicate and candidate.name in self.cell_dict
                and self.cell_dict[candidate.name] is not candidate):
            raise ValueError("[GDSPY] cell named {0} already present in "
                             "library.".format(candidate.name))
        self.cell_dict[candidate.name] = candidate

    if isinstance(cell, Cell):
        _insert(cell)
    else:
        for item in cell:
            _insert(item)
    return self
Add one or more cells to the library .
17,155
def write_gds(self, outfile, cells=None, timestamp=None):
    """Write the GDSII library to a file.

    Parameters
    ----------
    outfile : file-like or str
        Open binary file, or a path to open (closed again on return).
    cells : iterable or None
        Cells (or cell names looked up in ``cell_dict``) to write;
        all cells in the library when None.
    timestamp : datetime or None
        Timestamp recorded in the header; current time when None.
    """
    if isinstance(outfile, basestring):
        outfile = open(outfile, 'wb')
        close = True
    else:
        close = False
    now = datetime.datetime.today() if timestamp is None else timestamp
    # GDSII strings must have even length; pad the library name with NUL.
    name = self.name if len(self.name) % 2 == 0 else (self.name + '\0')
    # Header: version record, BGNLIB with two timestamps, LIBNAME, and the
    # UNITS record holding user-unit/database-unit ratios as 8-byte reals.
    outfile.write(struct.pack('>19h', 6, 0x0002, 0x0258, 28, 0x0102, now.year, now.month, now.day, now.hour, now.minute, now.second, now.year, now.month, now.day, now.hour, now.minute, now.second, 4 + len(name), 0x0206) + name.encode('ascii') + struct.pack('>2h', 20, 0x0305) + _eight_byte_real(self.precision / self.unit) + _eight_byte_real(self.precision))
    if cells is None:
        cells = self.cell_dict.values()
    else:
        # Accept either names (resolved through cell_dict) or cell objects.
        cells = [self.cell_dict.get(c, c) for c in cells]
    for cell in cells:
        outfile.write(cell.to_gds(self.unit / self.precision))
    # ENDLIB record terminates the stream.
    outfile.write(struct.pack('>2h', 4, 0x0400))
    if close:
        outfile.close()
Write the GDSII library to a file .
17,156
def _read_record(self, stream):
    """Read one complete record from a GDSII stream.

    Parameters
    ----------
    stream : file-like
        Binary stream positioned at a record boundary.

    Returns
    -------
    out : [int, data] or None
        Record type and decoded payload, or None at end of stream.
    """
    # Each record starts with a 4-byte header: total size and a combined
    # record-type/data-type word (big-endian).
    header = stream.read(4)
    if len(header) < 4:
        return None
    size, rec_type = struct.unpack('>HH', header)
    data_type = (rec_type & 0x00ff)
    rec_type = rec_type // 256
    data = None
    if size > 4:
        if data_type == 0x01:
            # Bit array -> unsigned 16-bit values.
            data = numpy.array(struct.unpack('>{0}H'.format((size - 4) // 2), stream.read(size - 4)), dtype='uint')
        elif data_type == 0x02:
            # Signed 16-bit integers.
            data = numpy.array(struct.unpack('>{0}h'.format((size - 4) // 2), stream.read(size - 4)), dtype='int')
        elif data_type == 0x03:
            # Signed 32-bit integers.
            data = numpy.array(struct.unpack('>{0}l'.format((size - 4) // 4), stream.read(size - 4)), dtype='int')
        elif data_type == 0x05:
            # 8-byte GDSII reals, converted one at a time.
            data = numpy.array([_eight_byte_real_to_float(stream.read(8)) for _ in range((size - 4) // 8)])
        else:
            # ASCII string payload; strip the even-length NUL padding.
            data = stream.read(size - 4)
            if str is not bytes:
                # Python 3: decode bytes to str.
                if data[-1] == 0:
                    data = data[:-1].decode('ascii')
                else:
                    data = data.decode('ascii')
            elif data[-1] == '\0':
                # Python 2: data is already a str; just strip the pad.
                data = data[:-1]
    return [rec_type, data]
Read a complete record from a GDSII stream file .
17,157
def extract(self, cell):
    """Extract a cell from this GDSII file into the global library.

    Parameters
    ----------
    cell : ``Cell`` or str
        Cell object, or a name looked up in ``cell_dict``.

    Returns
    -------
    out : ``Cell``
        The extracted cell.
    """
    target = self.cell_dict.get(cell, cell)
    # Register the cell itself, then every cell it transitively references,
    # so the global library stays self-consistent.
    current_library.add(target)
    current_library.add(target.get_dependencies(True))
    return target
Extract a cell from this GDSII file and include it in the current global library, together with all cells it references.
17,158
def top_level(self):
    """Return the top-level cells of the GDSII data.

    A cell is top-level when no other cell in the library references it.

    Returns
    -------
    out : list
        Cells not referenced by any other cell, in dictionary order.
    """
    candidates = list(self.cell_dict.values())
    # Strike out every cell that appears as someone else's dependency.
    for cell in self.cell_dict.values():
        for child in cell.get_dependencies():
            if child in candidates:
                candidates.remove(child)
    return candidates
Output the top level cells from the GDSII data .
17,159
def write_cell(self, cell):
    """Write a single cell to the output stream.

    :param cell: cell to serialize via its ``to_gds`` method, using this
        writer's resolution factor.
    :return: this writer (for chaining).
    """
    self._outfile.write(cell.to_gds(self._res))
    return self
Write the specified cell to the file .
17,160
def close(self):
    """Finalize the GDSII stream: write the ENDLIB record and close the
    underlying file if this writer opened it."""
    # 0x0400 is the GDSII ENDLIB record.
    self._outfile.write(struct.pack('>2h', 4, 0x0400))
    if self._close:
        self._outfile.close()
Finalize the GDSII stream library .
17,161
def waveguide(path, points, finish, bend_radius, number_of_points=0.01, direction=None, layer=0, datatype=0):
    """Easy waveguide creation tool with absolute positioning.

    Appends alternating straight segments and 90-degree bends to ``path``
    so that it passes through the given absolute coordinates and ends at
    ``finish``.

    Parameters
    ----------
    path : ``gdspy.Path``
        Path to extend in place.
    points : list of numbers
        Absolute coordinates along the axes the path alternates over
        (mutated: the matching ``finish`` coordinate is appended).
    finish : 2-tuple
        Final (x, y) position.
    bend_radius : number
        Radius of every turn.
    number_of_points, layer, datatype
        Forwarded to ``path.segment``/``path.turn``.
    direction : str or None
        If given, overrides the path's current direction first.

    Returns
    -------
    out : the updated path.
    """
    if direction is not None:
        path.direction = direction
    # axis: 0 while travelling along x ('+x'/'-x'), 1 along y.
    axis = 0 if path.direction[1] == 'x' else 1
    # Close the point list with the finish coordinate of whichever axis
    # the path will be on after consuming all points.
    points.append(finish[(axis + len(points)) % 2])
    n = len(points)
    # Orient the path toward the first target coordinate.
    if points[0] > (path.x, path.y)[axis]:
        path.direction = ['+x', '+y'][axis]
    else:
        path.direction = ['-x', '-y'][axis]
    for i in range(n):
        # Straight run, stopping bend_radius short to leave room for the turn.
        path.segment(abs(points[i] - (path.x, path.y)[axis]) - bend_radius, layer=layer, datatype=datatype)
        axis = 1 - axis
        if i < n - 1:
            goto = points[i + 1]
        else:
            goto = finish[axis]
        # Pick left/right so the turn heads toward the next coordinate,
        # accounting for current travel direction and axis.
        if (goto > (path.x, path.y)[axis]) ^ ((path.direction[0] == '+') ^ (path.direction[1] == 'x')):
            bend = 'l'
        else:
            bend = 'r'
        path.turn(bend_radius, bend, number_of_points=number_of_points, layer=layer, datatype=datatype)
    # Final straight segment to the finish position.
    return path.segment(abs(finish[axis] - (path.x, path.y)[axis]), layer=layer, datatype=datatype)
Easy waveguide creation tool with absolute positioning .
17,162
def taper(path, length, final_width, final_distance, direction=None, layer=0, datatype=0):
    """Append a linear taper to ``path``.

    The taper is built from ``len(layer)`` equal-length segments whose
    width and distance are linearly interpolated from the path's current
    values to the final ones.

    Parameters
    ----------
    path : ``gdspy.Path``
        Path to extend in place (current half-width ``path.w`` and
        ``path.distance`` are the starting values).
    length : number
        Total taper length.
    final_width : number
        Width at the end of the taper.
    final_distance : number
        Distance between path centers at the end of the taper.
    direction : forwarded to ``path.segment``.
    layer, datatype : int or list of int
        Must be both ints or both lists of equal length; one segment is
        created per (layer, datatype) pair.

    Returns
    -------
    out : the updated path.

    Raises
    ------
    ValueError
        When layer/datatype are not both ints or both lists of the same
        length.  (Was previously an ``assert``, which vanishes under
        ``python -O``.)
    """
    if isinstance(layer, list) and isinstance(datatype, list):
        if len(layer) != len(datatype):
            raise ValueError('Parameters layer and datatype must have the same '
                             'type (either int or list) and length.')
    elif isinstance(layer, int) and isinstance(datatype, int):
        layer = [layer]
        datatype = [datatype]
    else:
        raise ValueError('Parameters layer and datatype must have the same '
                         'type (either int or list) and length.')
    n = len(layer)
    # Interpolation grids; drop the first point (the current path state).
    widths = numpy.linspace(2 * path.w, final_width, n + 1)[1:]
    distances = numpy.linspace(path.distance, final_distance, n + 1)[1:]
    step = float(length) / n
    for i in range(n):
        path.segment(step, direction, widths[i], distances[i], layer=layer[i], datatype=datatype[i])
    return path
Linear tapers for the lazy .
17,163
def grating(period, number_of_teeth, fill_frac, width, position, direction, lda=1, sin_theta=0, focus_distance=-1, focus_width=-1, evaluations=99, layer=0, datatype=0):
    """Straight or focusing grating coupler.

    Parameters
    ----------
    period : number
        Grating period.
    number_of_teeth : int
        Number of teeth.
    fill_frac : number
        Filling fraction of each tooth (0 to 1).
    width : number
        Width of the grating.
    position : 2-tuple
        Grating position (end of the straight section for a focusing grating).
    direction : str
        One of '+x', '-x', '+y', '-y'.
    lda, sin_theta : numbers
        Free-space wavelength and sine of the incidence angle; used only
        for a focusing grating.
    focus_distance : number
        If negative, a straight grating is built; otherwise the focal
        distance of a focusing grating.
    focus_width : number
        If non-negative, the teeth are clipped to a focusing section of
        this width at ``position``.
    evaluations : int
        Points per curved tooth.
    layer, datatype : int
        GDSII spec for the geometry.

    Returns
    -------
    out : the generated path, rotated to the requested direction.
    """
    if focus_distance < 0:
        # Straight grating: a simple L1Path of evenly spaced teeth.
        path = gdspy.L1Path((position[0] - 0.5 * width, position[1] + 0.5 * (number_of_teeth - 1 + fill_frac) * period), '+x', period * fill_frac, [width], [], number_of_teeth, period, layer=layer, datatype=datatype)
    else:
        # Focusing grating: each tooth is an arc of constant optical path
        # length q*lda, drawn parametrically.
        neff = lda / float(period) + sin_theta
        qmin = int(focus_distance / float(period) + 0.5)
        path = gdspy.Path(period * fill_frac, position)
        max_points = 199 if focus_width < 0 else 2 * evaluations
        c3 = neff ** 2 - sin_theta ** 2
        w = 0.5 * width
        for q in range(qmin, qmin + number_of_teeth):
            c1 = q * lda * sin_theta
            c2 = (q * lda) ** 2
            # NOTE(review): the lambda closes over c1/c2 late-bound, but
            # path.parametric evaluates it before the next iteration
            # rebinds them, so each tooth sees its own coefficients.
            path.parametric(lambda t: (width * t - w, (c1 + neff * numpy.sqrt(c2 - c3 * (width * t - w) ** 2)) / c3), number_of_evaluations=evaluations, max_points=max_points, layer=layer, datatype=datatype)
            # Reset the path origin so every tooth is drawn from `position`.
            path.x = position[0]
            path.y = position[1]
        if focus_width >= 0:
            # Close the first tooth down to the focusing section's mouth.
            path.polygons[0] = numpy.vstack((path.polygons[0][:evaluations, :], ([position] if focus_width == 0 else [(position[0] + 0.5 * focus_width, position[1]), (position[0] - 0.5 * focus_width, position[1])])))
            path.fracture()
    # The grating is built pointing '+y'; rotate to the requested direction.
    if direction == '-x':
        return path.rotate(0.5 * numpy.pi, position)
    elif direction == '+x':
        return path.rotate(-0.5 * numpy.pi, position)
    elif direction == '-y':
        return path.rotate(numpy.pi, position)
    else:
        return path
Straight or focusing grating .
17,164
def render_pdf(html, stylesheets=None, download_filename=None, automatic_download=True):
    """Render a PDF and wrap it in a Flask response with the right
    Content-Type header.

    :param html: a WeasyPrint ``HTML`` object (anything with a
        ``write_pdf`` method), or a value to construct one from.
    :param stylesheets: optional stylesheets forwarded to ``write_pdf``.
    :param download_filename: if given, a Content-Disposition header is
        added with this filename.
    :param automatic_download: choose 'attachment' (force download) vs
        'inline' disposition.
    :return: a Flask response with mimetype ``application/pdf``.
    """
    document = html if hasattr(html, 'write_pdf') else HTML(html)
    pdf_bytes = document.write_pdf(stylesheets=stylesheets)
    response = current_app.response_class(pdf_bytes, mimetype='application/pdf')
    if download_filename:
        disposition = 'attachment' if automatic_download else 'inline'
        response.headers.add('Content-Disposition', disposition, filename=download_filename)
    return response
Render a PDF to a response with the correct Content - Type header .
17,165
def _inject_args ( sig , types ) : if '(' in sig : parts = sig . split ( '(' ) sig = '%s(%s%s%s' % ( parts [ 0 ] , ', ' . join ( types ) , ( ', ' if parts [ 1 ] . index ( ')' ) > 0 else '' ) , parts [ 1 ] ) else : sig = '%s(%s)' % ( sig , ', ' . join ( types ) ) return sig
A function to inject arguments manually into a method signature before it has been parsed. If using keyword arguments, use ``kw=type`` entries in the types array instead.
17,166
def jsonrpc_method(name, authenticated=False, authentication_arguments=['username', 'password'], safe=False, validate=False, site=default_site):
    """Decorator factory that turns a function into a JSON-RPC method.

    Attaches signature metadata to the wrapped function and registers it
    with ``site`` (the default JSON-RPC site unless overridden).

    :param name: method signature string, e.g. ``'app.method(a=Number)'``.
    :param authenticated: False, True, or a callable; when truthy, the
        method requires credentials (see below).
    :param authentication_arguments: names of the credential parameters
        prepended to the method's argument list.
    :param safe: marks the method as safe for GET dispatch.
    :param validate: whether argument types are validated against the
        parsed signature.
    :param site: JSON-RPC site to register the method on.
    """
    def decorator(func):
        # Skip the first positional arg (the request object).
        arg_names = getargspec(func)[0][1:]
        X = {'name': name, 'arg_names': arg_names}
        if authenticated:
            if authenticated is True or six.callable(authenticated):
                # Prepend the credential parameters to both the arg list
                # and the published signature.
                X['arg_names'] = authentication_arguments + X['arg_names']
                X['name'] = _inject_args(X['name'], ('String', 'String'))
                from django.contrib.auth import authenticate as _authenticate
                from django.contrib.auth.models import User
            else:
                # A non-callable truthy value is used as the authenticator
                # itself.  NOTE(review): this binds a local `authenticate`
                # that the wrapper below never uses — confirm intent.
                authenticate = authenticated

            @six.wraps(func)
            def _func(request, *args, **kwargs):
                # Already-authenticated requests pass straight through;
                # otherwise pull credentials from positional args first,
                # then from keyword args.
                user = getattr(request, 'user', None)
                is_authenticated = getattr(user, 'is_authenticated', lambda: False)
                if ((user is not None and six.callable(is_authenticated) and not is_authenticated()) or user is None):
                    user = None
                    try:
                        creds = args[:len(authentication_arguments)]
                        if len(creds) == 0:
                            raise IndexError
                        user = _authenticate(username=creds[0], password=creds[1], *creds[2:])
                        if user is not None:
                            # Strip consumed credentials from the args.
                            args = args[len(authentication_arguments):]
                    except IndexError:
                        # Fall back to keyword credentials.
                        auth_kwargs = {}
                        try:
                            for auth_kwarg in authentication_arguments:
                                auth_kwargs[auth_kwarg] = kwargs[auth_kwarg]
                        except KeyError:
                            raise InvalidParamsError('Authenticated methods require at least '
                                                     '[%(arguments)s] or {%(arguments)s} arguments' % {'arguments': ', '.join(authentication_arguments)})
                        user = _authenticate(**auth_kwargs)
                        if user is not None:
                            for auth_kwarg in authentication_arguments:
                                kwargs.pop(auth_kwarg)
                    if user is None:
                        raise InvalidCredentialsError
                    request.user = user
                return func(request, *args, **kwargs)
        else:
            _func = func

        @six.wraps(_func)
        def exc_printer(*a, **kw):
            # Print a traceback for any service exception, then re-raise
            # with the original traceback preserved.
            try:
                return _func(*a, **kw)
            except Exception as e:
                try:
                    print('JSONRPC SERVICE EXCEPTION')
                    import traceback
                    traceback.print_exc()
                except:
                    pass
                six.reraise(*sys.exc_info())
        ret_func = exc_printer
        # Parse the (possibly credential-augmented) signature and attach
        # the JSON-RPC metadata the dispatcher relies on.
        method, arg_types, return_type = _parse_sig(X['name'], X['arg_names'], validate)
        ret_func.json_args = X['arg_names']
        ret_func.json_arg_types = arg_types
        ret_func.json_return_type = return_type
        ret_func.json_method = method
        ret_func.json_safe = safe
        ret_func.json_sig = X['name']
        ret_func.json_validate = validate
        site.register(method, ret_func)
        return ret_func
    return decorator
Wraps a function turns it into a json - rpc method . Adds several attributes to the function specific to the JSON - RPC machinery and adds it to the default jsonrpc_site if one isn t provided . You must import the module containing these functions in your urls . py .
17,167
def send_payload(self, params):
    """Serialize a JSON-RPC request, POST it to the service URL, and
    return the raw response body.

    :param params: parameters for the remote method call.
    :return: decoded UTF-8 response text.
    :raises ServiceProxyException: on an HTTP error that is not a
        JSON-RPC-formatted error body.
    """
    data = dumps({'jsonrpc': self.version, 'method': self.service_name, 'params': params, 'id': str(uuid.uuid1())}).encode('utf-8')
    headers = {'Content-Type': 'application/json-rpc', 'Accept': 'application/json-rpc', 'Content-Length': len(data)}
    try:
        req = urllib_request.Request(self.service_url, data, headers)
        resp = urllib_request.urlopen(req)
    except IOError as e:
        if isinstance(e, urllib_error.HTTPError):
            # Non-auth HTTP errors that still carry a JSON-RPC body are
            # returned to the caller for normal error decoding.
            if e.code not in (401, 403) and e.headers['Content-Type'] == 'application/json-rpc':
                return e.read().decode('utf-8')
            raise ServiceProxyException(e.code, e.headers, req)
        else:
            raise e
    return resp.read().decode('utf-8')
Performs the actual sending action and returns the result
17,168
def json_rpc_format(self):
    """Return this exception's data as a JSON-RPC error object.

    :return: dict with 'name', 'code', 'message' and 'data' keys; in
        DEBUG mode also 'stack' and 'executable'.
    """
    error = {'name': smart_text(self.__class__.__name__), 'code': self.code, 'message': "%s: %s" % (smart_text(self.__class__.__name__), smart_text(self.message)), 'data': self.data}
    # Imported lazily so the module does not require configured Django
    # settings at import time.
    from django.conf import settings
    if settings.DEBUG:
        import sys, traceback
        error['stack'] = traceback.format_exc()
        error['executable'] = sys.executable
    return error
return the Exception data in a format for JSON - RPC
17,169
def directory(self, query, **kwargs):
    """Search for users or channels across the whole server.

    :param query: search query; a dict is stringified with double quotes
        so the endpoint receives JSON-style text.
    :param kwargs: extra query options forwarded to the call helper.
    """
    serialized = str(query).replace("'", '"') if isinstance(query, dict) else query
    return self.__call_api_get('directory', query=serialized, kwargs=kwargs)
Search for users or channels across the whole server.
17,170
def spotlight(self, query, **kwargs):
    """Search for users or rooms that are visible to the current user.

    :param query: term to search for.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('spotlight', query=query, kwargs=kwargs)
Searches for users or rooms that are visible to the user .
17,171
def users_get_presence(self, user_id=None, username=None, **kwargs):
    """Get the online presence of a user.

    Exactly one of ``user_id`` or ``username`` must be provided
    (``user_id`` wins when both are given).

    :raises RocketMissingParamException: when neither is provided.
    """
    if user_id:
        return self.__call_api_get('users.getPresence', userId=user_id, kwargs=kwargs)
    if username:
        return self.__call_api_get('users.getPresence', username=username, kwargs=kwargs)
    raise RocketMissingParamException('userID or username required')
Gets the online presence of a user.
17,172
def users_create(self, email, name, password, username, **kwargs):
    """Create a new user.

    :param email: user's email address.
    :param name: display name.
    :param password: initial password.
    :param username: login name.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('users.create', email=email, name=name, password=password, username=username, kwargs=kwargs)
Creates a user
17,173
def users_create_token(self, user_id=None, username=None, **kwargs):
    """Create an authentication token for a user.

    Exactly one of ``user_id`` or ``username`` must be provided
    (``user_id`` wins when both are given).

    :raises RocketMissingParamException: when neither is provided.
    """
    if user_id:
        return self.__call_api_post('users.createToken', userId=user_id, kwargs=kwargs)
    if username:
        return self.__call_api_post('users.createToken', username=username, kwargs=kwargs)
    raise RocketMissingParamException('userID or username required')
Create a user authentication token .
17,174
def users_forgot_password(self, email, **kwargs):
    """Send an email to reset the user's password.

    :param email: address of the account to reset.
    :param kwargs: extra options forwarded to the call helper.

    NOTE(review): this previously forwarded extras as ``data=kwargs``,
    unlike every sibling wrapper in this class which uses
    ``kwargs=kwargs`` — that looked like a typo that dropped the extra
    options; confirm against the ``__call_api_post`` helper.
    """
    return self.__call_api_post('users.forgotPassword', email=email, kwargs=kwargs)
Send email to reset your password .
17,175
def chat_post_message(self, text, room_id=None, channel=None, **kwargs):
    """Post a new chat message.

    Exactly one of ``room_id`` or ``channel`` must be provided
    (``room_id`` wins when both are given).

    :raises RocketMissingParamException: when neither is provided.
    """
    if room_id:
        return self.__call_api_post('chat.postMessage', roomId=room_id, text=text, kwargs=kwargs)
    if channel:
        return self.__call_api_post('chat.postMessage', channel=channel, text=text, kwargs=kwargs)
    raise RocketMissingParamException('roomId or channel required')
Posts a new chat message .
17,176
def chat_delete(self, room_id, msg_id, **kwargs):
    """Delete a chat message.

    :param room_id: id of the room containing the message.
    :param msg_id: id of the message to delete.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('chat.delete', roomId=room_id, msgId=msg_id, kwargs=kwargs)
Deletes a chat message .
17,177
def chat_search(self, room_id, search_text, **kwargs):
    """Search messages in a channel by room id and text.

    :param room_id: id of the room to search in.
    :param search_text: text to search for.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('chat.search', roomId=room_id, searchText=search_text, kwargs=kwargs)
Search for messages in a channel by id and text message .
17,178
def chat_get_message_read_receipts(self, message_id, **kwargs):
    """Get the read receipts of a message.

    :param message_id: id of the message.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('chat.getMessageReadReceipts', messageId=message_id, kwargs=kwargs)
Get Message Read Receipts
17,179
def channels_history(self, room_id, **kwargs):
    """Retrieve the messages of a channel.

    :param room_id: id of the channel.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('channels.history', roomId=room_id, kwargs=kwargs)
Retrieves the messages from a channel .
17,180
def channels_add_all(self, room_id, **kwargs):
    """Add all users of the server to the channel.

    :param room_id: id of the channel.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.addAll', roomId=room_id, kwargs=kwargs)
Adds all of the users of the Rocket . Chat server to the channel .
17,181
def channels_add_moderator(self, room_id, user_id, **kwargs):
    """Give a user the moderator role in a channel.

    :param room_id: id of the channel.
    :param user_id: id of the user to promote.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.addModerator', roomId=room_id, userId=user_id, kwargs=kwargs)
Gives the role of moderator for a user in the current channel .
17,182
def channels_remove_moderator(self, room_id, user_id, **kwargs):
    """Remove the moderator role from a user in a channel.

    :param room_id: id of the channel.
    :param user_id: id of the user to demote.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.removeModerator', roomId=room_id, userId=user_id, kwargs=kwargs)
Removes the role of moderator from a user in the current channel .
17,183
def channels_add_owner(self, room_id, user_id=None, username=None, **kwargs):
    """Give a user the owner role in a channel.

    Exactly one of ``user_id`` or ``username`` must be provided
    (``user_id`` wins when both are given).

    :raises RocketMissingParamException: when neither is provided.
    """
    if user_id:
        return self.__call_api_post('channels.addOwner', roomId=room_id, userId=user_id, kwargs=kwargs)
    if username:
        return self.__call_api_post('channels.addOwner', roomId=room_id, username=username, kwargs=kwargs)
    raise RocketMissingParamException('userID or username required')
Gives the role of owner for a user in the current channel .
17,184
def channels_remove_owner(self, room_id, user_id, **kwargs):
    """Remove the owner role from a user in a channel.

    :param room_id: id of the channel.
    :param user_id: id of the user.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.removeOwner', roomId=room_id, userId=user_id, kwargs=kwargs)
Removes the role of owner from a user in the current channel .
17,185
def channels_archive(self, room_id, **kwargs):
    """Archive a channel.

    :param room_id: id of the channel.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.archive', roomId=room_id, kwargs=kwargs)
Archives a channel .
17,186
def channels_create(self, name, **kwargs):
    """Create a new public channel, optionally including users.

    :param name: name of the new channel.
    :param kwargs: extra options (e.g. members) forwarded to the call helper.
    """
    return self.__call_api_post('channels.create', name=name, kwargs=kwargs)
Creates a new public channel optionally including users .
17,187
def channels_get_integrations(self, room_id, **kwargs):
    """Retrieve the integrations of a channel.

    :param room_id: id of the channel.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('channels.getIntegrations', roomId=room_id, kwargs=kwargs)
Retrieves the integrations which the channel has
17,188
def channels_kick(self, room_id, user_id, **kwargs):
    """Remove a user from a channel.

    :param room_id: id of the channel.
    :param user_id: id of the user to remove.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.kick', roomId=room_id, userId=user_id, kwargs=kwargs)
Removes a user from the channel .
17,189
def channels_leave(self, room_id, **kwargs):
    """Remove the calling user from the channel.

    :param room_id: id of the channel to leave.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.leave', roomId=room_id, kwargs=kwargs)
Causes the callee to be removed from the channel .
17,190
def channels_rename(self, room_id, name, **kwargs):
    """Change the name of a channel.

    :param room_id: id of the channel.
    :param name: new channel name.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.rename', roomId=room_id, name=name, kwargs=kwargs)
Changes the name of the channel .
17,191
def channels_set_description(self, room_id, description, **kwargs):
    """Set the description of a channel.

    :param room_id: id of the channel.
    :param description: new description text.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.setDescription', roomId=room_id, description=description, kwargs=kwargs)
Sets the description for the channel .
17,192
def channels_set_join_code(self, room_id, join_code, **kwargs):
    """Set the code required to join a channel.

    :param room_id: id of the channel.
    :param join_code: new join code.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.setJoinCode', roomId=room_id, joinCode=join_code, kwargs=kwargs)
Sets the code required to join the channel .
17,193
def channels_set_topic(self, room_id, topic, **kwargs):
    """Set the topic of a channel.

    :param room_id: id of the channel.
    :param topic: new topic text.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.setTopic', roomId=room_id, topic=topic, kwargs=kwargs)
Sets the topic for the channel .
17,194
def channels_set_type(self, room_id, a_type, **kwargs):
    """Set the type of room this channel should be.

    :param room_id: id of the channel.
    :param a_type: room type ('c' for channel or 'p' for private group).
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.setType', roomId=room_id, type=a_type, kwargs=kwargs)
Sets the type of room this channel should be: either 'c' (public channel) or 'p' (private group).
17,195
def channels_set_announcement(self, room_id, announce, **kwargs):
    """Set the announcement of a channel.

    :param room_id: id of the channel.
    :param announce: new announcement text.
    :param kwargs: extra options forwarded to the call helper.
    """
    return self.__call_api_post('channels.setAnnouncement', roomId=room_id, announcement=announce, kwargs=kwargs)
Sets the announcement for the channel .
17,196
def channels_set_custom_fields(self, rid, custom_fields, **kwargs):
    """Set the custom fields of a channel.

    :param rid: id of the channel.
    :param custom_fields: mapping of custom field values.
    :param kwargs: extra options forwarded to the call helper
        (added for consistency with the other channel setters;
        backward-compatible — existing two-argument calls are unchanged).
    """
    return self.__call_api_post('channels.setCustomFields', roomId=rid, customFields=custom_fields, kwargs=kwargs)
Sets the custom fields for the channel .
17,197
def channels_delete(self, room_id=None, channel=None, **kwargs):
    """Delete a public channel.

    Exactly one of ``room_id`` or ``channel`` (the room name) must be
    provided (``room_id`` wins when both are given).

    :raises RocketMissingParamException: when neither is provided.
    """
    if room_id:
        return self.__call_api_post('channels.delete', roomId=room_id, kwargs=kwargs)
    if channel:
        return self.__call_api_post('channels.delete', roomName=channel, kwargs=kwargs)
    raise RocketMissingParamException('roomId or channel required')
Delete a public channel .
17,198
def channels_get_all_user_mentions_by_channel(self, room_id, **kwargs):
    """Get all user mentions of a channel.

    :param room_id: id of the channel.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('channels.getAllUserMentionsByChannel', roomId=room_id, kwargs=kwargs)
Gets all the mentions of a channel .
17,199
def groups_history(self, room_id, **kwargs):
    """Retrieve the messages of a private group.

    :param room_id: id of the private group.
    :param kwargs: extra query options forwarded to the call helper.
    """
    return self.__call_api_get('groups.history', roomId=room_id, kwargs=kwargs)
Retrieves the messages from a private group .