idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
44,700
def from_poppy_creature(cls, poppy, motors, passiv, tip, reversed_motors=None):
    """Creates a kinematic chain from motors of a Poppy Creature.

    :param poppy: Poppy creature (provides the URDF file and motor objects)
    :param motors: motors constituting the chain
    :param passiv: motors that should be considered as passive (non-active links)
    :param tip: last link vector (tip of the chain)
    :param reversed_motors: motors whose rotation direction must be inverted
    """
    # Fix: the original used a mutable default argument (reversed_motors=[]),
    # shared across calls; replaced by a None sentinel.
    if reversed_motors is None:
        reversed_motors = []

    chain_elements = get_chain_from_joints(poppy.urdf_file,
                                           [m.name for m in motors])

    # Base link is fixed, tip link is active.
    activ = [False] + [m not in passiv for m in motors] + [True]

    chain = cls.from_urdf_file(poppy.urdf_file,
                               base_elements=chain_elements,
                               last_link_vector=tip,
                               active_links_mask=activ)

    chain.motors = [getattr(poppy, l.name) for l in chain.links[1:-1]]

    for m, l in zip(chain.motors, chain.links[1:-1]):
        # Accessing angle_limit presumably forces the motor to refresh its
        # limits before the raw attributes are read below -- TODO confirm.
        m.angle_limit
        bounds = m.__dict__['lower_limit'], m.__dict__['upper_limit']
        l.bounds = tuple(map(rad2deg, bounds))

    chain._reversed = array([(-1 if m in reversed_motors else 1)
                             for m in motors])

    return chain
Creates a kinematic chain from motors of a Poppy Creature .
44,701
def goto(self, position, duration, wait=False, accurate=False):
    """Goes to a given cartesian position.

    :param position: cartesian target [x, y, z]
    :param duration: motion duration (in seconds)
    :param wait: whether to block until the motion is over
    :param accurate: whether to use the accurate (slower) IK resolution
    :raises ValueError: when position does not have exactly 3 components
    """
    if len(position) != 3:
        raise ValueError('Position should be a list [x, y, z]!')

    # Build a homogeneous transform with only the translation set.
    pose = eye(4)
    pose[:3, 3] = position
    self._goto(pose, duration, wait, accurate)
Goes to a given cartesian position .
44,702
def _goto ( self , pose , duration , wait , accurate ) : kwargs = { } if not accurate : kwargs [ 'max_iter' ] = 3 q0 = self . convert_to_ik_angles ( self . joints_position ) q = self . inverse_kinematics ( pose , initial_position = q0 , ** kwargs ) joints = self . convert_from_ik_angles ( q ) last = self . motors [ - 1 ] for m , pos in list ( zip ( self . motors , joints ) ) : m . goto_position ( pos , duration , wait = False if m != last else wait )
Goes to a given cartesian pose .
44,703
def convert_to_ik_angles(self, joints):
    """Convert from poppy representation to IKPY internal representation.

    :param joints: joint positions (degrees), one per motor
    :return: radians, padded with a fixed 0 for the base and tip links
    :raises ValueError: when len(joints) does not match the motor count
    """
    if len(joints) != len(self.motors):
        raise ValueError('Incompatible data, len(joints) should be {}!'.format(len(self.motors)))

    raw = []
    for angle, motor in zip(joints, self.motors):
        orientation = 1 if motor.direct else -1
        raw.append((angle + motor.offset) * orientation)

    # Element-wise multiplication with the per-motor reversal mask
    # (self._reversed is a numpy array, so the result is an ndarray).
    raw = raw * self._reversed

    return [0] + [deg2rad(angle) for angle in raw] + [0]
Convert from poppy representation to IKPY internal representation .
44,704
def convert_from_ik_angles(self, joints):
    """Convert from IKPY internal representation to poppy representation.

    :param joints: angles in radians, including the base and tip links
    :return: joint positions (degrees), one per motor
    :raises ValueError: when len(joints) != len(self.motors) + 2
    """
    if len(joints) != len(self.motors) + 2:
        raise ValueError('Incompatible data, len(joints) should be {}!'.format(len(self.motors) + 2))

    # Drop the fixed base and tip links and convert to degrees.
    degrees = [rad2deg(j) for j in joints[1:-1]]
    degrees *= self._reversed

    return [(angle * (1 if motor.direct else -1)) - motor.offset
            for angle, motor in zip(degrees, self.motors)]
Convert from IKPY internal representation to poppy representation .
44,705
def factory_reset(self, ids, except_ids=False, except_baudrate_and_ids=False):
    """Reset all motors on the bus to their factory default settings.

    :param ids: ids of the motors to reset
    :param except_ids: if True, motor ids are preserved by the reset
    :param except_baudrate_and_ids: if True, baudrate and ids are preserved
    """
    if except_baudrate_and_ids:
        mode = 0x02
    elif except_ids:
        mode = 0x01
    else:
        mode = 0xFF

    for motor_id in ids:
        try:
            self._send_packet(self._protocol.DxlResetPacket(motor_id, mode))
        except (DxlTimeoutError, DxlCommunicationError):
            # Best effort: a motor that does not answer is simply skipped.
            pass
Reset all motors on the bus to their factory default settings .
44,706
def stop(self, wait=True):
    """Requests the primitive to stop.

    :param bool wait: whether to wait until the primitive is actually stopped
    """
    logger.info("Primitive %s stopped.", self)
    StoppableThread.stop(self, wait)
Requests the primitive to stop .
44,707
def recent_update_frequencies(self):
    """Returns the most recent update frequencies (Hz), newest first.

    Each frequency is the inverse of the interval between two consecutive
    timestamps stored in self._recent_updates.
    """
    periods = numpy.diff(self._recent_updates)
    frequencies = [1.0 / p for p in periods]
    frequencies.reverse()
    return frequencies
Returns the 10 most recent update frequencies .
44,708
def goto_position(self, position, duration, control=None, wait=False):
    """Automatically sets the goal position and the moving speed to reach
    the desired position within the duration.

    :param position: target position (degrees)
    :param duration: duration of the motion (in seconds)
    :param control: 'minjerk' or 'dummy'; defaults to self.goto_behavior
    :param wait: whether to block until the motion is over
    """
    control = self.goto_behavior if control is None else control

    if control == 'minjerk':
        motion = GotoMinJerk(self, position, duration)
        motion.start()
        if wait:
            motion.wait_to_stop()
    elif control == 'dummy':
        distance = abs(self.present_position - position)
        # A zero (or negative) duration means "as fast as possible".
        self.moving_speed = distance / float(duration) if duration > 0 else numpy.inf
        self.goal_position = position
        if wait:
            time.sleep(duration)
Automatically sets the goal position and the moving speed to reach the desired position within the duration .
44,709
def add_tracked_motors(self, tracked_motors):
    """Add new motors to the recording.

    :param tracked_motors: names of the motors to add

    Duplicates are removed, so adding an already tracked motor is a no-op.
    """
    # Fix: list(...) is required -- under Python 3 map() returns an
    # iterator, and concatenating it to a list raises a TypeError.
    new_mockup_motors = list(map(self.get_mockup_motor, tracked_motors))
    self.tracked_motors = list(set(self.tracked_motors + new_mockup_motors))
Add new motors to the recording
44,710
def update(self):
    """Combines, at a predefined frequency, the orders requested by the
    attached primitives and applies them to the real motors.

    For the 'led' register the values are combined by discarding 'off'
    whenever any primitive asked for an actual color; every other register
    goes through self._filter.
    """
    with self.syncing:
        for m in self._motors:
            to_set = defaultdict(list)

            # Gather all pending requests for this motor.
            for p in self._prim:
                # Fix: .items() works on both Python 2 and 3
                # (.iteritems() is Python 2 only).
                for key, val in getattr(p.robot, m.name)._to_set.items():
                    to_set[key].append(val)

            for key, val in to_set.items():
                if key == 'led':
                    colors = set(val)
                    if len(colors) > 1:
                        colors -= {'off'}
                    filtred_val = colors.pop()
                else:
                    filtred_val = self._filter(val)

                logger.debug('Combined %s.%s from %s to %s',
                             m.name, key, val, filtred_val)
                setattr(m, key, filtred_val)

        # Notify every primitive that its orders have been synced
        # (plain loop instead of a side-effect list comprehension).
        for p in self._prim:
            p._synced.set()
Combines , at a predefined frequency , the requested orders and applies them to the real motors .
44,711
def stop(self):
    """Stop the primitive manager, stopping all attached primitives first."""
    # Iterate over a copy: stopping a primitive presumably mutates
    # self.primitives (the original iterated a slice) -- TODO confirm.
    for primitive in self.primitives[:]:
        primitive.stop()
    StoppableLoopThread.stop(self)
Stop the primitive manager .
44,712
def load_scene(self, scene_path, start=False):
    """Loads a scene on the V-REP server.

    :param str scene_path: path to the scene file (.ttt)
    :param bool start: whether to directly start the simulation afterwards
    :raises IOError: when the scene file does not exist locally

    The simulation is always stopped first.
    """
    self.stop_simulation()

    if not os.path.exists(scene_path):
        raise IOError("No such file or directory: '{}'".format(scene_path))

    self.call_remote_api('simxLoadScene', scene_path, True)

    if start:
        self.start_simulation()
Loads a scene on the V - REP server .
44,713
def get_motor_position(self, motor_name):
    """Gets the motor current position.

    :param str motor_name: name of the motor
    :return: whatever the remote API returns for the joint position
    """
    handle = self.get_object_handle(motor_name)
    return self.call_remote_api('simxGetJointPosition', handle, streaming=True)
Gets the motor current position .
44,714
def set_motor_position(self, motor_name, position):
    """Sets the motor target position.

    :param str motor_name: name of the motor
    :param position: target joint position
    """
    handle = self.get_object_handle(motor_name)
    self.call_remote_api('simxSetJointTargetPosition', handle, position,
                         sending=True)
Sets the motor target position .
44,715
def set_motor_force(self, motor_name, force):
    """Sets the maximum force or torque that a joint can exert.

    :param str motor_name: name of the motor
    :param force: maximum force or torque
    """
    handle = self.get_object_handle(motor_name)
    self.call_remote_api('simxSetJointForce', handle, force, sending=True)
Sets the maximum force or torque that a joint can exert .
44,716
def get_object_position(self, object_name, relative_to_object=None):
    """Gets the object position.

    :param object_name: name of the object
    :param relative_to_object: if given, the position is expressed
        relatively to this object; otherwise the absolute frame (-1) is used
    """
    handle = self.get_object_handle(object_name)
    if relative_to_object is None:
        relative_handle = -1
    else:
        relative_handle = self.get_object_handle(relative_to_object)
    return self.call_remote_api('simxGetObjectPosition',
                                handle, relative_handle, streaming=True)
Gets the object position .
44,717
def set_object_position(self, object_name, position=None):
    """Sets the object position (absolute frame).

    :param object_name: name of the object
    :param position: [x, y, z] position; defaults to the origin
    """
    # Fix: the original used a mutable default argument ([0, 0, 0]),
    # shared across calls; replaced by a None sentinel.
    if position is None:
        position = [0, 0, 0]
    h = self.get_object_handle(object_name)
    return self.call_remote_api('simxSetObjectPosition', h, -1, position,
                                sending=True)
Sets the object position .
44,718
def get_object_handle(self, obj):
    """Gets the vrep object handle, caching it after the first lookup."""
    try:
        return self._object_handles[obj]
    except KeyError:
        handle = self._get_object_handle(obj=obj)
        self._object_handles[obj] = handle
        return handle
Gets the vrep object handle .
44,719
def get_collision_state(self, collision_name):
    """Gets the collision state.

    :param str collision_name: name of the collision object
    """
    handle = self.get_collision_handle(collision_name)
    return self.call_remote_api('simxReadCollision', handle, streaming=True)
Gets the collision state .
44,720
def get_collision_handle(self, collision):
    """Gets a vrep collision handle (cached after the first lookup)."""
    if collision not in self._object_handles:
        self._object_handles[collision] = self._get_collision_handle(collision)
    return self._object_handles[collision]
Gets a vrep collisions handle .
44,721
def change_object_name(self, old_name, new_name):
    """Change object name (and invalidate its cached handle)."""
    handle = self._get_object_handle(old_name)
    # Drop the stale cache entry for the old name, if any.
    self._object_handles.pop(old_name, None)
    lua_code = "simSetObjectName({}, '{}')".format(handle, new_name)
    self._inject_lua_code(lua_code)
Change object name
44,722
def _create_pure_shape ( self , primitive_type , options , sizes , mass , precision ) : lua_code = "simCreatePureShape({}, {}, {{{}, {}, {}}}, {}, {{{}, {}}})" . format ( primitive_type , options , sizes [ 0 ] , sizes [ 1 ] , sizes [ 2 ] , mass , precision [ 0 ] , precision [ 1 ] ) self . _inject_lua_code ( lua_code )
Create Pure Shape
44,723
def _inject_lua_code ( self , lua_code ) : msg = ( ctypes . c_ubyte * len ( lua_code ) ) . from_buffer_copy ( lua_code . encode ( ) ) self . call_remote_api ( 'simxWriteStringStream' , 'my_lua_code' , msg )
Sends raw lua code and evaluates it without any checking !
44,724
def call_remote_api(self, func_name, *args, **kwargs):
    """Calls any remote API func in a thread-safe way.

    :param str func_name: name of the remote_api function to call
    :param args: positional arguments forwarded to the remote call
    :param kwargs: keyword arguments; the mode extracted by
        self._extract_mode selects the vrep operation mode, and the
        reserved '_force' key fires the call without error checking.
    :raises VrepIOErrors: when the call reports an error bit
    """
    f = getattr(remote_api, func_name)

    mode = self._extract_mode(kwargs)
    kwargs['operationMode'] = vrep_mode[mode]

    if '_force' in kwargs:
        del kwargs['_force']
        _force = True
    else:
        _force = False

    # Retry while vrep only answers "no value yet" (streaming warm-up).
    for _ in range(VrepIO.MAX_ITER):
        with self._lock:
            ret = f(self.client_id, *args, **kwargs)

        if _force:
            return

        if mode == 'sending' or isinstance(ret, int):
            err, res = ret, None
        else:
            err, res = ret[0], ret[1:]
            res = res[0] if len(res) == 1 else res

        # Decode the error code into a list of per-bit booleans.
        err = [bool((err >> i) & 1) for i in range(len(vrep_error))]

        if remote_api.simx_return_novalue_flag not in err:
            break

        time.sleep(VrepIO.TIMEOUT)

    if any(err):
        msg = ' '.join([vrep_error[2 ** i]
                        for i, e in enumerate(err) if e])
        raise VrepIOErrors(msg)

    return res
Calls any remote API func in a thread_safe way .
44,725
def run(self, **kwargs):
    """Start the tornado server and run it forever.

    If the port is already bound, a warning is logged instead of raising.
    """
    try:
        loop = IOLoop()
        app = self.make_app()
        app.listen(self.port)
        loop.start()
    except socket.error as serr:
        if serr.errno != errno.EADDRINUSE:
            raise serr
        else:
            logger.warning('The webserver port {} is already used. May be the HttpRobotServer is already running or another software is using this port.'.format(self.port))
Start the tornado server and run it forever .
44,726
def close(self, _force_lock=False):
    """Closes the serial communication if opened.

    :param bool _force_lock: bypasses the regular serial-lock acquisition
        -- presumably used when the lock is already held; TODO confirm.
    """
    if not self.closed:
        # __force_lock(...) yields a context manager; when falsy, the
        # regular serial lock is used instead.
        with self.__force_lock(_force_lock) or self._serial_lock:
            self._serial.close()
            self.__used_ports.remove(self.port)

        logger.info("Closing port '%s'", self.port,
                    extra={'port': self.port,
                           'baudrate': self.baudrate,
                           'timeout': self.timeout})
Closes the serial communication if opened .
44,727
def ping(self, id):
    """Pings the motor with the specified id.

    :return: True if the motor answered, False on timeout
    """
    packet = self._protocol.DxlPingPacket(id)
    try:
        self._send_packet(packet, error_handler=None)
    except DxlTimeoutError:
        return False
    return True
Pings the motor with the specified id .
44,728
def scan(self, ids=range(254)):
    """Pings all ids within the specified list.

    By default it scans ids 0..253, i.e. it finds all the motors
    connected to the bus.
    """
    return [motor_id for motor_id in ids if self.ping(motor_id)]
Pings all ids within the specified list by default it finds all the motors connected to the bus .
44,729
def get_model(self, ids):
    """Gets the model for the specified motors (cached after first read)."""
    missing = [i for i in ids if i not in self._known_models]
    raw = self._get_model(missing, convert=False)
    self._known_models.update(zip(missing, [dxl_to_model(m) for m in raw]))
    return tuple(self._known_models[i] for i in ids)
Gets the model for the specified motors .
44,730
def change_baudrate(self, baudrate_for_ids):
    """Changes the baudrate of the specified motors.

    :param dict baudrate_for_ids: mapping motor id -> new baudrate

    The cached model and mode of each affected motor are invalidated,
    since the motor has to be re-discovered on the new baudrate.
    """
    self._change_baudrate(baudrate_for_ids)

    for motor_id in baudrate_for_ids:
        self._known_models.pop(motor_id, None)
        self._known_mode.pop(motor_id, None)
Changes the baudrate of the specified motors .
44,731
def get_status_return_level(self, ids, **kwargs):
    """Gets the status return level for the specified motors.

    A motor that answers a ping but times out on the read is reported as
    'never' (it never sends status packets).
    """
    convert = kwargs['convert'] if 'convert' in kwargs else self._convert

    srl = []
    for id in ids:
        try:
            srl.extend(self._get_status_return_level((id,),
                                                     error_handler=None,
                                                     convert=convert))
        except DxlTimeoutError as e:
            if self.ping(id):
                srl.append('never' if convert else 0)
            else:
                if self._error_handler:
                    self._error_handler.handle_timeout(e)
                    return ()
                else:
                    raise e

    return tuple(srl)
Gets the status level for the specified motors .
44,732
def set_status_return_level(self, srl_for_id, **kwargs):
    """Sets status return level to the specified motors.

    :param dict srl_for_id: mapping motor id -> level; when conversion is
        enabled levels are the strings 'never', 'read' or 'always'.
    """
    convert = kwargs['convert'] if 'convert' in kwargs else self._convert
    if convert:
        levels = ('never', 'read', 'always')
        srl_for_id = {motor_id: levels.index(level)
                      for motor_id, level in srl_for_id.items()}
    self._set_status_return_level(srl_for_id, convert=False)
Sets status return level to the specified motors .
44,733
def switch_led_on(self, ids):
    """Switches on the LED of the motors with the specified ids."""
    self._set_LED({motor_id: True for motor_id in ids})
Switches on the LED of the motors with the specified ids .
44,734
def switch_led_off(self, ids):
    """Switches off the LED of the motors with the specified ids."""
    self._set_LED({motor_id: False for motor_id in ids})
Switches off the LED of the motors with the specified ids .
44,735
def enable_torque(self, ids):
    """Enables torque of the motors with the specified ids."""
    self._set_torque_enable({motor_id: True for motor_id in ids})
Enables torque of the motors with the specified ids .
44,736
def disable_torque(self, ids):
    """Disables torque of the motors with the specified ids."""
    self._set_torque_enable({motor_id: False for motor_id in ids})
Disables torque of the motors with the specified ids .
44,737
def get_pid_gain(self, ids, **kwargs):
    """Gets the pid gain for the specified motors.

    Each gain tuple returned by the low-level getter is reversed
    (presumably from register order to (p, i, d) -- TODO confirm).
    """
    return tuple(tuple(gains[::-1]) for gains in self._get_pid_gain(ids, **kwargs))
Gets the pid gain for the specified motors .
44,738
def set_pid_gain(self, pid_for_id, **kwargs):
    """Sets the pid gain to the specified motors.

    :param dict pid_for_id: mapping motor id -> gain tuple; each tuple is
        reversed before being handed to the low-level setter.

    Fix: uses zip()/keys() instead of the Python 2 only itertools.izip()
    and dict.iterkeys(), so it now also works on Python 3.
    """
    pid_for_id = dict(zip(pid_for_id.keys(),
                          [tuple(reversed(t)) for t in pid_for_id.values()]))
    self._set_pid_gain(pid_for_id, **kwargs)
Sets the pid gain to the specified motors .
44,739
def get_control_table(self, ids, **kwargs):
    """Gets the full control table for the specified motors."""
    error_handler = kwargs['error_handler'] if ('error_handler' in kwargs) else self._error_handler
    convert = kwargs['convert'] if ('convert' in kwargs) else self._convert

    # Blacklisted composite registers, handled elsewhere.
    bl = ('goal position speed load', 'present position speed load')
    controls = [c for c in self._AbstractDxlIO__controls if c.name not in bl]

    res = []

    for id, model in zip(ids, self.get_model(ids)):
        # NOTE(review): 'controls' is narrowed *inside* the loop, so for a
        # second motor of a different model the list has already been
        # filtered by the first model -- verify this is intended.
        controls = [c for c in controls if model in c.models]
        controls = sorted(controls, key=lambda c: c.address)

        address = controls[0].address
        length = controls[-1].address + controls[-1].nb_elem * controls[-1].length

        rp = self._protocol.DxlReadDataPacket(id, address, length)
        sp = self._send_packet(rp, error_handler=error_handler)

        d = OrderedDict()
        for c in controls:
            # NOTE(review): parameters are sliced with the absolute
            # register address even though the read started at 'address'
            # -- confirm the packet parameters are absolute-indexed.
            v = dxl_decode_all(sp.parameters[c.address:c.address + c.nb_elem * c.length],
                               c.nb_elem)
            d[c.name] = c.dxl_to_si(v, model) if convert else v

        res.append(d)

    return tuple(res)
Gets the full control table for the specified motors .
44,740
def check_motor_eprom_configuration(config, dxl_io, motor_names):
    """Change the angle limits depending on the robot configuration and
    check that the return delay time is set to 0.

    :param dict config: robot configuration (expects config['motors'])
    :param dxl_io: open dynamixel IO bus
    :param motor_names: names of the motors to check
    """
    changed_angle_limits = {}
    changed_return_delay_time = {}

    for name in motor_names:
        m = config['motors'][name]
        id = m['id']

        try:
            old_limits = dxl_io.get_angle_limit((id,))[0]
            old_return_delay_time = dxl_io.get_return_delay_time((id,))[0]
        except IndexError:
            # Unresponsive motor (empty answer): skip it.
            continue

        if old_return_delay_time != 0:
            logger.warning("Return delay time of %s changed from %s to 0",
                           name, old_return_delay_time)
            changed_return_delay_time[id] = 0

        new_limits = m['angle_limit']
        if 'wheel_mode' in m and m['wheel_mode']:
            dxl_io.set_wheel_mode([m['id']])
            time.sleep(0.5)
        else:
            d = numpy.linalg.norm(numpy.asarray(new_limits) - numpy.asarray(old_limits))
            # Only rewrite the EEPROM when limits differ noticeably.
            if d > 1:
                logger.warning("Limits of '%s' changed from %s to %s",
                               name, old_limits, new_limits,
                               extra={'config': config})
                changed_angle_limits[id] = new_limits

    if changed_angle_limits:
        dxl_io.set_angle_limit(changed_angle_limits)
        time.sleep(0.5)

    if changed_return_delay_time:
        dxl_io.set_return_delay_time(changed_return_delay_time)
        time.sleep(0.5)
Change the angle limits depending on the robot configuration ; check if the return delay time is set to 0 .
44,741
def _get_gdcmconv():
    """Get the full path to gdcmconv.

    :raises ConversionError: when the executable cannot be found
    """
    executable = settings.gdcmconv_path
    if executable is None:
        executable = _which('gdcmconv')
    if executable is None:
        executable = _which('gdcmconv.exe')
    if executable is None:
        raise ConversionError('GDCMCONV_NOT_FOUND')
    return executable
Get the full path to gdcmconv . If not found raise error
44,742
def compress_directory(dicom_directory):
    """Compress all dicom files in a directory in place.

    Directories whose files are already compressed are left untouched.
    """
    if _is_compressed(dicom_directory):
        return

    logger.info('Compressing dicom files in %s' % dicom_directory)
    for root, _, files in os.walk(dicom_directory):
        for name in files:
            path = os.path.join(root, name)
            if is_dicom_file(path):
                _compress_dicom(path)
This function can be used to convert a folder of jpeg compressed images to uncompressed ones
44,743
def is_dicom_file(filename):
    """Util function to check whether a file is a dicom file.

    The first 128 bytes are preamble; the next 4 bytes should contain
    'DICM'. Otherwise, unless forced reading is enabled, it is not a dicom.

    :param filename: file to check
    """
    # Fix: with-statement guarantees the file is closed even if read()
    # raises (the original open/close pair leaked the handle on error).
    with open(filename, 'rb') as file_stream:
        file_stream.seek(128)
        data = file_stream.read(4)

    if data == b'DICM':
        return True

    if settings.pydicom_read_force:
        try:
            dicom_headers = pydicom.read_file(filename, defer_size="1 KB",
                                              stop_before_pixels=True,
                                              force=True)
            if dicom_headers is not None:
                return True
        except Exception:
            # Fix: narrowed from a bare except so KeyboardInterrupt /
            # SystemExit are no longer swallowed.
            pass
    return False
Util function to check if a file is a dicom file ; the first 128 bytes are preamble , the next 4 bytes should contain DICM , otherwise it is not a dicom
44,744
def _is_compressed(dicom_file, force=False):
    """Check if a dicom file is compressed, based on its TransferSyntaxUID."""
    header = pydicom.read_file(dicom_file,
                               defer_size="1 KB",
                               stop_before_pixels=True,
                               force=force)

    uncompressed_types = ["1.2.840.10008.1.2",
                          "1.2.840.10008.1.2.1",
                          "1.2.840.10008.1.2.1.99",
                          "1.2.840.10008.1.2.2"]

    uncompressed = ('TransferSyntaxUID' in header.file_meta
                    and header.file_meta.TransferSyntaxUID in uncompressed_types)
    return not uncompressed
Check if dicoms are compressed or not
44,745
def _decompress_dicom(dicom_file, output_file):
    """Convert a jpeg-compressed dicom to an uncompressed one using gdcmconv.

    :param dicom_file: compressed input file
    :param output_file: path of the uncompressed output file
    """
    gdcmconv = _get_gdcmconv()
    # '-w' writes the decompressed (raw) version of the file.
    subprocess.check_output([gdcmconv, '-w', dicom_file, output_file])
This function can be used to convert a jpeg compressed image to an uncompressed one for further conversion
44,746
def dicom_diff(file1, file2):
    """Print (to stdout) the fields that differ between two DICOM images."""
    datasets = (compressed_dicom.read_file(file1),
                compressed_dicom.read_file(file2))

    rep = []
    for dataset in datasets:
        lines = (str(dataset.file_meta) + "\n" + str(dataset)).split('\n')
        rep.append([line + '\n' for line in lines])

    # Only show added/removed lines, not the common context.
    for line in difflib.Differ().compare(rep[0], rep[1]):
        if line[0] == '+' or line[0] == '-':
            sys.stdout.write(line)
Shows the fields that differ between two DICOM images .
44,747
def _get_number_of_slices(self, slice_type):
    """Get the number of slices along the normal of the given slice type."""
    if slice_type == SliceType.AXIAL:
        orientation = self.axial_orientation
    elif slice_type == SliceType.SAGITTAL:
        orientation = self.sagittal_orientation
    elif slice_type == SliceType.CORONAL:
        orientation = self.coronal_orientation
    else:
        # Unknown slice type: the original implicitly returned None.
        return None
    return self.dimensions[orientation.normal_component]
Get the number of slices in a certain direction
44,748
def _get_first_header(dicom_directory):
    """Get the header of the first dicom file found in a directory.

    Useful to determine the type of data to convert.

    :raises ConversionError: when no dicom file is found
    """
    for root, _, file_names in os.walk(dicom_directory):
        for file_name in file_names:
            file_path = os.path.join(root, file_name)
            if compressed_dicom.is_dicom_file(file_path):
                return compressed_dicom.read_file(
                    file_path,
                    stop_before_pixels=True,
                    force=dicom2nifti.settings.pydicom_read_force)
    raise ConversionError('NO_DICOM_FILES_FOUND')
Function to get the first dicom file from a directory and return the header . Useful to determine the type of data to convert
44,749
def _reorient_3d(image):
    """Reorganize the data for a 3d nifti.

    Builds a new volume ordered (sagittal, coronal, axial) by stacking
    axial slices, flipping each left/right and -- when the coronal y axis
    is inverted -- reversing the axial order.
    """
    new_image = numpy.zeros(
        [image.dimensions[image.sagittal_orientation.normal_component],
         image.dimensions[image.coronal_orientation.normal_component],
         image.dimensions[image.axial_orientation.normal_component]],
        dtype=image.nifti_data.dtype)

    if image.coronal_orientation.y_inverted:
        for i in range(new_image.shape[2]):
            axial = image.get_slice(SliceType.AXIAL,
                                    new_image.shape[2] - 1 - i).original_data
            new_image[:, :, i] = numpy.fliplr(numpy.squeeze(axial))
    else:
        for i in range(new_image.shape[2]):
            axial = image.get_slice(SliceType.AXIAL, i).original_data
            new_image[:, :, i] = numpy.fliplr(numpy.squeeze(axial))

    return new_image
Reorganize the data for a 3d nifti
44,750
def dicom_to_nifti(dicom_input, output_file=None):
    """Main dicom to nifti conversion function for philips images.

    Determines the type of series (multiframe/singleframe, 4D/anatomical)
    and dispatches to the matching converter.

    :param dicom_input: philips dicom input data
    :param output_file: optional path for the nifti output
    """
    # NOTE(review): assert is stripped under python -O; consider raising
    # a ConversionError instead.
    assert common.is_philips(dicom_input)

    if common.is_multiframe_dicom(dicom_input):
        _assert_explicit_vr(dicom_input)
        logger.info('Found multiframe dicom')
        if _is_multiframe_4d(dicom_input):
            logger.info('Found sequence type: MULTIFRAME 4D')
            return _multiframe_to_nifti(dicom_input, output_file)
        if _is_multiframe_anatomical(dicom_input):
            logger.info('Found sequence type: MULTIFRAME ANATOMICAL')
            return _multiframe_to_nifti(dicom_input, output_file)
    else:
        logger.info('Found singleframe dicom')
        grouped_dicoms = _get_grouped_dicoms(dicom_input)
        if _is_singleframe_4d(dicom_input):
            logger.info('Found sequence type: SINGLEFRAME 4D')
            return _singleframe_to_nifti(grouped_dicoms, output_file)

    # Nothing special detected: fall back to the generic converter.
    logger.info('Assuming anatomical data')
    return convert_generic.dicom_to_nifti(dicom_input, output_file)
This is the main dicom to nifti conversion function for philips images . As input philips images are required . It will then determine the type of images and do the correct conversion
44,751
def _assert_explicit_vr(dicom_input):
    """Assert that explicit VR is used.

    :raises ConversionError: for implicit VR enhanced dicom (when the
        corresponding validation setting is enabled)
    """
    if settings.validate_multiframe_implicit:
        header = dicom_input[0]
        # (0002,0010) is the TransferSyntaxUID; this UID is implicit VR.
        if header.file_meta[0x0002, 0x0010].value == '1.2.840.10008.1.2':
            raise ConversionError('IMPLICIT_VR_ENHANCED_DICOM')
Assert that explicit vr is used
44,752
def _is_multiframe_4d(dicom_input):
    """Detect if a dicom series is a philips multiframe 4D dataset."""
    if not common.is_multiframe_dicom(dicom_input):
        return False

    header = dicom_input[0]

    # Private philips tag: number of slices per stack.
    slices_per_stack = common.get_ss_value(
        header[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)])
    number_of_stacks = int(int(header.NumberOfFrames) / slices_per_stack)

    # More than one stack means a 4D dataset.
    return number_of_stacks > 1
Use this function to detect if a dicom series is a philips multiframe 4D dataset
44,753
def _is_singleframe_4d(dicom_input):
    """Detect if a dicom series is a philips singleframe 4D dataset."""
    header = dicom_input[0]

    # The private philips 'slice number' tag must be present.
    if Tag(0x2001, 0x100a) not in header:
        return False

    # More than one group of dicoms means a 4D dataset.
    return len(_get_grouped_dicoms(dicom_input)) > 1
Use this function to detect if a dicom series is a philips singleframe 4D dataset
44,754
def _is_bval_type_a(grouped_dicoms):
    """Check if the bvals are stored in the first of 2 currently known
    ways for single frame dti (private philips bval/bvec tags)."""
    bval_tag = Tag(0x2001, 0x1003)
    bvec_x_tag = Tag(0x2005, 0x10b0)
    bvec_y_tag = Tag(0x2005, 0x10b1)
    bvec_z_tag = Tag(0x2005, 0x10b2)

    for group in grouped_dicoms:
        first = group[0]
        if (bvec_x_tag in first and _is_float(common.get_fl_value(first[bvec_x_tag]))
                and bvec_y_tag in first and _is_float(common.get_fl_value(first[bvec_y_tag]))
                and bvec_z_tag in first and _is_float(common.get_fl_value(first[bvec_z_tag]))
                and bval_tag in first and _is_float(common.get_fl_value(first[bval_tag]))
                and common.get_fl_value(first[bval_tag]) != 0):
            return True
    return False
Check if the bvals are stored in the first of 2 currently known ways for single frame dti
44,755
def _is_bval_type_b(grouped_dicoms):
    """Check if the bvals are stored in the second of 2 currently known
    ways for single frame dti (standard dicom diffusion tags)."""
    bval_tag = Tag(0x0018, 0x9087)
    bvec_tag = Tag(0x0018, 0x9089)

    for group in grouped_dicoms:
        first = group[0]
        if bvec_tag in first and bval_tag in first:
            bvec = common.get_fd_array_value(first[bvec_tag], 3)
            bval = common.get_fd_value(first[bval_tag])
            if (_is_float(bvec[0]) and _is_float(bvec[1]) and _is_float(bvec[2])
                    and _is_float(bval) and bval != 0):
                return True
    return False
Check if the bvals are stored in the second of 2 currently known ways for single frame dti
44,756
def _multiframe_to_nifti(dicom_input, output_file):
    """Convert a philips 4D or anatomical multiframe series to a nifti.

    :return: dict with the nifti image and, for diffusion imaging, the
        bval/bvec data; file entries stay None when output_file is None.
    """
    logger.info('Read dicom file')
    multiframe_dicom = dicom_input[0]

    logger.info('Creating data block')
    full_block = _multiframe_to_block(multiframe_dicom)

    logger.info('Creating affine')
    affine = _create_affine_multiframe(multiframe_dicom)

    logger.info('Creating nifti')
    nii_image = nibabel.Nifti1Image(full_block, affine)

    timing_parameters = multiframe_dicom.SharedFunctionalGroupsSequence[0].MRTimingAndRelatedParametersSequence[0]
    first_frame = multiframe_dicom[Tag(0x5200, 0x9230)][0]
    common.set_tr_te(nii_image,
                     float(timing_parameters.RepetitionTime),
                     float(first_frame[0x2005, 0x140f][0].EchoTime))

    if output_file is not None:
        logger.info('Saving nifti to disk %s' % output_file)
        nii_image.to_filename(output_file)

    if _is_multiframe_diffusion_imaging(dicom_input):
        bval_file = None
        bvec_file = None

        if output_file is not None:
            base_path = os.path.dirname(output_file)
            # Double splitext strips .nii.gz style extensions.
            base_name = os.path.splitext(os.path.splitext(os.path.basename(output_file))[0])[0]
            logger.info('Creating bval en bvec files')
            bval_file = '%s/%s.bval' % (base_path, base_name)
            bvec_file = '%s/%s.bvec' % (base_path, base_name)

        bval, bvec, bval_file, bvec_file = _create_bvals_bvecs(multiframe_dicom,
                                                               bval_file,
                                                               bvec_file,
                                                               nii_image,
                                                               output_file)

        return {'NII_FILE': output_file,
                'BVAL_FILE': bval_file,
                'BVEC_FILE': bvec_file,
                'NII': nii_image,
                'BVAL': bval,
                'BVEC': bvec}

    return {'NII_FILE': output_file,
            'NII': nii_image}
This function will convert philips 4D or anatomical multiframe series to a nifti
44,757
def _singleframe_to_nifti(grouped_dicoms, output_file):
    """Convert a philips singleframe series to a nifti.

    :return: dict with the nifti image, the max slice increment and, for
        diffusion imaging, the bval/bvec data.
    """
    logger.info('Creating data block')
    full_block = _singleframe_to_block(grouped_dicoms)

    logger.info('Creating affine')
    affine, slice_increment = common.create_affine(grouped_dicoms[0])

    logger.info('Creating nifti')
    nii_image = nibabel.Nifti1Image(full_block, affine)

    common.set_tr_te(nii_image,
                     float(grouped_dicoms[0][0].RepetitionTime),
                     float(grouped_dicoms[0][0].EchoTime))

    if output_file is not None:
        logger.info('Saving nifti to disk %s' % output_file)
        nii_image.to_filename(output_file)

    if _is_singleframe_diffusion_imaging(grouped_dicoms):
        bval_file = None
        bvec_file = None

        if output_file is not None:
            base_name = os.path.splitext(output_file)[0]
            if base_name.endswith('.nii'):
                # Strip the second extension of a .nii.gz output.
                base_name = os.path.splitext(base_name)[0]
            logger.info('Creating bval en bvec files')
            bval_file = '%s.bval' % base_name
            bvec_file = '%s.bvec' % base_name

        nii_image, bval, bvec, bval_file, bvec_file = _create_singleframe_bvals_bvecs(
            grouped_dicoms, bval_file, bvec_file, nii_image, output_file)

        return {'NII_FILE': output_file,
                'BVAL_FILE': bval_file,
                'BVEC_FILE': bvec_file,
                'NII': nii_image,
                'BVAL': bval,
                'BVEC': bvec,
                'MAX_SLICE_INCREMENT': slice_increment}

    return {'NII_FILE': output_file,
            'NII': nii_image,
            'MAX_SLICE_INCREMENT': slice_increment}
This function will convert a philips singleframe series to a nifti
44,758
def _create_affine_multiframe(multiframe_dicom):
    """Create the affine matrix for a multiframe dicom dataset.

    NOTE(review): the upstream summary mentions siemens mosaic, but the
    private 0x2001 tags read here are philips -- presumably this builds
    the philips multiframe affine; confirm.
    """
    first_frame = multiframe_dicom[Tag(0x5200, 0x9230)][0]
    last_frame = multiframe_dicom[Tag(0x5200, 0x9230)][-1]

    # Row/column direction cosines of the first frame.
    image_orient1 = numpy.array(first_frame.PlaneOrientationSequence[0].ImageOrientationPatient)[0:3].astype(float)
    image_orient2 = numpy.array(first_frame.PlaneOrientationSequence[0].ImageOrientationPatient)[3:6].astype(float)

    normal = numpy.cross(image_orient1, image_orient2)

    delta_r = float(first_frame[0x2005, 0x140f][0].PixelSpacing[0])
    delta_c = float(first_frame[0x2005, 0x140f][0].PixelSpacing[1])

    image_pos = numpy.array(first_frame.PlanePositionSequence[0].ImagePositionPatient).astype(float)
    last_image_pos = numpy.array(last_frame.PlanePositionSequence[0].ImagePositionPatient).astype(float)

    number_of_stack_slices = int(common.get_ss_value(
        multiframe_dicom[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)]))

    # Average spacing between the first and last slice of the stack.
    delta_s = abs(numpy.linalg.norm(last_image_pos - image_pos)) / (number_of_stack_slices - 1)

    return numpy.array(
        [[-image_orient1[0] * delta_c, -image_orient2[0] * delta_r, -delta_s * normal[0], -image_pos[0]],
         [-image_orient1[1] * delta_c, -image_orient2[1] * delta_r, -delta_s * normal[1], -image_pos[1]],
         [image_orient1[2] * delta_c, image_orient2[2] * delta_r, delta_s * normal[2], image_pos[2]],
         [0, 0, 0, 1]])
Function to create the affine matrix for a philips multiframe dataset
44,759
def _multiframe_to_block(multiframe_dicom):
    """Generate a full 4D datablock containing all stacks of a multiframe dicom."""
    number_of_stack_slices = int(common.get_ss_value(
        multiframe_dicom[Tag(0x2001, 0x105f)][0][Tag(0x2001, 0x102d)]))
    number_of_stacks = int(int(multiframe_dicom.NumberOfFrames) / number_of_stack_slices)

    size_x = multiframe_dicom.pixel_array.shape[2]
    size_y = multiframe_dicom.pixel_array.shape[1]
    size_z = number_of_stack_slices
    size_t = number_of_stacks

    format_string = common.get_numpy_type(multiframe_dicom)

    # Per-frame functional groups sequence.
    frame_info = multiframe_dicom[0x5200, 0x9230]

    data_4d = numpy.zeros((size_z, size_y, size_x, size_t), dtype=format_string)
    t_location_index = _get_t_position_index(multiframe_dicom)

    for slice_index in range(0, size_t * size_z):
        z_location = frame_info[slice_index].FrameContentSequence[0].InStackPositionNumber - 1
        if t_location_index is None:
            t_location = frame_info[slice_index].FrameContentSequence[0].TemporalPositionIndex - 1
        else:
            t_location = frame_info[slice_index].FrameContentSequence[0].DimensionIndexValues[t_location_index] - 1

        block_data = multiframe_dicom.pixel_array[slice_index, :, :]

        # Apply the per-frame rescale slope/intercept before storing.
        rescale_intercept = frame_info[slice_index].PixelValueTransformationSequence[0].RescaleIntercept
        rescale_slope = frame_info[slice_index].PixelValueTransformationSequence[0].RescaleSlope
        block_data = common.do_scaling(block_data, rescale_slope, rescale_intercept)

        if block_data.dtype != data_4d.dtype:
            # Scaling can promote the dtype (e.g. to float); follow it.
            data_4d = data_4d.astype(block_data.dtype)

        data_4d[z_location, :, :, t_location] = block_data

    # Transpose each timepoint from (z, y, x) to (x, y, z).
    full_block = numpy.zeros((size_x, size_y, size_z, size_t), dtype=data_4d.dtype)
    for t_index in range(0, size_t):
        data_3d = numpy.transpose(data_4d[:, :, :, t_index], (2, 1, 0))
        full_block[:, :, :, t_index] = data_3d

    return full_block
Generate a full datablock containing all stacks
44,760
def _fix_diffusion_images(bvals, bvecs, nifti, nifti_file):
    """Remove the last timepoint from the nifti, bvals and bvecs when the
    last bvec is all zeros (sometimes appended at the end by philips).

    :return: the (possibly unchanged) nifti, bvals and bvecs
    """
    # Nothing to fix when there are no bvecs at all, or when the last
    # bval is non-zero.
    if numpy.count_nonzero(bvecs) == 0 or numpy.count_nonzero(bvals[-1]) != 0:
        return nifti, bvals, bvecs

    bvals = bvals[:-1]
    bvecs = bvecs[:-1]

    new_nifti = nibabel.Nifti1Image(nifti.get_data()[:, :, :, :-1], nifti.affine)
    new_nifti.to_filename(nifti_file)
    return new_nifti, bvals, bvecs
This function will remove the last timepoint from the nifti bvals and bvecs if the last vector is 0 0 0 This is sometimes added at the end by philips
44,761
def dicom_to_nifti(dicom_input, output_file):
    """Convert an anatomical dicom series to a nifti file.

    :param dicom_input: list of dicom datasets belonging to one series
    :param output_file: path to write the nifti to (None skips saving)
    :return: dict with 'NII_FILE', 'NII' and 'MAX_SLICE_INCREMENT'
    :raises ConversionError: when no dicom files are supplied
    """
    if len(dicom_input) <= 0:
        raise ConversionError('NO_DICOM_FILES_FOUND')

    # remove duplicate slices and localizer images before validating
    dicom_input = _remove_duplicate_slices(dicom_input)
    dicom_input = _remove_localizers_by_imagetype(dicom_input)
    if settings.validate_slicecount:
        dicom_input = _remove_localizers_by_orientation(dicom_input)
        common.validate_slicecount(dicom_input)
    if settings.validate_orientation:
        common.validate_orientation(dicom_input)
    if settings.validate_orthogonal:
        common.validate_orthogonal(dicom_input)

    dicom_input = common.sort_dicoms(dicom_input)

    # when slice increment validation is disabled we still detect
    # inconsistencies so we can optionally resample instead of failing
    slice_increment_inconsistent = False
    if settings.validate_slice_increment:
        common.validate_slice_increment(dicom_input)
    elif common.is_slice_increment_inconsistent(dicom_input):
        slice_increment_inconsistent = True

    if slice_increment_inconsistent and settings.resample:
        nii_image, max_slice_increment = _convert_slice_incement_inconsistencies(dicom_input)
    else:
        data = common.get_volume_pixeldata(dicom_input)
        affine, max_slice_increment = common.create_affine(dicom_input)
        nii_image = nibabel.Nifti1Image(data, affine)

    # BUGFIX: the original checked (0018,0081) twice; RepetitionTime is
    # (0018,0080) and EchoTime is (0018,0081) — both must be present
    if Tag(0x0018, 0x0080) in dicom_input[0] and Tag(0x0018, 0x0081) in dicom_input[0]:
        common.set_tr_te(nii_image, float(dicom_input[0].RepetitionTime), float(dicom_input[0].EchoTime))

    if output_file is not None:
        logger.info('Saving nifti to disk %s' % output_file)
        nii_image.to_filename(output_file)

    return {'NII_FILE': output_file,
            'NII': nii_image,
            'MAX_SLICE_INCREMENT': max_slice_increment}
This function will convert an anatomical dicom series to a nifti
44,762
def _convert_slice_incement_inconsistencies(dicom_input):
    """Handle a series with an inconsistent slice increment by splitting it
    into sub-volumes with a consistent increment, converting each one, and
    resampling them into a single nifti.

    :param dicom_input: sorted list of dicom datasets of one series
    :return: tuple of (resampled nifti image, maximum slice increment found)
    """
    # the increment between the first two slices is the initial reference
    increment = numpy.array(dicom_input[0].ImagePositionPatient) - numpy.array(dicom_input[1].ImagePositionPatient)

    max_slice_increment = 0

    # group consecutive slices that share (approximately) the same increment
    slice_incement_groups = []
    current_group = [dicom_input[0], dicom_input[1]]
    previous_image_position = numpy.array(dicom_input[1].ImagePositionPatient)
    for dicom in dicom_input[2:]:
        current_image_position = numpy.array(dicom.ImagePositionPatient)
        current_increment = previous_image_position - current_image_position
        max_slice_increment = max(max_slice_increment, numpy.linalg.norm(current_increment))
        # NOTE(review): the same allclose comparison is evaluated twice;
        # an if/else would suffice, behaviour is unchanged either way
        if numpy.allclose(increment, current_increment, rtol=0.05, atol=0.1):
            current_group.append(dicom)
        if not numpy.allclose(increment, current_increment, rtol=0.05, atol=0.1):
            # increment changed: close this group and start a new one that
            # overlaps by one slice, using the new increment as reference
            slice_incement_groups.append(current_group)
            current_group = [current_group[-1], dicom]
            increment = current_increment
        previous_image_position = current_image_position
    slice_incement_groups.append(current_group)

    # convert each consistent group to its own nifti volume
    slice_incement_niftis = []
    for dicom_slices in slice_incement_groups:
        data = common.get_volume_pixeldata(dicom_slices)
        affine, _ = common.create_affine(dicom_slices)
        slice_incement_niftis.append(nibabel.Nifti1Image(data, affine))
    # resample all sub-volumes into one image
    nifti_volume = resample.resample_nifti_images(slice_incement_niftis)

    return nifti_volume, max_slice_increment
If slice increment inconsistency is detected (for the moment only for CT images), split the volume into subvolumes based on the slice increment, process each volume separately, and resample them into a single space constructed from the highest-resolution increment.
44,763
def is_hitachi(dicom_input):
    """Detect whether the given dicom series is a Hitachi MR dataset.

    :param dicom_input: list of dicom datasets; only the first is inspected
    :return: True when the first header is an MR image from a Hitachi device
    """
    first = dicom_input[0]
    # both fields must be present before anything can be decided
    if 'Manufacturer' not in first or 'Modality' not in first:
        return False
    return first.Modality.upper() == 'MR' and 'HITACHI' in first.Manufacturer.upper()
Use this function to detect if a dicom series is a hitachi dataset
44,764
def is_ge(dicom_input):
    """Detect whether the given dicom series is a GE MR dataset.

    :param dicom_input: list of dicom datasets; only the first is inspected
    :return: True when the first header is an MR image from a GE device
    """
    first = dicom_input[0]
    # both fields must be present before anything can be decided
    if 'Manufacturer' not in first or 'Modality' not in first:
        return False
    return first.Modality.upper() == 'MR' and 'GE MEDICAL SYSTEMS' in first.Manufacturer.upper()
Use this function to detect if a dicom series is a GE dataset
44,765
def is_philips(dicom_input):
    """Detect whether the given dicom series is a Philips MR dataset.

    :param dicom_input: list of dicom datasets; only the first is inspected
    :return: True when the first header is an MR image from a Philips device
    """
    first = dicom_input[0]
    # both fields must be present before anything can be decided
    if 'Manufacturer' not in first or 'Modality' not in first:
        return False
    return first.Modality.upper() == 'MR' and 'PHILIPS' in first.Manufacturer.upper()
Use this function to detect if a dicom series is a philips dataset
44,766
def is_siemens(dicom_input):
    """Detect whether the given dicom series is a Siemens MR dataset.

    :param dicom_input: list of dicom datasets; only the first is inspected
    :return: True when the first header is an MR image from a Siemens device
    """
    first = dicom_input[0]
    # both fields must be present before anything can be decided
    if 'Manufacturer' not in first or 'Modality' not in first:
        return False
    return first.Modality.upper() == 'MR' and 'SIEMENS' in first.Manufacturer.upper()
Use this function to detect if a dicom series is a siemens dataset
44,767
def _get_slice_pixeldata(dicom_slice):
    """Return the rescaled pixel data of a single dicom slice.

    For signed data where fewer bits are stored than allocated, values with
    the stored sign bit set are sign-extended before scaling is applied.

    :param dicom_slice: pydicom dataset of one slice
    :return: numpy array with rescale slope/intercept applied
    """
    data = dicom_slice.pixel_array
    # fix for signed data where BitsStored < BitsAllocated: values whose
    # stored sign bit is set must get ones in the unused high bits so they
    # stay negative in the wider allocated type
    if dicom_slice.BitsAllocated != dicom_slice.BitsStored and \
            dicom_slice.HighBit == dicom_slice.BitsStored - 1 and \
            dicom_slice.PixelRepresentation == 1:
        if dicom_slice.BitsAllocated == 16:
            data = data.astype(numpy.int16)  # reinterpret as a signed type
        # largest positive value representable in the stored bits
        max_value = pow(2, dicom_slice.HighBit) - 1
        # mask of all ones above the stored bits (-1 XOR max_value)
        invert_value = -1 ^ max_value
        data[data > max_value] = numpy.bitwise_or(data[data > max_value], invert_value)
        pass
    return apply_scaling(data, dicom_slice)
The slope and intercept calculation can cause the slices to have different dtypes, so we should determine the correct dtype that can cover all of them.
44,768
def set_fd_value(tag, value):
    """Set a double ('FD') value on a data element, also working when the
    transfer syntax is implicit and the VR is raw ('OB'/'UN').

    :param tag: the pydicom data element to update
    :param value: the float value to store
    """
    # with an unknown/raw VR the element stores bytes, so pack the double first
    if tag.VR in ('OB', 'UN'):
        value = struct.pack('d', value)
    tag.value = value
Setter for data that also works with an implicit transfer syntax.
44,769
def set_ss_value(tag, value):
    """Set a signed short ('SS') value on a data element, also working when
    the transfer syntax is implicit and the VR is raw ('OB'/'UN').

    :param tag: the pydicom data element to update
    :param value: the integer value to store
    """
    # with an unknown/raw VR the element stores bytes, so pack the short first
    if tag.VR in ('OB', 'UN'):
        value = struct.pack('h', value)
    tag.value = value
Setter for data that also works with an implicit transfer syntax.
44,770
def apply_scaling(data, dicom_headers):
    """Rescale the pixel data based on RescaleSlope and RescaleIntercept.

    :param data: numpy pixel array
    :param dicom_headers: pydicom dataset the scaling tags are read from
    :return: the scaled data, or the unmodified input when no scaling is set
    """
    # private Philips scaling tags are also treated as "scaling present"
    private_scale_slope_tag = Tag(0x2005, 0x100E)
    private_scale_intercept_tag = Tag(0x2005, 0x100D)
    has_scaling = ('RescaleSlope' in dicom_headers
                   or 'RescaleIntercept' in dicom_headers
                   or private_scale_slope_tag in dicom_headers
                   or private_scale_intercept_tag in dicom_headers)
    if not has_scaling:
        return data
    # missing values default to the identity transform (slope 1, intercept 0)
    rescale_slope = dicom_headers.RescaleSlope if 'RescaleSlope' in dicom_headers else 1
    rescale_intercept = dicom_headers.RescaleIntercept if 'RescaleIntercept' in dicom_headers else 0
    return do_scaling(data, rescale_slope, rescale_intercept)
Rescale the data based on the RescaleSlope and RescaleOffset Based on the scaling from pydicomseries
44,771
def write_bvec_file(bvecs, bvec_file):
    """Write an array of bvecs to an FSL-style bvec file.

    :param bvecs: numpy array of gradient vectors, one row per volume
    :param bvec_file: destination path; None disables writing
    """
    if bvec_file is None:
        return
    logger.info('Saving BVEC file: %s' % bvec_file)
    with open(bvec_file, 'w') as text_file:
        # one output line per gradient component (x, y, z)
        for axis in range(3):
            text_file.write('%s\n' % ' '.join(map(str, bvecs[:, axis])))
Write an array of bvecs to a bvec file
44,772
def write_bval_file(bvals, bval_file):
    """Write an array of bvals to an FSL-style bval file (single line).

    :param bvals: sequence of b-values, one per volume
    :param bval_file: destination path; None disables writing
    """
    if bval_file is None:
        return
    logger.info('Saving BVAL file: %s' % bval_file)
    content = ' '.join(map(str, bvals))
    with open(bval_file, 'w') as text_file:
        text_file.write('%s\n' % content)
Write an array of bvals to a bval file
44,773
def sort_dicoms(dicoms):
    """Sort the dicoms based on the image position patient.

    The slices are ordered along the axis (x, y or z) with the largest
    spatial extent; ties prefer x over y over z, as before.

    :param dicoms: list of dicom datasets
    :return: the list sorted along the dominant axis
    """
    sorted_per_axis = []
    extents = []
    for axis in range(3):
        ordered = sorted(dicoms, key=lambda x: x.ImagePositionPatient[axis])
        sorted_per_axis.append(ordered)
        extents.append(abs(ordered[-1].ImagePositionPatient[axis] - ordered[0].ImagePositionPatient[axis]))
    # max() returns the first maximal index, matching the original x>=y>=z preference
    dominant = max(range(3), key=lambda i: extents[i])
    return sorted_per_axis[dominant]
Sort the dicoms based on the image position patient.
44,774
def validate_orientation(dicoms):
    """Validate that all dicoms share the orientation of the first slice.

    :param dicoms: list of dicom datasets
    :raises ConversionValidationError: when any slice deviates beyond the tolerance
    """
    reference = numpy.array(dicoms[0].ImageOrientationPatient)
    ref_row = reference[0:3]
    ref_col = reference[3:6]
    for dicom_ in dicoms:
        current = numpy.array(dicom_.ImageOrientationPatient)
        row = current[0:3]
        col = current[3:6]
        row_ok = numpy.allclose(row, ref_row, rtol=0.001, atol=0.001)
        col_ok = numpy.allclose(col, ref_col, rtol=0.001, atol=0.001)
        if row_ok and col_ok:
            continue
        # log the offending orientation pair before aborting the conversion
        logger.warning('Image orientations not consistent through all slices')
        logger.warning('---------------------------------------------------------')
        logger.warning('%s %s' % (row, ref_row))
        logger.warning('%s %s' % (col, ref_col))
        logger.warning('---------------------------------------------------------')
        raise ConversionValidationError('IMAGE_ORIENTATION_INCONSISTENT')
Validate that all dicoms have the same orientation
44,775
def set_tr_te(nifti_image, repetition_time, echo_time):
    """Store the repetition time (TR) and echo time (TE) in the nifti header.

    :param nifti_image: nibabel image whose header is updated in place
    :param repetition_time: TR in milliseconds
    :param echo_time: TE value
    :return: the same nifti image, for chaining
    """
    header = nifti_image.header
    # TR is stored in pixdim[4], converted from milliseconds to seconds
    header.structarr['pixdim'][4] = repetition_time / 1000.0
    # both values are also encoded in the db_name field for reference
    header.structarr['db_name'] = '?TR:%.3f TE:%d' % (repetition_time, echo_time)
    return nifti_image
Set the tr and te in the nifti headers
44,776
def dicom_to_nifti(dicom_input, output_file=None):
    """Main dicom to nifti conversion function for GE images.

    Determines the type of the series (4D or anatomical) and dispatches to
    the matching conversion routine.

    :param dicom_input: list of GE dicom datasets of one series
    :param output_file: path of the resulting nifti (None skips saving)
    :return: conversion result dict from the chosen converter
    """
    assert common.is_ge(dicom_input)

    logger.info('Reading and sorting dicom files')
    groups = _get_grouped_dicoms(dicom_input)

    if not _is_4d(groups):
        # everything that is not 4d is treated as anatomical data
        logger.info('Assuming anatomical data')
        return convert_generic.dicom_to_nifti(dicom_input, output_file)

    logger.info('Found sequence type: 4D')
    return _4d_to_nifti(groups, output_file)
This is the main dicom to nifti conversion function for GE images. As input, GE images are required. It will then determine the type of images and do the correct conversion.
44,777
def _4d_to_nifti(grouped_dicoms, output_file):
    """Convert a GE 4D series to nifti, including bval/bvec output for
    diffusion series.

    :param grouped_dicoms: list of timepoints, each a list of dicom slices
    :param output_file: path of the nifti to write (None skips saving)
    :return: dict with the nifti (plus bval/bvec data for diffusion series)
    """
    # create the 4d data block
    logger.info('Creating data block')
    full_block = _get_full_block(grouped_dicoms)

    # the affine is based on the first timepoint only
    logger.info('Creating affine')
    affine, slice_increment = common.create_affine(grouped_dicoms[0])

    logger.info('Creating nifti')
    nii_image = nibabel.Nifti1Image(full_block, affine)
    common.set_tr_te(nii_image, float(grouped_dicoms[0][0].RepetitionTime), float(grouped_dicoms[0][0].EchoTime))

    # NOTE(review): this is logged even when output_file is None
    logger.info('Saving nifti to disk %s' % output_file)
    if output_file is not None:
        nii_image.to_filename(output_file)

    if _is_diffusion_imaging(grouped_dicoms):
        bval_file = None
        bvec_file = None
        # derive bval/bvec paths from the nifti path (double splitext
        # strips a possible .nii.gz extension)
        if output_file is not None:
            base_path = os.path.dirname(output_file)
            base_name = os.path.splitext(os.path.splitext(os.path.basename(output_file))[0])[0]
            logger.info('Creating bval en bvec files')
            bval_file = '%s/%s.bval' % (base_path, base_name)
            bvec_file = '%s/%s.bvec' % (base_path, base_name)
        bval, bvec = _create_bvals_bvecs(grouped_dicoms, bval_file, bvec_file)
        return {'NII_FILE': output_file,
                'BVAL_FILE': bval_file,
                'BVEC_FILE': bvec_file,
                'NII': nii_image,
                'BVAL': bval,
                'BVEC': bvec,
                'MAX_SLICE_INCREMENT': slice_increment}

    return {'NII_FILE': output_file,
            'NII': nii_image}
This function will convert ge 4d series to a nifti
44,778
def dicom_to_nifti(dicom_input, output_file=None):
    """Main dicom to nifti conversion function for Hitachi images.

    Currently every Hitachi series is handled by the generic anatomical
    converter.

    :param dicom_input: list of Hitachi dicom datasets of one series
    :param output_file: path of the resulting nifti (None skips saving)
    :return: conversion result dict from the generic converter
    """
    # bail out early when this is not actually a hitachi dataset
    assert common.is_hitachi(dicom_input)
    logger.info('Assuming anatomical data')
    return convert_generic.dicom_to_nifti(dicom_input, output_file)
This is the main dicom to nifti conversion function for Hitachi images. As input, Hitachi images are required. It will then determine the type of images and do the correct conversion.
44,779
def dicom_to_nifti(dicom_input, output_file=None):
    """Main dicom to nifti conversion function for Siemens images.

    Determines the type of the series (mosaic 4D, classic 4D or anatomical)
    and dispatches to the matching conversion routine.

    :param dicom_input: list of Siemens dicom datasets of one series
    :param output_file: path of the resulting nifti (None skips saving)
    :return: conversion result dict from the chosen converter
    """
    assert common.is_siemens(dicom_input)

    # mosaic 4d series can be detected directly on the input slices
    if _is_4d(dicom_input):
        logger.info('Found sequence type: MOSAIC 4D')
        return _mosaic_4d_to_nifti(dicom_input, output_file)

    # classic (non-mosaic) 4d series require grouping the slices first
    groups = _classic_get_grouped_dicoms(dicom_input)
    if _is_classic_4d(groups):
        logger.info('Found sequence type: CLASSIC 4D')
        return _classic_4d_to_nifti(groups, output_file)

    logger.info('Assuming anatomical data')
    return convert_generic.dicom_to_nifti(dicom_input, output_file)
This is the main dicom to nifti conversion function for Siemens images. As input, Siemens images are required. It will then determine the type of images and do the correct conversion.
44,780
def _get_sorted_mosaics ( dicom_input ) : sorted_mosaics = sorted ( dicom_input , key = lambda x : x . AcquisitionNumber ) for index in range ( 0 , len ( sorted_mosaics ) - 1 ) : if sorted_mosaics [ index ] . AcquisitionNumber >= sorted_mosaics [ index + 1 ] . AcquisitionNumber : raise ConversionValidationError ( "INCONSISTENT_ACQUISITION_NUMBERS" ) return sorted_mosaics
Search all mosaics in the dicom directory, then sort and validate them.
44,781
def _mosaic_to_block(mosaic):
    """Convert a siemens mosaic slice to a 3D block of data by reading the
    headers and splitting the mosaic grid into individual slices.

    :param mosaic: pydicom dataset of one mosaic image
    :return: numpy array ordered (x, y, z)
    """
    mosaic_type = _get_mosaic_type(mosaic)
    # in-plane slice size is encoded as two numbers in private tag (0051,100b)
    matches = re.findall(r'(\d+)\D+(\d+)\D*', str(mosaic[Tag(0x0051, 0x100b)].value))[0]
    ascconv_headers = _get_asconv_headers(mosaic)
    # number of slices comes from sSliceArray.lSize in the ascconv headers
    size = [int(matches[0]),
            int(matches[1]),
            int(re.findall(r'sSliceArray\.lSize\s*=\s*(\d+)', ascconv_headers)[0])]

    # how many tiles fit in the mosaic grid in each direction
    number_x = int(mosaic.Rows / size[0])
    number_y = int(mosaic.Columns / size[1])

    data_2d = mosaic.pixel_array
    data_3d = numpy.zeros((size[2], size[1], size[0]), dtype=data_2d.dtype)

    # walk the mosaic grid tile by tile, stopping once all slices are copied
    z_index = 0
    for y_index in range(0, number_y):
        if z_index >= size[2]:
            break
        for x_index in range(0, number_x):
            if mosaic_type == MosaicType.ASCENDING:
                data_3d[z_index, :, :] = data_2d[size[1] * y_index:size[1] * (y_index + 1),
                                                 size[0] * x_index:size[0] * (x_index + 1)]
            else:
                # descending mosaics fill the block from the last slice backwards
                data_3d[size[2] - (z_index + 1), :, :] = data_2d[size[1] * y_index:size[1] * (y_index + 1),
                                                                 size[0] * x_index:size[0] * (x_index + 1)]
            z_index += 1
            if z_index >= size[2]:
                break
    # reorder (z, y, x) -> (x, y, z)
    data_3d = numpy.transpose(data_3d, (2, 1, 0))
    return data_3d
Convert a mosaic slice to a block of data by reading the headers splitting the mosaic and appending
44,782
def _create_affine_siemens_mosaic ( dicom_input ) : dicom_header = dicom_input [ 0 ] image_orient1 = numpy . array ( dicom_header . ImageOrientationPatient ) [ 0 : 3 ] image_orient2 = numpy . array ( dicom_header . ImageOrientationPatient ) [ 3 : 6 ] normal = numpy . cross ( image_orient1 , image_orient2 ) delta_r = float ( dicom_header . PixelSpacing [ 0 ] ) delta_c = float ( dicom_header . PixelSpacing [ 1 ] ) image_pos = dicom_header . ImagePositionPatient delta_s = dicom_header . SpacingBetweenSlices return numpy . array ( [ [ - image_orient1 [ 0 ] * delta_c , - image_orient2 [ 0 ] * delta_r , - delta_s * normal [ 0 ] , - image_pos [ 0 ] ] , [ - image_orient1 [ 1 ] * delta_c , - image_orient2 [ 1 ] * delta_r , - delta_s * normal [ 1 ] , - image_pos [ 1 ] ] , [ image_orient1 [ 2 ] * delta_c , image_orient2 [ 2 ] * delta_r , delta_s * normal [ 2 ] , image_pos [ 2 ] ] , [ 0 , 0 , 0 , 1 ] ] )
Function to create the affine matrix for a siemens mosaic dataset This will work for siemens dti and 4d if in mosaic format
44,783
def resample_single_nifti(input_nifti):
    """Resample a gantry tilted image in place, overwriting the input file.

    :param input_nifti: path to the nifti file to resample
    """
    original = nibabel.load(input_nifti)
    resampled = resample_nifti_images([original])
    resampled.to_filename(input_nifti)
Resample a gantry tilted image in place
44,784
def convert_directory(dicom_directory, output_folder, compression=True, reorient=True):
    """Order all dicom files found under dicom_directory by series and
    convert each series to a nifti file in output_folder, one by one.

    :param dicom_directory: root directory that is searched recursively
    :param output_folder: directory where the nifti files are written
    :param compression: write .nii.gz instead of .nii
    :param reorient: reorient the resulting image
    """
    # group all readable dicom headers by series instance uid
    dicom_series = {}
    for root, _, files in os.walk(dicom_directory):
        for dicom_file in files:
            file_path = os.path.join(root, dicom_file)
            try:
                if compressed_dicom.is_dicom_file(file_path):
                    dicom_headers = compressed_dicom.read_file(file_path,
                                                               defer_size="1 KB",
                                                               stop_before_pixels=False,
                                                               force=dicom2nifti.settings.pydicom_read_force)
                    if not _is_valid_imaging_dicom(dicom_headers):
                        logger.info("Skipping: %s" % file_path)
                        continue
                    logger.info("Organizing: %s" % file_path)
                    if dicom_headers.SeriesInstanceUID not in dicom_series:
                        dicom_series[dicom_headers.SeriesInstanceUID] = []
                    dicom_series[dicom_headers.SeriesInstanceUID].append(dicom_headers)
            # BUGFIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; keep the best-effort behaviour
            # for real errors only
            except Exception:
                logger.warning("Unable to read: %s" % file_path)
                traceback.print_exc()

    # convert each series independently so one failure does not stop the rest
    for series_id, dicom_input in iteritems(dicom_series):
        base_filename = ""
        try:
            # build a readable filename from the series metadata,
            # falling back to the series instance uid
            if 'SeriesNumber' in dicom_input[0]:
                base_filename = _remove_accents('%s' % dicom_input[0].SeriesNumber)
                if 'SeriesDescription' in dicom_input[0]:
                    base_filename = _remove_accents('%s_%s' % (base_filename,
                                                               dicom_input[0].SeriesDescription))
                elif 'SequenceName' in dicom_input[0]:
                    base_filename = _remove_accents('%s_%s' % (base_filename,
                                                               dicom_input[0].SequenceName))
                elif 'ProtocolName' in dicom_input[0]:
                    base_filename = _remove_accents('%s_%s' % (base_filename,
                                                               dicom_input[0].ProtocolName))
            else:
                base_filename = _remove_accents(dicom_input[0].SeriesInstanceUID)

            logger.info('--------------------------------------------')
            logger.info('Start converting %s' % base_filename)
            if compression:
                nifti_file = os.path.join(output_folder, base_filename + '.nii.gz')
            else:
                nifti_file = os.path.join(output_folder, base_filename + '.nii')
            convert_dicom.dicom_array_to_nifti(dicom_input, nifti_file, reorient)
            # free the headers of the converted series before the next one
            gc.collect()
        except Exception:  # BUGFIX: was a bare `except:`
            logger.info("Unable to convert: %s" % base_filename)
            traceback.print_exc()
This function will order all dicom files by series and convert them one by one.
44,785
def clear_data(self):
    """Clear menu data left over from a previous menu generation."""
    # reset every section so stale content cannot leak into the next build
    sections = ((self.__header, 'title'),
                (self.__header, 'subtitle'),
                (self.__prologue, 'text'),
                (self.__epilogue, 'text'),
                (self.__items_section, 'items'))
    for section, attribute in sections:
        setattr(section, attribute, None)
Clear menu data from previous menu generation .
44,786
def inner_horizontal_border(self):
    """The complete inner horizontal border section, including the left and
    right border verticals."""
    # left margin, left tee, inner horizontals, right tee
    return u''.join((' ' * self.margins.left,
                     self.border_style.outer_vertical_inner_right,
                     self.inner_horizontals(),
                     self.border_style.outer_vertical_inner_left))
The complete inner horizontal border section including the left and right border verticals .
44,787
def outer_horizontal_border_bottom(self):
    """The complete outer bottom horizontal border section, including the
    left margin and both bottom corners."""
    # left margin, bottom-left corner, outer horizontals, bottom-right corner
    return u''.join((' ' * self.margins.left,
                     self.border_style.bottom_left_corner,
                     self.outer_horizontals(),
                     self.border_style.bottom_right_corner))
The complete outer bottom horizontal border section including left and right margins .
44,788
def outer_horizontal_border_top(self):
    """The complete outer top horizontal border section, including the left
    margin and both top corners."""
    # left margin, top-left corner, outer horizontals, top-right corner
    return u''.join((' ' * self.margins.left,
                     self.border_style.top_left_corner,
                     self.outer_horizontals(),
                     self.border_style.top_right_corner))
The complete outer top horizontal border section including left and right margins .
44,789
def row(self, content='', align='left'):
    """A single row of the menu: left margin, the outer verticals, and the
    formatted content in between.

    :param content: the text to place inside the row
    :param align: alignment passed through to the content formatter
    """
    vertical = self.border_style.outer_vertical
    body = self._format_content(content, align)
    return u''.join((' ' * self.margins.left, vertical, body, vertical))
A row of the menu which comprises the left and right verticals plus the given content .
44,790
def create_border(self, border_style_type):
    """Create a new MenuBorderStyle instance for the given border style type,
    defaulting to ASCII when the type is not recognized.

    :param border_style_type: a MenuBorderStyleType value
    :return: the matching MenuBorderStyle instance
    """
    factories = {
        MenuBorderStyleType.ASCII_BORDER: self.create_ascii_border,
        MenuBorderStyleType.LIGHT_BORDER: self.create_light_border,
        MenuBorderStyleType.HEAVY_BORDER: self.create_heavy_border,
        MenuBorderStyleType.DOUBLE_LINE_BORDER: self.create_doubleline_border,
        MenuBorderStyleType.HEAVY_OUTER_LIGHT_INNER_BORDER: self.create_heavy_outer_light_inner_border,
        MenuBorderStyleType.DOUBLE_LINE_OUTER_LIGHT_INNER_BORDER: self.create_doubleline_outer_light_inner_border,
    }
    factory = factories.get(border_style_type)
    if factory is None:
        # unknown types are logged and fall back to the ASCII border
        self.logger.info('Unrecognized border style type: {}. Defaulting to ASCII.'.format(border_style_type))
        factory = self.create_ascii_border
    return factory()
Create a new MenuBorderStyle instance based on the given border style type .
44,791
def is_win_python35_or_earlier():
    """Return True when running on Windows with Python 3.5 or earlier.

    BUGFIX: the original expression was
    ``win and major < 3 or (major == 3 and minor < 6)``; since ``and`` binds
    tighter than ``or``, any non-Windows platform running Python < 3.6 was
    also reported as "Windows and old Python". The platform check must apply
    to both version clauses.
    """
    if not sys.platform.startswith("win"):
        return False
    return sys.version_info.major < 3 or (
        sys.version_info.major == 3 and sys.version_info.minor < 6)
Convenience method to determine if the current platform is Windows and Python version 3 . 5 or earlier .
44,792
def set_menu(self, menu):
    """Set the menu of this item.

    Should be used instead of assigning the menu attribute directly, because
    it also keeps the wrapped submenu's parent consistent with the new owner.

    :param menu: the menu that owns this item
    """
    self.menu = menu
    self.submenu.parent = menu
Sets the menu of this item . Should be used instead of directly accessing the menu attribute for this class .
44,793
def validate(self, input_string):
    """Validate input_string against the configured regex pattern.

    Returns False (after logging) instead of raising when matching fails
    with a TypeError, e.g. when input_string is None.

    :param input_string: the string to validate
    :return: True when the pattern matches at the start of the string
    """
    try:
        return bool(match(pattern=self.pattern, string=input_string))
    except TypeError as e:
        self.log.error(
            'Exception while validating Regex, pattern={}, input_string={} - exception: {}'.format(
                self.pattern, input_string, e))
        return False
Validate input_string against a regex pattern
44,794
def process_user_input(self):
    """Process a comma-delimited or ranged list of menu selections.

    This overrides ConsoleMenu's single-selection behaviour; input that
    cannot be parsed is silently ignored.
    """
    user_input = self.screen.input()
    try:
        selected = self.__parse_range_list(user_input)
        # convert to zero-based indexes, dropping anything out of range
        selected[:] = [index - 1 for index in selected if 0 < index < len(self.items) + 1]
        for index in selected:
            self.current_option = index
            self.select()
    except Exception:
        return
This overrides the method in ConsoleMenu to allow for comma - delimited and range inputs .
44,795
def remove_item(self, item):
    """Remove the first occurrence of the specified item from the menu.

    :param item: the item to remove
    :return: True when the item was found and removed, False otherwise
    """
    for position, existing in enumerate(self.items):
        if item == existing:
            del self.items[position]
            return True
    return False
Remove the specified item from the menu .
44,796
def remove_exit(self):
    """Remove the exit item when it is the last menu entry.

    Used to make sure only the exit item is removed, never something else.

    :return: True when the exit item was removed, False otherwise
    """
    # identity check (is) guards against removing a merely-equal item
    if self.items and self.items[-1] is self.exit_item:
        del self.items[-1]
        return True
    return False
Remove the exit item if necessary . Used to make sure we only remove the exit item not something else .
44,797
def draw(self):
    """Refresh the screen and redraw the menu.

    Should be called whenever something changes that needs to be redrawn.
    """
    rendered = self.formatter.format(title=self.title,
                                     subtitle=self.subtitle,
                                     items=self.items,
                                     prologue_text=self.prologue_text,
                                     epilogue_text=self.epilogue_text)
    self.screen.printf(rendered)
Refresh the screen and redraw the menu . Should be called whenever something changes that needs to be redrawn .
44,798
def process_user_input(self):
    """Read the next input, select the corresponding menu item when the
    input is a valid 1-based index, and return the raw input.

    Non-numeric input is ignored (returns None); out-of-range numbers are
    returned without selecting anything.
    """
    user_input = self.get_input()
    try:
        choice = int(user_input)
    except Exception:
        return
    if 0 < choice <= len(self.items):
        self.current_option = choice - 1
        self.select()
    return user_input
Gets the next single character and decides what to do with it
44,799
def go_down(self):
    """Advance the selection by one item, wrapping to the top at the end,
    then redraw the menu."""
    last_index = len(self.items) - 1
    self.current_option = 0 if self.current_option >= last_index else self.current_option + 1
    self.draw()
Go down one wrap to beginning if necessary