import os
import glob

from ...utils.filemanip import split_filename
from ..base import (CommandLineInputSpec, CommandLine, traits, TraitedSpec,
                    File, StdOutCommandLine, OutputMultiPath,
                    StdOutCommandLineInputSpec, isdefined)


class Image2VoxelInputSpec(StdOutCommandLineInputSpec):
    in_file = File(exists=True, argstr='-4dimage %s', mandatory=True, position=1,
                   desc='4d image file')
    out_type = traits.Enum('float', 'char', 'short', 'int', 'long', 'double',
                           argstr='-outputdatatype %s', position=2, usedefault=True,
                           desc='"i.e. Bfloat". Can be "char", "short", "int", "long", "float" or "double"')


class Image2VoxelOutputSpec(TraitedSpec):
    voxel_order = File(exists=True, desc='path/name of 4D volume in voxel order')


class Image2Voxel(StdOutCommandLine):
    """
    Converts Analyze / NIFTI / MHA files to voxel order.

    Converts scanner-order data in a supported image format to voxel-order data.
    Either takes a 4D file (all measurements in single image)
    or a list of 3D images.

    Examples
    --------

    >>> import nipype.interfaces.camino as cmon
    >>> img2vox = cmon.Image2Voxel()
    >>> img2vox.inputs.in_file = '4d_dwi.nii'
    >>> img2vox.run()                  # doctest: +SKIP
    """
    _cmd = 'image2voxel'
    input_spec = Image2VoxelInputSpec
    output_spec = Image2VoxelOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['voxel_order'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '.B' + self.inputs.out_type


class FSL2SchemeInputSpec(StdOutCommandLineInputSpec):
    bvec_file = File(exists=True, argstr='-bvecfile %s', mandatory=True, position=1,
                     desc='b vector file')
    bval_file = File(exists=True, argstr='-bvalfile %s', mandatory=True, position=2,
                     desc='b value file')
    numscans = traits.Int(argstr='-numscans %d', units='NA',
                          desc='Output all measurements numerous (n) times, used when combining multiple scans from the same imaging session.')
    interleave = traits.Bool(argstr='-interleave',
                             desc='Interleave repeated scans. Only used with -numscans.')
    bscale = traits.Float(argstr='-bscale %d', units='NA',
                          desc='Scaling factor to convert the b-values into different units. Default is 10^6.')
    diffusiontime = traits.Float(argstr='-diffusiontime %f', units='NA',
                                 desc='Diffusion time')
    flipx = traits.Bool(argstr='-flipx', desc='Negate the x component of all the vectors.')
    flipy = traits.Bool(argstr='-flipy', desc='Negate the y component of all the vectors.')
    flipz = traits.Bool(argstr='-flipz', desc='Negate the z component of all the vectors.')
    usegradmod = traits.Bool(argstr='-usegradmod',
                             desc='Use the gradient magnitude to scale b. This option has no effect if your gradient directions have unit magnitude.')


class FSL2SchemeOutputSpec(TraitedSpec):
    scheme = File(exists=True, desc='Scheme file')


class FSL2Scheme(StdOutCommandLine):
    """
    Converts b-vectors and b-values from FSL format to a Camino scheme file.

    Examples
    --------

    >>> import nipype.interfaces.camino as cmon
    >>> makescheme = cmon.FSL2Scheme()
    >>> makescheme.inputs.bvec_file = 'bvecs'
    >>> makescheme.inputs.bval_file = 'bvals'
    >>> makescheme.run()                  # doctest: +SKIP

    """
    _cmd = 'fsl2scheme'
    input_spec = FSL2SchemeInputSpec
    output_spec = FSL2SchemeOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['scheme'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.bvec_file)
        return name + '.scheme'


class VtkStreamlinesInputSpec(StdOutCommandLineInputSpec):
    inputmodel = traits.Enum('raw', 'voxels', argstr='-inputmodel %s', usedefault=True,
                             desc='input model type (raw or voxels)')
    in_file = File(exists=True, argstr=' < %s', mandatory=True, position=-2,
                   desc='data file')
    voxeldims = traits.List(traits.Int, argstr='-voxeldims %s', minlen=3, maxlen=3,
                            units='mm', desc='voxel dimensions in mm')
    seed_file = File(exists=False, argstr='-seedfile %s', position=1,
                     desc='image containing seed points')
    target_file = File(exists=False, argstr='-targetfile %s', position=2,
                       desc='image containing integer-valued target regions')
    scalar_file = File(exists=False, argstr='-scalarfile %s', position=3,
                       desc='image that is in the same physical space as the tracts')
    colourorient = traits.Bool(argstr='-colourorient',
                               desc='Each point on the streamline is coloured by the local orientation.')
    interpolatescalars = traits.Bool(argstr='-interpolatescalars',
                                     desc='the scalar value at each point on the streamline is calculated by trilinear interpolation')
    interpolate = traits.Bool(argstr='-interpolate',
                              desc='the scalar value at each point on the streamline is calculated by trilinear interpolation')


class VtkStreamlinesOutputSpec(TraitedSpec):
    vtk = File(exists=True, desc='Streamlines in VTK format')


class VtkStreamlines(StdOutCommandLine):
    """
    Use vtkstreamlines to convert raw or voxel format streamlines to VTK polydata

    Examples
    --------

    >>> import nipype.interfaces.camino as cmon
    >>> vtk = cmon.VtkStreamlines()
    >>> vtk.inputs.in_file = 'tract_data.Bfloat'
    >>> vtk.inputs.voxeldims = [1, 1, 1]
    >>> vtk.run()                  # doctest: +SKIP
    """
    _cmd = 'vtkstreamlines'
    input_spec = VtkStreamlinesInputSpec
    output_spec = VtkStreamlinesOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['vtk'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '.vtk'


class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec):
    inputmodel = traits.Enum('raw', 'voxels', argstr='-inputmodel %s', usedefault=True,
                             desc='input model type (raw or voxels)')
    in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1,
                   desc='data file')
    maxtractpoints = traits.Int(argstr='-maxtractpoints %d', units='NA',
                                desc='maximum number of tract points')
    mintractpoints = traits.Int(argstr='-mintractpoints %d', units='NA',
                                desc='minimum number of tract points')
    maxtractlength = traits.Int(argstr='-maxtractlength %d', units='mm',
                                desc='maximum length of tracts')
    mintractlength = traits.Int(argstr='-mintractlength %d', units='mm',
                                desc='minimum length of tracts')
    datadims = traits.List(traits.Int, argstr='-datadims %s', minlen=3, maxlen=3,
                           units='voxels', desc='data dimensions in voxels')
    voxeldims = traits.List(traits.Int, argstr='-voxeldims %s', minlen=3, maxlen=3,
                            units='mm', desc='voxel dimensions in mm')
    seedpointmm = traits.List(traits.Int, argstr='-seedpointmm %s', minlen=3, maxlen=3,
                              units='mm',
                              desc='The coordinates of a single seed point for tractography in mm')
    seedpointvox = traits.List(traits.Int, argstr='-seedpointvox %s', minlen=3, maxlen=3,
                               units='voxels',
                               desc='The coordinates of a single seed point for tractography in voxels')
    seedfile = File(exists=False, argstr='-seedfile %s',
                    desc='Image Containing Seed Points')
    regionindex = traits.Int(argstr='-regionindex %d', units='NA',
                             desc='index of specific region to process')
    iterations = traits.Float(argstr='-iterations %d', units='NA',
                              desc='Number of streamlines generated for each seed. Not required when outputting streamlines, but needed to create PICo images. The default is 1 if the output is streamlines, and 5000 if the output is connection probability images.')
    targetfile = File(exists=False, argstr='-targetfile %s',
                      desc='Image containing target volumes.')
    allowmultitargets = traits.Bool(argstr='-allowmultitargets',
                                    desc='Allows streamlines to connect to multiple target volumes.')
    directional = traits.List(traits.Int, argstr='-directional %s', minlen=3, maxlen=3,
                              units='NA',
                              desc='Splits the streamlines at the seed point and computes separate connection probabilities for each segment. Streamline segments are grouped according to their dot product with the vector (X, Y, Z). The ideal vector will be tangential to the streamline trajectory at the seed, such that the streamline projects from the seed along (X, Y, Z) and -(X, Y, Z). However, it is only necessary for the streamline trajectory to not be orthogonal to (X, Y, Z).')
    waypointfile = File(exists=False, argstr='-waypointfile %s',
                        desc='Image containing waypoints. Waypoints are defined as regions of the image with the same intensity, where 0 is background and any value > 0 is a waypoint.')
    truncateloops = traits.Bool(argstr='-truncateloops',
                                desc='This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, it is truncated upon a second entry to the waypoint.')
    discardloops = traits.Bool(argstr='-discardloops',
                               desc='This option allows streamlines to enter a waypoint exactly once. After the streamline leaves the waypoint, the entire streamline is discarded upon a second entry to the waypoint.')
    exclusionfile = File(exists=False, argstr='-exclusionfile %s',
                         desc='Image containing exclusion ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.')
    truncateinexclusion = traits.Bool(argstr='-truncateinexclusion',
                                      desc='Retain segments of a streamline before entry to an exclusion ROI.')
    endpointfile = File(exists=False, argstr='-endpointfile %s',
                        desc='Image containing endpoint ROIs. This should be an Analyze 7.5 header / image file.hdr and file.img.')
    resamplestepsize = traits.Float(argstr='-resamplestepsize %d', units='NA',
                                    desc='Each point on a streamline is tested for entry into target, exclusion or waypoint volumes. If the length between points on a tract is not much smaller than the voxel length, then streamlines may pass through part of a voxel without being counted. To avoid this, the program resamples streamlines such that the step size is one tenth of the smallest voxel dimension in the image. This increases the size of raw or oogl streamline output and incurs some performance penalty. The resample resolution can be controlled with this option or disabled altogether by passing a negative step size or by passing the -noresample option.')
    noresample = traits.Bool(argstr='-noresample',
                             desc='Disables resampling of input streamlines. Resampling is automatically disabled if the input model is voxels.')
    outputtracts = traits.Bool(argstr='-outputtracts',
                               desc='Output streamlines in raw binary format.')
    outputroot = File(exists=False, argstr='-outputroot %s',
                      desc='Prepended onto all output file names.')
    gzip = traits.Bool(argstr='-gzip', desc='save the output image in gzip format')
    outputcp = traits.Bool(argstr='-outputcp', requires=['outputroot', 'seedfile'],
                           desc='output the connection probability map (Analyze image, float)')
    outputsc = traits.Bool(argstr='-outputsc', requires=['outputroot', 'seedfile'],
                           desc='output the connection probability map (raw streamlines, int)')
    outputacm = traits.Bool(argstr='-outputacm', requires=['outputroot', 'seedfile'],
                            desc='output all tracts in a single connection probability map (Analyze image)')
    outputcbs = traits.Bool(argstr='-outputcbs', requires=['outputroot', 'targetfile', 'seedfile'],
                            desc='outputs connectivity-based segmentation maps; requires target outputfile')


class ProcStreamlinesOutputSpec(TraitedSpec):
    proc = File(exists=True, desc='Processed Streamlines')
    outputroot_files = OutputMultiPath(File(exists=True))


class ProcStreamlines(StdOutCommandLine):
    """
    Process streamline data

    This program does post-processing of streamline output from track. It can either output streamlines or connection probability maps.
     * http://web4.cs.ucl.ac.uk/research/medic/camino/pmwiki/pmwiki.php?n=Man.procstreamlines

    Examples
    --------

    >>> import nipype.interfaces.camino as cmon
    >>> proc = cmon.ProcStreamlines()
    >>> proc.inputs.in_file = 'tract_data.Bfloat'
    >>> proc.run()                  # doctest: +SKIP
    """
    _cmd = 'procstreamlines'
    input_spec = ProcStreamlinesInputSpec
    output_spec = ProcStreamlinesOutputSpec

    def __init__(self, *args, **kwargs):
        super(ProcStreamlines, self).__init__(*args, **kwargs)
        self.outputroot_files = []

    def _format_arg(self, name, spec, value):
        if name == 'outputroot':
            return spec.argstr % self._get_actual_outputroot(value)
        return super(ProcStreamlines, self)._format_arg(name, spec, value)

    def _run_interface(self, runtime):
        outputroot = self.inputs.outputroot
        if isdefined(outputroot):
            actual_outputroot = self._get_actual_outputroot(outputroot)
            base, filename, ext = split_filename(actual_outputroot)
            if not os.path.exists(base):
                os.makedirs(base)
            new_runtime = super(ProcStreamlines, self)._run_interface(runtime)
            self.outputroot_files = glob.glob(
                os.path.join(os.getcwd(), actual_outputroot + '*'))
            return new_runtime
        else:
            new_runtime = super(ProcStreamlines, self)._run_interface(runtime)
            return new_runtime

    def _get_actual_outputroot(self, outputroot):
        actual_outputroot = os.path.join('procstream_outfiles', outputroot)
        return actual_outputroot

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['proc'] = os.path.abspath(self._gen_outfilename())
        outputs['outputroot_files'] = self.outputroot_files
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '_proc'


class TractShredderInputSpec(StdOutCommandLineInputSpec):
    in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2,
                   desc='tract file')
    offset = traits.Int(argstr='%d', units='NA', position=1,
                        desc='initial offset of offset tracts')
    bunchsize = traits.Int(argstr='%d', units='NA', position=2,
                           desc='reads and outputs a group of bunchsize tracts')
    space = traits.Int(argstr='%d', units='NA', position=3,
                       desc='skips space tracts')


class TractShredderOutputSpec(TraitedSpec):
    shredded = File(exists=True, desc='Shredded tract file')


class TractShredder(StdOutCommandLine):
    """
    Extracts bunches of streamlines.

    tractshredder works in a similar way to shredder, but processes streamlines instead of scalar data.
    The input is raw streamlines, in the format produced by track or procstreamlines.

    The program first makes an initial offset of offset tracts. It then reads and outputs a group of
    bunchsize tracts, skips space tracts, and repeats until there is no more input.

    Examples
    --------

    >>> import nipype.interfaces.camino as cmon
    >>> shred = cmon.TractShredder()
    >>> shred.inputs.in_file = 'tract_data.Bfloat'
    >>> shred.inputs.offset = 0
    >>> shred.inputs.bunchsize = 1
    >>> shred.inputs.space = 2
    >>> shred.run()                  # doctest: +SKIP
    """
    _cmd = 'tractshredder'
    input_spec = TractShredderInputSpec
    output_spec = TractShredderOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['shredded'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '_shredded'


class DT2NIfTIInputSpec(CommandLineInputSpec):
    in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1,
                   desc='tract file')
    output_root = File(argstr='-outputroot %s', position=2, genfile=True,
                       desc='filename root prepended onto the names of three output files.')
    header_file = File(exists=True, argstr='-header %s', mandatory=True, position=3,
                       desc='A Nifti .nii or .hdr file containing the header information')


class DT2NIfTIOutputSpec(TraitedSpec):
    dt = File(exists=True, desc='diffusion tensors in NIfTI format')
    exitcode = File(exists=True, desc='exit codes from Camino reconstruction in NIfTI format')
    lns0 = File(exists=True, desc='estimated lns0 from Camino reconstruction in NIfTI format')


class DT2NIfTI(CommandLine):
    """
    Converts camino tensor data to NIfTI format

    Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files.
    """
    _cmd = 'dt2nii'
    input_spec = DT2NIfTIInputSpec
    output_spec = DT2NIfTIOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        output_root = self._gen_outputroot()
        outputs['dt'] = os.path.abspath(output_root + 'dt.nii')
        outputs['exitcode'] = os.path.abspath(output_root + 'exitcode.nii')
        outputs['lns0'] = os.path.abspath(output_root + 'lns0.nii')
        return outputs

    def _gen_outfilename(self):
        return self._gen_outputroot()

    def _gen_outputroot(self):
        output_root = self.inputs.output_root
        if not isdefined(output_root):
            output_root = self._gen_filename('output_root')
        return output_root

    def _gen_filename(self, name):
        if name == 'output_root':
            _, filename, _ = split_filename(self.inputs.in_file)
            filename = filename + '_'
        return filename


class NIfTIDT2CaminoInputSpec(CommandLineInputSpec):
    in_file = File(exists=True, argstr='-inputfile %s', mandatory=True, position=1,
                   desc='A NIFTI-1 dataset containing diffusion tensors. The tensors are assumed to be in lower-triangular order as specified by the NIFTI standard for the storage of symmetric matrices. This file should be either a .nii or a .hdr file.')
    s0_file = File(exists=True, argstr='-s0 %s',
                   desc='File containing the unweighted signal for each voxel, may be a raw binary file (specify type with -inputdatatype) or a supported image file.')
    lns0_file = File(exists=True, argstr='-lns0 %s',
                     desc='File containing the log of the unweighted signal for each voxel, may be a raw binary file (specify type with -inputdatatype) or a supported image file.')
    bgmask = File(exists=True, argstr='-bgmask %s',
                  desc='Binary valued brain / background segmentation, may be a raw binary file (specify type with -maskdatatype) or a supported image file.')
    scaleslope = traits.Float(argstr='-scaleslope %s',
                              desc='A value v in the diffusion tensor is scaled to v * s + i. This is applied after any scaling specified by the input image. Default is 1.0.')
    scaleinter = traits.Float(argstr='-scaleinter %s',
                              desc='A value v in the diffusion tensor is scaled to v * s + i. This is applied after any scaling specified by the input image. Default is 0.0.')
    uppertriangular = traits.Bool(argstr='-uppertriangular %s',
                                  desc='Specifies input in upper-triangular (VTK style) order.')


class NIfTIDT2CaminoOutputSpec(TraitedSpec):
    out_file = File(desc='diffusion tensors data in Camino format')


class NIfTIDT2Camino(CommandLine):
    """
    Converts NIFTI-1 diffusion tensors to Camino format. The program reads the
    NIFTI header but does not apply any spatial transformations to the data. The
    NIFTI intensity scaling parameters are applied.

    The output is the tensors in Camino voxel ordering: [exit, ln(S0), dxx, dxy,
    dxz, dyy, dyz, dzz].

    The exit code is set to 0 unless a background mask is supplied, in which case
    the code is 0 in brain voxels and -1 in background voxels.

    The value of ln(S0) in the output is taken from a file if one is supplied,
    otherwise it is set to 0.

    NOTE FOR FSL USERS - FSL's dtifit can output NIFTI tensors, but they are not
    stored in the usual way (which is using NIFTI_INTENT_SYMMATRIX). FSL's
    tensors follow the ITK / VTK "upper-triangular" convention, so you will need
    to use the -uppertriangular option to convert these correctly.
    """
    _cmd = 'niftidt2camino'
    input_spec = NIfTIDT2CaminoInputSpec
    output_spec = NIfTIDT2CaminoOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['out_file'] = self._gen_filename('out_file')
        return outputs

    def _gen_filename(self, name):
        if name == 'out_file':
            _, filename, _ = split_filename(self.inputs.in_file)
        return filename


class AnalyzeHeaderInputSpec(StdOutCommandLineInputSpec):
    in_file = File(exists=True, argstr='< %s', mandatory=True, position=1,
                   desc='Tensor-fitted data filename')
    scheme_file = File(exists=True, argstr='%s', position=2,
                       desc='Camino scheme file (b values / vectors, see camino.fsl2scheme)')
    readheader = File(exists=True, argstr='-readheader %s', position=3,
                      desc='Reads header information from file and prints to stdout. If this option is not specified, then the program writes a header based on the other arguments.')
    printimagedims = File(exists=True, argstr='-printimagedims %s', position=3,
                          desc='Prints image data and voxel dimensions as Camino arguments and exits.')
    printprogargs = File(exists=True, argstr='-printprogargs %s', position=3,
                         desc='Prints data dimension (and type, if relevant) arguments for a specific Camino program, where prog is one of shredder, scanner2voxel, vcthreshselect, pdview, track.')
    printintelbyteorder = File(exists=True, argstr='-printintelbyteorder %s', position=3,
                               desc='Prints 1 if the header is little-endian, 0 otherwise.')
    printbigendian = File(exists=True, argstr='-printbigendian %s', position=3,
                          desc='Prints 1 if the header is big-endian, 0 otherwise.')
    initfromheader = File(exists=True, argstr='-initfromheader %s', position=3,
                          desc='Reads header information from file and initializes a new header with the values read from the file. You may replace any combination of fields in the new header by specifying subsequent options.')
    data_dims = traits.List(traits.Int, argstr='-datadims %s', minlen=3, maxlen=3,
                            units='voxels', desc='data dimensions in voxels')
    voxel_dims = traits.List(traits.Float, argstr='-voxeldims %s', minlen=3, maxlen=3,
                             units='mm', desc='voxel dimensions in mm')
    centre = traits.List(traits.Int, argstr='-centre %s', minlen=3, maxlen=3, units='mm',
                         desc='Voxel specifying origin of Talairach coordinate system for SPM, default [0 0 0].')
    picoseed = traits.List(traits.Int, argstr='-picoseed %s', minlen=3, maxlen=3, units='mm',
                           desc='Voxel specifying the seed (for PICo maps), default [0 0 0].')
    nimages = traits.Int(argstr='-nimages %d', units='NA',
                         desc='Number of images in the img file. Default 1.')
    datatype = traits.Enum('byte', 'char', '[u]short', '[u]int', 'float', 'complex', 'double',
                           argstr='-datatype %s', mandatory=True,
                           desc='The char datatype is 8 bit (not the 16 bit char of Java), as specified by the Analyze 7.5 standard. The byte, ushort and uint types are not part of the Analyze specification but are supported by SPM.')
    offset = traits.Int(argstr='-offset %d', units='NA',
                        desc='According to the Analyze 7.5 standard, this is the byte offset in the .img file at which voxels start. This value can be negative to specify that the absolute value is applied for every image in the file.')
    greylevels = traits.List(traits.Int, argstr='-gl %s', minlen=2, maxlen=2, units='NA',
                             desc='Minimum and maximum greylevels. Stored as shorts in the header.')
    scaleslope = traits.Float(argstr='-scaleslope %d', units='NA',
                              desc='Intensities in the image are scaled by this factor by SPM and MRIcro. Default is 1.0.')
    scaleinter = traits.Float(argstr='-scaleinter %d', units='NA',
                              desc='Constant to add to the image intensities. Used by SPM and MRIcro.')
    description = traits.String(argstr='-description %s',
                                desc='Short description - No spaces, max length 79 bytes. Will be null terminated automatically.')
    intelbyteorder = traits.Bool(argstr='-intelbyteorder',
                                 desc='Write header in intel byte order (little-endian).')
    networkbyteorder = traits.Bool(argstr='-networkbyteorder',
                                   desc='Write header in network byte order (big-endian). This is the default for new headers.')


class AnalyzeHeaderOutputSpec(TraitedSpec):
    header = File(exists=True, desc='Analyze header')


class AnalyzeHeader(StdOutCommandLine):
    """
    Create or read an Analyze 7.5 header file.

    Analyze image header, provides support for the most common header fields.
    Some fields, such as patient_id, are not currently supported. The program allows
    three nonstandard options: the field image_dimension.funused1 is the image scale.
    The intensity of each pixel in the associated .img file is (image value from file) * scale.
    Also, the origin of the Talairach coordinates (midline of the anterior commissure) are encoded
    in the field data_history.originator. These changes are included for compatibility with SPM.

    All headers written with this program are big endian by default.

    Example
    -------

    >>> import nipype.interfaces.camino as cmon
    >>> hdr = cmon.AnalyzeHeader()
    >>> hdr.inputs.in_file = 'tensor_fitted_data.Bdouble'
    >>> hdr.inputs.scheme_file = 'A.scheme'
    >>> hdr.inputs.data_dims = [256, 256, 256]
    >>> hdr.inputs.voxel_dims = [1, 1, 1]
    >>> hdr.run()                  # doctest: +SKIP
    """
    _cmd = 'analyzeheader'
    input_spec = AnalyzeHeaderInputSpec
    output_spec = AnalyzeHeaderOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['header'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '.hdr'


class ShredderInputSpec(StdOutCommandLineInputSpec):
    in_file = File(exists=True, argstr='< %s', mandatory=True, position=-2,
                   desc='raw binary data file')
    offset = traits.Int(argstr='%d', units='NA', position=1,
                        desc='initial offset of offset bytes')
    chunksize = traits.Int(argstr='%d', units='NA', position=2,
                           desc='reads and outputs a chunk of chunksize bytes')
    space = traits.Int(argstr='%d', units='NA', position=3,
                       desc='skips space bytes')


class ShredderOutputSpec(TraitedSpec):
    shredded = File(exists=True, desc='Shredded binary data file')


class Shredder(StdOutCommandLine):
    """
    Extracts periodic chunks from a data stream.

    Shredder makes an initial offset of offset bytes. It then reads and outputs
    chunksize bytes, skips space bytes, and repeats until there is no more input.

    If the chunksize is negative, chunks of size chunksize are read and the
    byte ordering of each chunk is reversed. The whole chunk will be reversed, so
    the chunk must be the same size as the data type, otherwise the order of the
    values in the chunk, as well as their endianness, will be reversed.

    Examples
    --------

    >>> import nipype.interfaces.camino as cam
    >>> shred = cam.Shredder()
    >>> shred.inputs.in_file = 'SubjectA.Bfloat'
    >>> shred.inputs.offset = 0
    >>> shred.inputs.chunksize = 1
    >>> shred.inputs.space = 2
    >>> shred.run()                  # doctest: +SKIP
    """
    _cmd = 'shredder'
    input_spec = ShredderInputSpec
    output_spec = ShredderOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['shredded'] = os.path.abspath(self._gen_outfilename())
        return outputs

    def _gen_outfilename(self):
        _, name, _ = split_filename(self.inputs.in_file)
        return name + '_shredded'
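# Usage sketches for the two converters above that carry no doctest examples in
# their docstrings (DT2NIfTI and NIfTIDT2Camino). These follow the
# "# doctest: +SKIP" style used elsewhere in this module; the filenames are
# placeholders, not data shipped with nipype.
#
#   >>> import nipype.interfaces.camino as cmon
#   >>> dt2nii = cmon.DT2NIfTI()
#   >>> dt2nii.inputs.in_file = 'tensor_fitted_data.Bdouble'
#   >>> dt2nii.inputs.header_file = 'tensor_fitted_data.hdr'
#   >>> dt2nii.run()                  # doctest: +SKIP
#
#   >>> nii2camino = cmon.NIfTIDT2Camino()
#   >>> nii2camino.inputs.in_file = 'fsl_dtifit_tensor.nii'
#   >>> nii2camino.inputs.uppertriangular = True  # FSL / VTK tensor ordering
#   >>> nii2camino.run()              # doctest: +SKIP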