os.path.abspath

Here are examples of the Python API os.path.abspath taken from open source projects. By voting up an example, you can indicate which examples are most useful and appropriate.

156 Examples
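
For reference, os.path.abspath(path) returns a normalized, absolutized version of path, resolved against the current working directory; the path does not have to exist. A minimal standalone sketch (file names are made up, POSIX paths assumed):

import os

os.chdir('/tmp')                           # set a known working directory
print(os.path.abspath('data.txt'))         # -> /tmp/data.txt
print(os.path.abspath('../etc/hosts'))     # -> /etc/hosts ('..' is normalized away)
print(os.path.abspath('/already/abs'))     # absolute input comes back unchanged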

Example 151

Project: shinken Source File: arbiterdaemon.py
    def load_config_file(self):
        logger.info("Loading configuration")
        # REF: doc/shinken-conf-dispatching.png (1)
        buf = self.conf.read_config(self.config_files)
        raw_objects = self.conf.read_config_buf(buf)

        logger.debug("Opening local log file")

        # First we need to get arbiters and modules
        # so we can ask them for objects
        self.conf.create_objects_for_type(raw_objects, 'arbiter')
        self.conf.create_objects_for_type(raw_objects, 'module')

        self.conf.early_arbiter_linking()

        # Search which Arbiterlink I am
        for arb in self.conf.arbiters:
            if arb.is_me(self.arb_name):
                arb.need_conf = False
                self.me = arb
                self.is_master = not self.me.spare
                if self.is_master:
                    logger.info("I am the master Arbiter: %s", arb.get_name())
                else:
                    logger.info("I am a spare Arbiter: %s", arb.get_name())
                # export this data to our statsmgr object :)
                api_key = getattr(self.conf, 'api_key', '')
                secret = getattr(self.conf, 'secret', '')
                http_proxy = getattr(self.conf, 'http_proxy', '')
                statsd_host = getattr(self.conf, 'statsd_host', 'localhost')
                statsd_port = getattr(self.conf, 'statsd_port', 8125)
                statsd_interval = getattr(self.conf, 'statsd_interval', 5)
                statsd_prefix = getattr(self.conf, 'statsd_prefix', 'shinken')
                statsd_enabled = getattr(self.conf, 'statsd_enabled', False)
                statsd_types = getattr(self.conf, 'statsd_types', None)
                statsd_pattern = getattr(self.conf, 'statsd_pattern', '')
                statsmgr.register(self, arb.get_name(), 'arbiter',
                                  api_key=api_key, secret=secret, http_proxy=http_proxy,
                                  statsd_host=statsd_host, statsd_port=statsd_port,
                                  statsd_prefix=statsd_prefix,
                                  statsd_enabled=statsd_enabled,
                                  statsd_interval=statsd_interval,
                                  statsd_types=statsd_types,
                                  statsd_pattern=statsd_pattern)

                # Set myself as alive ;)
                self.me.alive = True
            else:  # not me
                arb.need_conf = True

        if not self.me:
            sys.exit("Error: I cannot find my own Arbiter object, I bail out. "
                     "To solve this, please change the host_name parameter of "
                     "the Arbiter object in the file shinken-specific.cfg "
                     "to the value %s. Thanks." % socket.gethostname())

        logger.info("My own modules: " + ','.join([m.get_name() for m in self.me.modules]))

        self.modules_dir = getattr(self.conf, 'modules_dir', '')

        # Ok it's time to load the module manager now!
        self.load_modules_manager()
        # we request the instances without starting them
        # (for those that are concerned, i.e. "external" modules):
        # we will *start* these instances after we have been daemonized (if requested)
        self.modules_manager.set_modules(self.me.modules)
        self.do_load_modules()

        # Call modules that manage this read configuration pass
        self.hook_point('read_configuration')

        # Call modules get_objects() to load new objects from them
        # (example modules: glpi, mongodb, dummy_arbiter)
        self.load_modules_configuration_objects(raw_objects)

        # Resume standard operations
        self.conf.create_objects(raw_objects)

        # Maybe conf is already invalid
        if not self.conf.conf_is_correct:
            sys.exit("***> One or more problems was encountered "
                     "while processing the config files...")

        # Manage all post-conf modules
        self.hook_point('early_configuration')

        # Ok here maybe we should stop because we are in a pure migration run
        if self.migrate:
            logger.info("Migration MODE. Early exiting from configuration relinking phase")
            return

        # Load all file triggers
        self.conf.load_triggers()

        # Create Template links
        self.conf.linkify_templates()

        # All inheritances
        self.conf.apply_inheritance()

        # Explode between types
        self.conf.explode()

        # Implicit inheritance for services
        self.conf.apply_implicit_inheritance()

        # Fill default values
        self.conf.fill_default()

        # Remove templates from config
        self.conf.remove_templates()

        # We compute simple item hash
        self.conf.compute_hash()

        # Override specific service instance properties
        self.conf.override_properties()

        # Linkify objects to each other
        self.conf.linkify()

        # applying dependencies
        self.conf.apply_dependencies()

        # sets objects initial state
        self.conf.set_initial_state()

        # Hack some global parameters inherited from Nagios to create
        # on the fly some Broker modules (e.g. for the status.dat or
        # nagios.log parameters) if none are already available
        self.conf.hack_old_nagios_parameters()

        # Raise warning about currently unmanaged parameters
        if self.verify_only:
            self.conf.warn_about_unmanaged_parameters()

        # Explode global conf parameters into Classes
        self.conf.explode_global_conf()

        # set our own timezone and propagate it to other satellites
        self.conf.propagate_timezone_option()

        # Look for business rules, and create the dep tree
        self.conf.create_business_rules()
        # And link them
        self.conf.create_business_rules_dependencies()

        # Warn about useless parameters in Shinken
        if self.verify_only:
            self.conf.notice_about_useless_parameters()

        # Manage all post-conf modules
        self.hook_point('late_configuration')

        # Correct conf?
        self.conf.is_correct()

        # Maybe some elements were wrong, so we must clean up what we can
        self.conf.clean()

        # If the conf is not correct, we must get out now
        # if not self.conf.conf_is_correct:
        #    sys.exit("Configuration is incorrect, sorry, I bail out")

        # REF: doc/shinken-conf-dispatching.png (2)
        logger.info("Cutting the hosts and services into parts")
        self.confs = self.conf.cut_into_parts()

        # The conf can be incorrect here if the cut-into-parts step sees
        # errors, like a realm with hosts but no scheduler for it
        if not self.conf.conf_is_correct:
            self.conf.show_errors()
            err = "Configuration is incorrect, sorry, I bail out"
            logger.error(err)
            sys.exit(err)

        logger.info('Things look okay - No serious problems were detected '
                    'during the pre-flight check')

        # Clean objects of temporary/unnecessary attributes for live work:
        self.conf.clean()

        # Exit if we are just here for config checking
        if self.verify_only:
            sys.exit(0)

        if self.analyse:
            self.launch_analyse()
            sys.exit(0)

        # Some properties need to be "flattened" (put into strings)
        # before being sent, like realms for hosts for example
        # BEWARE: do this after the cutting part, because we stringify some properties
        self.conf.prepare_for_sending()

        # Ok, here we must check if we go on or not.
        # TODO: check OK or not
        self.log_level = self.conf.log_level
        self.use_local_log = self.conf.use_local_log
        self.local_log = self.conf.local_log
        self.pidfile = os.path.abspath(self.conf.lock_file)
        self.idontcareaboutsecurity = self.conf.idontcareaboutsecurity
        self.user = self.conf.shinken_user
        self.group = self.conf.shinken_group
        self.daemon_enabled = self.conf.daemon_enabled
        self.daemon_thread_pool_size = self.conf.daemon_thread_pool_size
        self.http_backend = getattr(self.conf, 'http_backend', 'auto')

        self.accept_passive_unknown_check_results = BoolProp.pythonize(
            getattr(self.me, 'accept_passive_unknown_check_results', '0')
        )

        # If the user set a workdir, let's use it. If not, use the
        # pidfile directory
        if self.conf.workdir == '':
            self.workdir = os.path.abspath(os.path.dirname(self.pidfile))
        else:
            self.workdir = self.conf.workdir

        #  We need to set self.host & self.port to be used by do_daemon_init_and_start
        self.host = self.me.address
        self.port = self.me.port

        logger.info("Configuration Loaded")

Example 152

Project: spinalcordtoolbox Source File: sct_dmri_moco__old.py
Function: main
def main():

    # get path of the toolbox
    status, path_sct = commands.getstatusoutput('echo $SCT_DIR')
    print path_sct

    # Initialization
    fsloutput = 'export FSLOUTPUTTYPE=NIFTI; ' # for faster processing, all outputs are in NIFTI
    fname_data = ''
    fname_bvecs = ''
    fname_schedule = path_sct+'/flirtsch/schedule_TxTy.sch'
    interp = param.interp
    remove_temp_files = param.remove_temp_files
    verbose = param.verbose
    start_time = time.time()

    # Parameters for debug mode
    if param.debug:
        fname_data = path_sct+'/testing/data/errsm_23/dmri/dmri.nii.gz'
        fname_bvecs = path_sct+'/testing/data/errsm_23/dmri/bvecs.txt'
        interp = 'trilinear'
        remove_temp_files = 0
        verbose = 1

    # Check input parameters
    try:
        opts, args = getopt.getopt(sys.argv[1:],'hb:i:v:s:')
    except getopt.GetoptError:
        usage()
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in ("-b"):
            fname_bvecs = arg
        elif opt in ("-i"):
            fname_data = arg
        elif opt in ('-s'):
            interp = str(arg)
        elif opt in ('-v'):
            verbose = int(arg)

    # display usage if a mandatory argument is not provided
    if fname_data == '' or fname_bvecs == '':
        usage()

    # check existence of input files
    sct.check_file_exist(fname_data)
    sct.check_file_exist(fname_bvecs)

    # print arguments
    print '\nCheck parameters:'
    print '.. DWI data:             '+fname_data
    print '.. bvecs file:           '+fname_bvecs
    print ''

    # Get full path
    fname_data = os.path.abspath(fname_data)
    fname_bvecs = os.path.abspath(fname_bvecs)

    # Extract path, file and extension
    path_data, file_data, ext_data = sct.extract_fname(fname_data)

    # create temporary folder
    path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
    sct.run('mkdir '+path_tmp)

    # go to tmp folder
    os.chdir(path_tmp)

    # Get size of data
    print '\nGet dimensions data...'
    nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname_data)
    print '.. '+str(nx)+' x '+str(ny)+' x '+str(nz)+' x '+str(nt)

    # Open bvecs file
    print '\nOpen bvecs file...'
    bvecs = []
    with open(fname_bvecs) as f:
        for line in f:
            bvecs_new = map(float, line.split())
            bvecs.append(bvecs_new)

    # Check if bvecs file is nx3
    if not len(bvecs[0][:]) == 3:
        print '.. WARNING: bvecs file is 3xn instead of nx3. Consider using sct_dmri_transpose_bvecs.'
        print 'Transpose bvecs...'
        # transpose bvecs
        bvecs = zip(*bvecs)

    # Identify b=0 and DW images
    print '\nIdentify b=0 and DW images...'
    index_b0 = []
    index_dwi = []
    for it in xrange(0,nt):
        if math.sqrt(math.fsum([i**2 for i in bvecs[it]])) < 0.01:
            index_b0.append(it)
        else:
            index_dwi.append(it)
    n_b0 = len(index_b0)
    n_dwi = len(index_dwi)
    print '.. Index of b=0:'+str(index_b0)
    print '.. Index of DWI:'+str(index_dwi)

    #TODO: check if number of bvecs and nt match

    # Split into T dimension
    print '\nSplit along T dimension...'
    #cmd = fsloutput+'fslsplit tmp.data tmp.data_splitT'
    status, output = sct.run(fsloutput+'fslsplit '+fname_data+' tmp.data_splitT')

    # retrieve output names
    status, output = sct.run('ls tmp.data_splitT*.*')
    file_data_split = output.split()
    # Remove .nii extension
    file_data_split = [file_data_split[i].replace('.nii','') for i in xrange (0,len(file_data_split))]

    # Merge b=0 images
    print '\nMerge b=0...'
    file_b0 = 'tmp.b0'
    cmd = fsloutput+'fslmerge -t '+file_b0
    for it in xrange(0,n_b0):
        cmd += ' '+file_data_split[index_b0[it]]
    #print('>> '+cmd)
    status, output = sct.run(cmd)

    # Merge DWI images
    print '\nMerge DWI...'
    file_dwi = 'tmp.dwi'
    cmd = fsloutput+'fslmerge -t '+file_dwi
    for it in xrange(0,n_dwi):
        cmd += ' '+file_data_split[index_dwi[it]]
    status, output = sct.run(cmd)

    # Average b=0 images
    print '\nAverage b=0...'
    file_b0_mean = 'tmp.b0_mean'
    cmd = fsloutput+'fslmaths '+file_b0+' -Tmean '+file_b0_mean
    status, output = sct.run(cmd)

    # Average DWI images
    print '\nAverage DWI...'
    file_dwi_mean = 'tmp.dwi_mean'
    cmd = fsloutput+'fslmaths '+file_dwi+' -Tmean '+file_dwi_mean
    status, output = sct.run(cmd)



    # REGISTER DWI TO THE MEAN DWI  -->  output transfo Tdwi
    # ---------------------------------------------------------------------------------------

    # loop across DWI data
    print '\nRegister DWI data to '+file_dwi_mean+'...'
    for it in xrange(0,n_dwi):
        # estimate transformation matrix
        file_target = file_dwi_mean
        file_mat = 'tmp.mat_'+str(index_dwi[it]).zfill(4)
        cmd = fsloutput+'flirt -in '+file_data_split[index_dwi[it]]+' -ref '+file_target+' -omat '+file_mat+' -cost normcorr -schedule '+fname_schedule+' -interp trilinear -out '+file_data_split[index_dwi[it]]+'_moco'
        status, output = sct.run(cmd)

    # Merge corrected DWI images
    print '\nMerge corrected DWI...'
    file_dwi = 'tmp.dwi_moco'
    cmd = fsloutput+'fslmerge -t '+file_dwi
    for it in xrange(0,n_dwi):
        cmd += ' '+file_data_split[index_dwi[it]]+'_moco'
    status, output = sct.run(cmd)

    # Average corrected DWI
    print '\nAverage corrected DWI...'
    file_dwi_mean = 'tmp.dwi_moco_mean'
    cmd = fsloutput+'fslmaths '+file_dwi+' -Tmean '+file_dwi_mean
    status, output = sct.run(cmd)


    # REGISTER B=0 DATA TO THE FIRST B=0  --> output transfo Tb0
    # ---------------------------------------------------------------------------------------
    print '\nRegister b=0 data to the first b=0...'
    for it in xrange(0,n_b0):
        # estimate transformation matrix
        file_target = file_data_split[int(index_b0[0])]
        file_mat = 'tmp.mat_'+str(index_b0[it]).zfill(4)
        cmd = fsloutput+'flirt -in '+file_data_split[index_b0[it]]+' -ref '+file_target+' -omat '+file_mat+' -cost normcorr -forcescaling -2D -out '+file_data_split[index_b0[it]]+'_moco'
        status, output = sct.run(cmd)

    # Merge corrected b=0 images
    print '\nMerge corrected b=0...'
    cmd = fsloutput+'fslmerge -t tmp.b0_moco'
    for it in xrange(0,n_b0):
        cmd += ' '+file_data_split[index_b0[it]]+'_moco'
    status, output = sct.run(cmd)

    # Average corrected b=0
    print '\nAverage corrected b=0...'
    cmd = fsloutput+'fslmaths tmp.b0_moco -Tmean tmp.b0_moco_mean'
    status, output = sct.run(cmd)


    # REGISTER MEAN DWI TO THE MEAN B=0  --> output transfo Tdwi2b0
    # ---------------------------------------------------------------------------------------
    print '\nRegister mean DWI to the mean b=0...'
    cmd = fsloutput+'flirt -in tmp.dwi_moco_mean -ref tmp.b0_moco_mean -omat tmp.mat_dwi2b0 -cost mutualinfo -forcescaling -dof 12 -2D -out tmp.dwi_mean_moco_reg2b0'
    status, output = sct.run(cmd)


    # COMBINE TRANSFORMATIONS
    # ---------------------------------------------------------------------------------------
    print '\nCombine all transformations...'
    # USE FSL convert_xfm: convert_xfm -omat AtoC.mat -concat BtoC.mat AtoB.mat
    # For DWI
    print '\n.. For DWI:'
    for it in xrange(0,n_dwi):
        cmd = 'convert_xfm -omat tmp.mat_final_'+str(index_dwi[it]).zfill(4)+' -concat tmp.mat_dwi2b0 tmp.mat_'+str(index_dwi[it]).zfill(4)
        status, output = sct.run(cmd)
    # For b=0 (don't concat because there is just one mat file -- just rename it)
    print '\n.. For b=0:'
    for it in xrange(0,n_b0):
        cmd = 'cp tmp.mat_'+str(index_b0[it]).zfill(4)+' tmp.mat_final_'+str(index_b0[it]).zfill(4)
        status, output = sct.run(cmd)


    # APPLY TRANSFORMATIONS
    # ---------------------------------------------------------------------------------------
    ## Split original data into T dimension
    #print '\nSplit original data along T dimension...'
    #cmd = fsloutput+'fslsplit '+fname_data+' tmp.data_raw_splitT'
    #print('>> '+cmd)
    #status, output = commands.getstatusoutput(cmd)

    #print '\nApply transformations to original data...'
    #for it in xrange(0,nt):
    #    cmd = fsloutput+'flirt -in tmp.data_raw_splitT'+str(it).zfill(4)+' -ref tmp.data_raw_splitT'+index_b0[0].zfill(4)+' -applyxfm -init tmp.mat_final_'+str(it).zfill(4)+' -out tmp.data_raw_splitT'+str(it).zfill(4)+'_moco'
    #    print('>> '+cmd)
    #    status, output = commands.getstatusoutput(cmd)
    #
    ## Merge corrected data
    #print '\nMerge corrected data...'
    #cmd = fsloutput+'fslmerge -t tmp.data_raw_moco'
    #for it in xrange(0,it):
    #    cmd += ' tmp.data_raw_splitT'+str(it).zfill(4)+'_moco'
    #print('>> '+cmd)
    #status, output = commands.getstatusoutput(cmd)

    print '\nApply transformations...'
    for it in xrange(0,nt):
        # -paddingsize 3 prevents missing slices at the edge
        cmd = fsloutput+'flirt -in tmp.data_splitT'+str(it).zfill(4)+' -ref tmp.data_splitT'+str(index_b0[0]).zfill(4)+' -applyxfm -init tmp.mat_final_'+str(it).zfill(4)+' -out tmp.data_splitT'+str(it).zfill(4)+'_moco -paddingsize 3'+' -interp '+interp
        status, output = sct.run(cmd)

    # Merge corrected data
    print '\nMerge all corrected data...'
    cmd = fsloutput+'fslmerge -t tmp.data_moco'
    for it in xrange(0,nt):
        cmd += ' tmp.data_splitT'+str(it).zfill(4)+'_moco'
    status, output = sct.run(cmd)

    # Merge corrected DWI images
    print '\nMerge corrected DWI...'
    cmd = fsloutput+'fslmerge -t tmp.dwi_moco'
    for it in xrange(0,n_dwi):
        cmd += ' tmp.data_splitT'+str(index_dwi[it]).zfill(4)+'_moco'
    status, output = sct.run(cmd)

    # Average corrected DWI
    print '\nAverage corrected DWI...'
    cmd = fsloutput+'fslmaths tmp.dwi_moco -Tmean tmp.dwi_moco_mean'
    status, output = sct.run(cmd)

    # Merge corrected b=0 images
    print '\nMerge corrected b=0...'
    cmd = fsloutput+'fslmerge -t tmp.b0_moco'
    for it in xrange(0,n_b0):
        cmd += ' tmp.data_splitT'+str(index_b0[it]).zfill(4)+'_moco'
    status, output = sct.run(cmd)

    # Average corrected b=0
    print '\nAverage corrected b=0...'
    cmd = fsloutput+'fslmaths tmp.b0_moco -Tmean tmp.b0_moco_mean'
    status, output = sct.run(cmd)

    # Generate output files
    print('\nGenerate output files...')
    sct.generate_output_file('tmp.data_moco.nii',path_data,file_data+'_moco',ext_data)
    sct.generate_output_file('tmp.dwi_moco_mean.nii',path_data,'dwi_moco_mean',ext_data)
    sct.generate_output_file('tmp.b0_moco_mean.nii',path_data,'b0_moco_mean',ext_data)

    # come back to parent folder
    os.chdir('..')

    # Delete temporary files
    if remove_temp_files == 1:
        print '\nDelete temporary files...'
        sct.run('rm -rf '+path_tmp)

    # display elapsed time
    elapsed_time = time.time() - start_time
    print '\nFinished! Elapsed time: '+str(int(round(elapsed_time)))+'s'

    # to view results
    print '\nTo view results, type:'
    print 'fslview '+file_data+' '+file_data+'_moco &\n'
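
Note the ordering around the two os.path.abspath calls above: the user-supplied fname_data and fname_bvecs are made absolute before the script creates a temporary folder and os.chdir()s into it; after the chdir, relative inputs would no longer resolve. The same pattern in isolation (file names are hypothetical):

import os
import tempfile

fname_data = 'dmri.nii.gz'                # possibly relative, as typed on the command line
fname_data = os.path.abspath(fname_data)  # pin it down *before* changing directory

path_tmp = tempfile.mkdtemp(prefix='tmp.')
os.chdir(path_tmp)                        # relative paths now resolve against path_tmp
print(fname_data)                         # still points at the original location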

Example 153

Project: spinalcordtoolbox Source File: sct_symetrize.py
def main():
    
    # Initialization
    fname = ''
    verbose = param.verbose

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hi:v:')
    except getopt.GetoptError:
        usage()
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in ("-i"):
            fname = arg
        elif opt in ('-v'):
            verbose = int(arg)

    # display usage if a mandatory argument is not provided
    if fname == '':
        usage()

    # check existence of input files
    print '\nCheck if file exists ...'
    sct.check_file_exist(fname)

    # Display arguments
    print '\nCheck input arguments...'
    print '  Input volume ...................... ' + fname
    print '  Verbose ........................... ' + str(verbose)


    # create temporary folder
    path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
    sct.run('mkdir '+path_tmp)

    fname = os.path.abspath(fname)
    path_data, file_data, ext_data = sct.extract_fname(fname)

    # copy files into tmp folder
    sct.run('cp '+fname+' '+path_tmp)

    # go to tmp folder
    os.chdir(path_tmp)


    # Get size of data
    print '\nGet dimensions of template...'
    nx, ny, nz, nt, px, py, pz, pt = sct.get_dimension(fname)
    print '.. '+str(nx)+' x '+str(ny)+' x '+str(nz)+' x '+str(nt)

    # extract left side and right side
    sct.run('sct_crop_image -i '+fname+' -o left.nii.gz -dim 0 -start '+str(int(0))+' -end '+str(int(floor(nx/2)-1)))
    sct.run('sct_crop_image -i '+fname+' -o right.nii.gz -dim 0 -start '+str(int(floor(nx/2)))+' -end '+str(int(nx-1)))

    # create mirror right
    right = nibabel.load('right.nii.gz')
    data_right = right.get_data()
    hdr_right = right.get_header()

    nx_r, ny_r, nz_r, nt_r, px_r, py_r, pz_r, pt_r = sct.get_dimension('right.nii.gz')

    mirror_right = data_right*0

    for i in xrange(nx_r):
        for j in xrange(ny_r):
            for k in xrange(nz_r):

                mirror_right[i,j,k] = data_right[(nx_r-1)-i,j,k]


    print '\nSave volume ...'

    img = nibabel.Nifti1Image(mirror_right, None, hdr_right)
    file_name = 'mirror_right.nii.gz'
    nibabel.save(img,file_name)

    # copy header of left to mirror right
    sct.run('fslcpgeom left.nii.gz mirror_right.nii.gz')



    # compute transfo from left to mirror right
    # MI [fixed,moving]
    # Because it takes time, there's an output that was computed on guillimin:
    # /home/django/jtouati/data/test_templateANTS/final_preprocessed/MI/test/tmp.141015123447
    cmd = 'isct_antsRegistration \
    --dimensionality 3 \
    --transform Syn[0.5,3,0] \
    --metric MI[mirror_right.nii.gz,left.nii.gz,1,32] \
    --convergence 50x20 \
    --shrink-factors 4x1 \
    --smoothing-sigmas 1x1mm \
    --Restrict-Deformation 1x1x0 \
    --output [l2r,l2r.nii.gz]'

    status, output = sct.run(cmd)
    if verbose:
        print output

    #output are : l2r0InverseWarp.nii.gz l2r.nii.gz l2r0Warp.nii.gz

    # separate the 2 warping fields along the 3 directions
    status, output = sct.run('isct_c3d -mcs l2r0Warp.nii.gz -oo l2rwarpx.nii.gz l2rwarpy.nii.gz l2rwarpz.nii.gz')
    status, output = sct.run('isct_c3d -mcs l2r0InverseWarp.nii.gz -oo l2rinvwarpx.nii.gz l2rinvwarpy.nii.gz l2rinvwarpz.nii.gz')
    print 'Loading ..'
    # load warping fields
    warpx = nibabel.load('l2rwarpx.nii.gz')
    data_warpx = warpx.get_data()
    hdr_warpx=warpx.get_header()

    warpy = nibabel.load('l2rwarpy.nii.gz')
    data_warpy = warpy.get_data()
    hdr_warpy=warpy.get_header()

    warpz = nibabel.load('l2rwarpz.nii.gz')
    data_warpz = warpz.get_data()
    hdr_warpz=warpz.get_header()

    invwarpx = nibabel.load('l2rinvwarpx.nii.gz')
    data_invwarpx = invwarpx.get_data()
    hdr_invwarpx=invwarpx.get_header()

    invwarpy = nibabel.load('l2rinvwarpy.nii.gz')
    data_invwarpy = invwarpy.get_data()
    hdr_invwarpy=invwarpy.get_header()

    invwarpz = nibabel.load('l2rinvwarpz.nii.gz')
    data_invwarpz = invwarpz.get_data()
    hdr_invwarpz=invwarpz.get_header()
    print 'Creating..'
    # create demi warping fields
    data_warpx = (data_warpx - data_warpx[::-1,:,:])/2
    data_warpy = (data_warpy + data_warpy[::-1,:,:])/2
    data_warpz = (data_warpz + data_warpz[::-1,:,:])/2
    data_invwarpx = (data_invwarpx - data_invwarpx[::-1,:,:])/2
    data_invwarpy = (data_invwarpy + data_invwarpy[::-1,:,:])/2
    data_invwarpz = (data_invwarpz + data_invwarpz[::-1,:,:])/2
    print 'Saving ..'
    # save demi warping fields
    img = nibabel.Nifti1Image(data_warpx, None, hdr_warpx)
    file_name = 'warpx_demi.nii.gz'
    nibabel.save(img,file_name)

    img = nibabel.Nifti1Image(data_warpy, None, hdr_warpy)
    file_name = 'warpy_demi.nii.gz'
    nibabel.save(img,file_name)

    img = nibabel.Nifti1Image(data_warpz, None, hdr_warpz)
    file_name = 'warpz_demi.nii.gz'
    nibabel.save(img,file_name)

    img = nibabel.Nifti1Image(data_invwarpx, None, hdr_invwarpx)
    file_name = 'invwarpx_demi.nii.gz'
    nibabel.save(img,file_name)

    img = nibabel.Nifti1Image(data_invwarpy, None, hdr_invwarpy)
    file_name = 'invwarpy_demi.nii.gz'
    nibabel.save(img,file_name)

    img = nibabel.Nifti1Image(data_invwarpz, None, hdr_invwarpz)
    file_name = 'invwarpz_demi.nii.gz'
    nibabel.save(img,file_name)
    print 'Copy ..'
    # copy transform
    status,output = sct.run('isct_c3d l2rwarpx.nii.gz warpx_demi.nii.gz -copy-transform -o warpx_demi.nii.gz')
    status,output = sct.run('isct_c3d l2rwarpy.nii.gz warpy_demi.nii.gz -copy-transform -o warpy_demi.nii.gz')
    status,output = sct.run('isct_c3d l2rwarpz.nii.gz warpz_demi.nii.gz -copy-transform -o warpz_demi.nii.gz')
    status,output = sct.run('isct_c3d l2rinvwarpx.nii.gz invwarpx_demi.nii.gz -copy-transform -o invwarpx_demi.nii.gz')
    status,output = sct.run('isct_c3d l2rinvwarpy.nii.gz invwarpy_demi.nii.gz -copy-transform -o invwarpy_demi.nii.gz')
    status,output = sct.run('isct_c3d l2rinvwarpz.nii.gz invwarpz_demi.nii.gz -copy-transform -o invwarpz_demi.nii.gz')
    
    # combine warping fields
    print 'Combine ..'
    sct.run('isct_c3d warpx_demi.nii.gz warpy_demi.nii.gz warpz_demi.nii.gz -omc 3 warpl2r_demi.nii.gz')
    sct.run('isct_c3d invwarpx_demi.nii.gz invwarpy_demi.nii.gz invwarpz_demi.nii.gz -omc 3 invwarpl2r_demi.nii.gz')
    
    #warpl2r_demi.nii.gz invwarpl2r_demi.nii.gz
    
    # apply demi warping fields
    sct.run('sct_apply_transfo -i left.nii.gz -d left.nii.gz -w warpl2r_demi.nii.gz -o left_demi.nii.gz')
    sct.run('sct_apply_transfo -i mirror_right.nii.gz -d mirror_right.nii.gz -w invwarpl2r_demi.nii.gz -o mirror_right_demi.nii.gz')
    
    #unmirror right
    
    demi_right = nibabel.load('mirror_right_demi.nii.gz')
    data_demi_right = demi_right.get_data()
    hdr_demi_right = demi_right.get_header()
    
    nx_r, ny_r, nz_r, nt_r, px_r, py_r, pz_r, pt_r = sct.get_dimension('mirror_right_demi.nii.gz')
    
    unmirror_right = data_demi_right*0

    for i in xrange(nx_r):
        for j in xrange(ny_r):
            for k in xrange(nz_r):

                unmirror_right[i,j,k] = data_demi_right[(nx_r-1)-i,j,k]
    
    print '\nSave volume ...'
    
    img = nibabel.Nifti1Image(unmirror_right, None, hdr_right)
    file_name = 'un_mirror_right.nii.gz'
    nibabel.save(img,file_name)
    
    
    sct.run('fslmaths left_demi.nii.gz -add un_mirror_right.nii.gz symetrize_template.nii.gz')
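
As in the previous example, the input is resolved with os.path.abspath before the script copies it into a temporary folder and changes directory; the absolute name is then split into directory, stem, and extension. sct.extract_fname is project-specific, but a rough standard-library equivalent might look like this:

import os

def extract_fname(fname):
    # Approximate stand-in for sct.extract_fname: split an absolute path
    # into (directory, stem, extension), treating '.nii.gz' as one extension.
    path, tail = os.path.split(os.path.abspath(fname))
    if tail.endswith('.nii.gz'):
        return path, tail[:-len('.nii.gz')], '.nii.gz'
    stem, ext = os.path.splitext(tail)
    return path, stem, ext

print(extract_fname('template.nii.gz'))   # ('/abs/cwd', 'template', '.nii.gz')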

Example 154

Project: eyed3 Source File: fixup.py
    def handleDirectory(self, directory, _):
        if not self._file_cache:
            return

        directory = os.path.abspath(directory)
        print("\n" + Style.BRIGHT + Fore.GREY +
              "Scanning directory%s %s" % (Style.RESET_ALL, directory))

        def _path(af):
            return af.path

        self._handled_one = True

        # Make sure all of the audio files have a tag.
        for f in self._file_cache:
            if f.tag is None:
                f.initTag()

        audio_files = sorted(list(self._file_cache), key=_path)

        self._file_cache = []
        edited_files = set()
        self._curr_dir_type = self.args.dir_type
        if self._curr_dir_type is None:
            types = set([a.tag.album_type for a in audio_files])
            if len(types) == 1:
                self._curr_dir_type = types.pop()

        # Check for corrections to LP, EP, COMP
        if (self._curr_dir_type is None and len(audio_files) < EP_MAX_HINT):
            # Do you want EP?
            if False in [a.tag.album_type == EP_TYPE for a in audio_files]:
                if prompt("Only %d audio files, process directory as an EP" %
                          len(audio_files),
                          default=True):
                    self._curr_dir_type = EP_TYPE
            else:
                self._curr_dir_type = EP_TYPE
        elif (self._curr_dir_type in (EP_TYPE, DEMO_TYPE) and
                len(audio_files) > EP_MAX_HINT):
            # Do you want LP?
            if prompt("%d audio files is large for type %s, process "
                      "directory as an LP" % (len(audio_files),
                                              self._curr_dir_type),
                      default=True):
                self._curr_dir_type = LP_TYPE

        last = defaultdict(lambda: None)

        album_artist = None
        artists = set()
        album = None

        if self._curr_dir_type != SINGLE_TYPE:
            album_artist, artists = self._resolveArtistInfo(audio_files)
            print(Fore.BLUE + u"Album artist: " + Style.RESET_ALL +
                  (album_artist or u""))
            print(Fore.BLUE + "Artist" + ("s" if len(artists) > 1 else "") +
                  ": " + Style.RESET_ALL + u", ".join(artists))

            album = self._getAlbum(audio_files)
            print(Fore.BLUE + "Album: " + Style.RESET_ALL + album)

            rel_date, orel_date, rec_date = self._getDates(audio_files)
            for what, d in [("Release", rel_date),
                            ("Original", orel_date),
                            ("Recording", rec_date)]:
                print(Fore.BLUE + ("%s date: " % what) + Style.RESET_ALL +
                        str(d))

            num_audio_files = len(audio_files)
            track_nums = set([f.tag.track_num[0] for f in audio_files])
            fix_track_nums = set(range(1, num_audio_files + 1)) != track_nums
            new_track_nums = []

        dir_type = self._curr_dir_type
        for f in sorted(audio_files, key=_path):
            print(Style.BRIGHT + Fore.GREEN + u"Checking" + Fore.RESET +
                  Fore.GREY + (" %s" % os.path.basename(f.path)) +
                  Style.RESET_ALL)

            if not f.tag:
                print("\tAdding new tag")
                f.initTag()
                edited_files.add(f)
            tag = f.tag

            if tag.version != ID3_V2_4:
                print("\tConverting to ID3 v2.4")
                tag.version = ID3_V2_4
                edited_files.add(f)

            if (dir_type != SINGLE_TYPE and album_artist != tag.album_artist):
                print(u"\tSetting album artist: %s" % album_artist)
                tag.album_artist = album_artist
                edited_files.add(f)

            if not tag.artist and dir_type in (VARIOUS_TYPE, SINGLE_TYPE):
                # Prompt artist
                tag.artist = prompt("Artist name", default=last["artist"])
                last["artist"] = tag.artist
            elif len(artists) == 1 and tag.artist != artists[0]:
                assert(dir_type != SINGLE_TYPE)
                print(u"\tSetting artist: %s" % artists[0])
                tag.artist = artists[0]
                edited_files.add(f)

            if tag.album != album and dir_type != SINGLE_TYPE:
                print(u"\tSetting album: %s" % album)
                tag.album = album
                edited_files.add(f)

            orig_title = tag.title
            if not tag.title:
                tag.title = prompt("Track title")
            tag.title = tag.title.strip()
            if self.args.fix_case:
                tag.title = _fixCase(tag.title)
            if orig_title != tag.title:
                print(u"\tSetting title: %s" % tag.title)
                edited_files.add(f)

            if dir_type != SINGLE_TYPE:
                # Track numbers
                tnum, ttot = tag.track_num
                update = False
                if ttot != num_audio_files:
                    update = True
                    ttot = num_audio_files

                if fix_track_nums or not (1 <= tnum <= num_audio_files):
                    tnum = None
                    while tnum is None:
                        tnum = int(prompt("Track #", type_=int))
                        if not (1 <= tnum <= num_audio_files):
                            print(Fore.RED + "Out of range: " + Fore.RESET +
                                  "1 <= %d <= %d" % (tnum, num_audio_files))
                            tnum = None
                        elif tnum in new_track_nums:
                            print(Fore.RED + "Duplicate value: " + Fore.RESET +
                                    str(tnum))
                            tnum = None
                        else:
                            update = True
                            new_track_nums.append(tnum)

                if update:
                    tag.track_num = (tnum, ttot)
                    print("\tSetting track numbers: %s" % str(tag.track_num))
                    edited_files.add(f)
            else:
                # Singles
                if tag.track_num != (None, None):
                    tag.track_num = (None, None)
                    edited_files.add(f)

            if dir_type != SINGLE_TYPE:
                # Dates
                if rec_date and tag.recording_date != rec_date:
                    print("\tSetting %s date (%s)" %
                            ("recording", str(rec_date)))
                    tag.recording_date = rec_date
                    edited_files.add(f)
                if rel_date and tag.release_date != rel_date:
                    print("\tSetting %s date (%s)" % ("release", str(rel_date)))
                    tag.release_date = rel_date
                    edited_files.add(f)
                if orel_date and tag.original_release_date != orel_date:
                    print("\tSetting %s date (%s)" % ("original release",
                                                      str(orel_date)))
                    tag.original_release_date = orel_date
                    edited_files.add(f)

            for frame in list(tag.frameiter(["USER", "PRIV"])):
                print("\tRemoving %s frames: %s" %
                        (frame.id,
                         frame.owner_id if frame.id == b"PRIV" else frame.text))
                tag.frame_set[frame.id].remove(frame)
                edited_files.add(f)

            # Add TLEN
            tlen = tag.getTextFrame("TLEN")
            real_tlen = f.info.time_secs * 1000
            if tlen is None or int(tlen) != real_tlen:
                print("\tSetting TLEN (%d)" % real_tlen)
                tag.setTextFrame("TLEN", UnicodeType(real_tlen))
                edited_files.add(f)

            # Add custom album type if special and otherwise not able to be
            # determined.
            curr_type = tag.album_type
            if curr_type != dir_type:
                print("\tSetting %s = %s" % (TXXX_ALBUM_TYPE, dir_type))
                tag.album_type = dir_type
                edited_files.add(f)

        try:
            if not self._checkCoverArt(directory, audio_files):
                if not prompt("Proceed without valid cover file", default=True):
                    return
        finally:
            self._dir_images = []

        # Determine other changes, like file and/or directory renames
        # so they can be reported before save confirmation.

        # File renaming
        file_renames = []
        if self.args.file_rename_pattern:
            format_str = self.args.file_rename_pattern
        else:
            if dir_type == SINGLE_TYPE:
                format_str = SINGLE_FNAME_FORMAT
            elif dir_type in (VARIOUS_TYPE, COMP_TYPE):
                format_str = VARIOUS_FNAME_FORMAT
            else:
                format_str = NORMAL_FNAME_FORMAT

        for f in audio_files:
            orig_name, orig_ext = os.path.splitext(os.path.basename(f.path))
            new_name = TagTemplate(format_str).substitute(f.tag, zeropad=True)
            if orig_name != new_name:
                printMsg(u"Rename file to %s%s" % (new_name, orig_ext))
                file_renames.append((f, new_name, orig_ext))

        # Directory renaming
        dir_rename = None
        if dir_type != SINGLE_TYPE:
            if self.args.dir_rename_pattern:
                dir_format = self.args.dir_rename_pattern
            else:
                if dir_type == LIVE_TYPE:
                    dir_format = LIVE_DNAME_FORMAT
                else:
                    dir_format = NORMAL_DNAME_FORMAT
            template = TagTemplate(dir_format,
                                   dotted_dates=self.args.dotted_dates)

            pref_dir = template.substitute(audio_files[0].tag, zeropad=True)
            if os.path.basename(directory) != pref_dir:
                new_dir = os.path.join(os.path.dirname(directory), pref_dir)
                printMsg("Rename directory to %s" % new_dir)
                dir_rename = (directory, new_dir)

        # Cruft files to remove
        file_removes = []
        if self._dir_files_to_remove:
            for f in self._dir_files_to_remove:
                print("Remove file: " + os.path.basename(f))
                file_removes.append(f)
        self._dir_files_to_remove = set()

        if not self.args.dry_run:
            confirmed = False

            if (edited_files or file_renames or dir_rename or file_removes):
                confirmed = prompt("\nSave changes", default=True)

            if confirmed:
                for f in edited_files:
                    print(u"Saving %s" % os.path.basename(f.path))
                    f.tag.save(version=ID3_V2_4, preserve_file_time=True)

                for f, new_name, orig_ext in file_renames:
                    printMsg(u"Renaming file to %s%s" % (new_name, orig_ext))
                    f.rename(new_name, preserve_file_time=True)

                if file_removes:
                    for f in file_removes:
                        printMsg("Removing file %s" % os.path.basename(f))
                        os.remove(f)

                if dir_rename:
                    printMsg("Renaming directory to %s" % dir_rename[1])
                    s = os.stat(dir_rename[0])
                    os.rename(dir_rename[0], dir_rename[1])
                    # With a rename, use the original access time
                    os.utime(dir_rename[1], (s.st_atime, s.st_atime))

        else:
            printMsg("\nNo changes made (run without -n/--dry-run)")

Example 155

Project: Memacs Source File: ical_test.py
Function: test_all
    def test_all(self):
        test_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'data', 'austrian_holidays_from_google.ics'
        )
        argv = "-s -cf " + test_file
        memacs = CalendarMemacs(argv=argv.split())
        data = memacs.test_get_entries()

        self.assertEqual(
            data[0],
             "** <2012-05-28 Mon>--<2012-05-28 Mon> Whit Monday")
        self.assertEqual(
            data[1],
             "   :PROPERTIES:")
        self.assertEqual(
            data[2],
             "   :ID:         b6972cddd864a2fba79ed8ff95e0f2f8948f2410")
        self.assertEqual(
            data[3],
             "   :END:")
        self.assertEqual(
            data[4],
             "** <2011-02-14 Mon>--<2011-02-14 Mon> Valentine's day")
        self.assertEqual(
            data[5],
             "   :PROPERTIES:")
        self.assertEqual(
            data[6],
             "   :ID:         66186caf3409e2086a9c199a03cb6ff440ab738b")
        self.assertEqual(
            data[7],
             "   :END:")
        self.assertEqual(
            data[8],
             "** <2010-02-14 Sun>--<2010-02-14 Sun> Valentine's day")
        self.assertEqual(
            data[9],
             "   :PROPERTIES:")
        self.assertEqual(
            data[10],
             "   :ID:         bee25809ac0695d567664decb61592ada965f858")
        self.assertEqual(
            data[11],
             "   :END:")
        self.assertEqual(
            data[12],
             "** <2012-02-14 Tue>--<2012-02-14 Tue> Valentine's day")
        self.assertEqual(
            data[13],
             "   :PROPERTIES:")
        self.assertEqual(
            data[14],
             "   :ID:         d74b79979f616f13715439a1ef7e0b2f0c69f220")
        self.assertEqual(
            data[15],
             "   :END:")
        self.assertEqual(
            data[16],
             "** <2012-12-26 Wed>--<2012-12-26 Wed> St. Stephan's Day")
        self.assertEqual(
            data[17],
             "   :PROPERTIES:")
        self.assertEqual(
            data[18],
             "   :ID:         c2559692c5465c6dad0f014f936eef320b516b9f")
        self.assertEqual(
            data[19],
             "   :END:")
        self.assertEqual(
            data[20],
             "** <2010-12-26 Sun>--<2010-12-26 Sun> St. Stephan's Day")
        self.assertEqual(
            data[21],
             "   :PROPERTIES:")
        self.assertEqual(
            data[22],
             "   :ID:         c145ba3f76fab2f9eca5a9b09695c47b1f65554a")
        self.assertEqual(
            data[23],
             "   :END:")
        self.assertEqual(
            data[24],
             "** <2011-12-26 Mon>--<2011-12-26 Mon> St. Stephan's Day")
        self.assertEqual(
            data[25],
             "   :PROPERTIES:")
        self.assertEqual(
            data[26],
             "   :ID:         0c663e887265d372cf40d3c7f1d7fd595a0114a0")
        self.assertEqual(
            data[27],
             "   :END:")
        self.assertEqual(
            data[28],
             "** <2011-12-06 Tue>--<2011-12-06 Tue> St. Nicholas")
        self.assertEqual(
            data[29],
             "   :PROPERTIES:")
        self.assertEqual(
            data[30],
             "   :ID:         821d4ce5231db9f037cf64f8b3cfeeeb65c84bee")
        self.assertEqual(
            data[31],
             "   :END:")
        self.assertEqual(
            data[32],
             "** <2010-12-06 Mon>--<2010-12-06 Mon> St. Nicholas")
        self.assertEqual(
            data[33],
             "   :PROPERTIES:")
        self.assertEqual(
            data[34],
             "   :ID:         4b1f7183ef085af82ec9b7be7845d35d9504b0b6")
        self.assertEqual(
            data[35],
             "   :END:")
        self.assertEqual(
            data[36],
             "** <2012-12-06 Thu>--<2012-12-06 Thu> St. Nicholas")
        self.assertEqual(
            data[37],
             "   :PROPERTIES:")
        self.assertEqual(
            data[38],
             "   :ID:         34c1c44697bedbe3228842204e84f45ec45b0923")
        self.assertEqual(
            data[39],
             "   :END:")
        self.assertEqual(
            data[40],
             "** <2011-12-31 Sat>--<2011-12-31 Sat> New Year's Eve")
        self.assertEqual(
            data[41],
             "   :PROPERTIES:")
        self.assertEqual(
            data[42],
             "   :ID:         ea722a9d474e8bbda41f48460ad3681e10097044")
        self.assertEqual(
            data[43],
             "   :END:")
        self.assertEqual(
            data[44],
             "** <2010-12-31 Fri>--<2010-12-31 Fri> New Year's Eve")
        self.assertEqual(
            data[45],
             "   :PROPERTIES:")
        self.assertEqual(
            data[46],
             "   :ID:         afcbb4912aaede6e31b0c4bdb9221b90f10c1b62")
        self.assertEqual(
            data[47],
             "   :END:")
        self.assertEqual(
            data[48],
             "** <2012-01-01 Sun>--<2012-01-01 Sun> New Year")
        self.assertEqual(
            data[49],
             "   :PROPERTIES:")
        self.assertEqual(
            data[50],
             "   :ID:         9a533328738c914dcc4abd5bb571e63cccae0fa2")
        self.assertEqual(
            data[51],
             "   :END:")
        self.assertEqual(
            data[52],
             "** <2010-01-01 Fri>--<2010-01-01 Fri> New Year")
        self.assertEqual(
            data[53],
             "   :PROPERTIES:")
        self.assertEqual(
            data[54],
             "   :ID:         1239f768e303f38b312d4fa84ad295f44a12ea99")
        self.assertEqual(
            data[55],
             "   :END:")
        self.assertEqual(
            data[56],
             "** <2011-01-01 Sat>--<2011-01-01 Sat> New Year")
        self.assertEqual(
            data[57],
             "   :PROPERTIES:")
        self.assertEqual(
            data[58],
             "   :ID:         c578509791f5865707d0018ad79c2eaf37210481")
        self.assertEqual(
            data[59],
             "   :END:")
        self.assertEqual(
            data[60],
             "** <2010-10-26 Tue>--<2010-10-26 Tue> National Holiday")
        self.assertEqual(
            data[61],
             "   :PROPERTIES:")
        self.assertEqual(
            data[62],
             "   :ID:         dffe086b45549c333b308892bf7b4b83485ea216")
        self.assertEqual(
            data[63],
             "   :END:")
        self.assertEqual(
            data[64],
             "** <2012-10-26 Fri>--<2012-10-26 Fri> National Holiday")
        self.assertEqual(
            data[65],
             "   :PROPERTIES:")
        self.assertEqual(
            data[66],
             "   :ID:         5d74bcc91609435775c774cf4b2c373e3b6b9a9e")
        self.assertEqual(
            data[67],
             "   :END:")
        self.assertEqual(
            data[68],
             "** <2011-10-26 Wed>--<2011-10-26 Wed> National Holiday")
        self.assertEqual(
            data[69],
             "   :PROPERTIES:")
        self.assertEqual(
            data[70],
             "   :ID:         5c99d7709dfe1e81b18e3c3343e06edd0854015f")
        self.assertEqual(
            data[71],
             "   :END:")
        self.assertEqual(
            data[72],
             "** <2011-05-01 Sun>--<2011-05-01 Sun> Labour Day")
        self.assertEqual(
            data[73],
             "   :PROPERTIES:")
        self.assertEqual(
            data[74],
             "   :ID:         5f18bf2bffdedf1fd50bca2b5ccfb8bd7554b52f")
        self.assertEqual(
            data[75],
             "   :END:")
        self.assertEqual(
            data[76],
             "** <2010-05-01 Sat>--<2010-05-01 Sat> Labour Day")
        self.assertEqual(
            data[77],
             "   :PROPERTIES:")
        self.assertEqual(
            data[78],
             "   :ID:         248bbd02f36ba32fbe36c5fdf65ab66a400307c5")
        self.assertEqual(
            data[79],
             "   :END:")
        self.assertEqual(
            data[80],
             "** <2012-05-01 Tue>--<2012-05-01 Tue> Labour Day")
        self.assertEqual(
            data[81],
             "   :PROPERTIES:")
        self.assertEqual(
            data[82],
             "   :ID:         709d57b34901a8dab5277cdec884acb989579451")
        self.assertEqual(
            data[83],
             "   :END:")
        self.assertEqual(
            data[84],
             "** <2012-12-08 Sat>--<2012-12-08 Sat> Immaculate Conception")
        self.assertEqual(
            data[85],
             "   :PROPERTIES:")
        self.assertEqual(
            data[86],
             "   :ID:         9718f2c669addc152c80d478beaeb81ab7dc2757")
        self.assertEqual(
            data[87],
             "   :END:")
        self.assertEqual(
            data[88],
             "** <2010-12-08 Wed>--<2010-12-08 Wed> Immaculate Conception")
        self.assertEqual(
            data[89],
             "   :PROPERTIES:")
        self.assertEqual(
            data[90],
             "   :ID:         7d02e0af4e44664e5a474376dd97ba838bcdb725")
        self.assertEqual(
            data[91],
             "   :END:")
        self.assertEqual(
            data[92],
             "** <2011-12-08 Thu>--<2011-12-08 Thu> Immaculate Conception")
        self.assertEqual(
            data[93],
             "   :PROPERTIES:")
        self.assertEqual(
            data[94],
             "   :ID:         20e022ce71904efac1f90d45b24b4164623a919b")
        self.assertEqual(
            data[95],
             "   :END:")
        self.assertEqual(
            data[96],
             "** <2012-04-06 Fri>--<2012-04-06 Fri> Good Friday")
        self.assertEqual(
            data[97],
             "   :PROPERTIES:")
        self.assertEqual(
            data[98],
             "   :ID:         6a9a405cdba496987ca9ab66aef623fe0ed70e26")
        self.assertEqual(
            data[99],
             "   :END:")
        self.assertEqual(
            data[100],
             "** <2010-01-06 Wed>--<2010-01-06 Wed> Epiphany")
        self.assertEqual(
            data[101],
             "   :PROPERTIES:")
        self.assertEqual(
            data[102],
             "   :ID:         6640ef7807da042944392601c4e9b046174bce8e")
        self.assertEqual(
            data[103],
             "   :END:")
        self.assertEqual(
            data[104],
             "** <2012-01-06 Fri>--<2012-01-06 Fri> Epiphany")
        self.assertEqual(
            data[105],
             "   :PROPERTIES:")
        self.assertEqual(
            data[106],
             "   :ID:         0aa9ab88fb1bfcb9b0fb430e673ec23eb42a4f38")
        self.assertEqual(
            data[107],
             "   :END:")
        self.assertEqual(
            data[108],
             "** <2011-01-06 Thu>--<2011-01-06 Thu> Epiphany")
        self.assertEqual(
            data[109],
             "   :PROPERTIES:")
        self.assertEqual(
            data[110],
             "   :ID:         36897fcbb92a331ebebb86f4cef7b0e988c020c6")
        self.assertEqual(
            data[111],
             "   :END:")
        self.assertEqual(
            data[112],
             "** <2012-04-09 Mon>--<2012-04-09 Mon> Easter Monday")
        self.assertEqual(
            data[113],
             "   :PROPERTIES:")
        self.assertEqual(
            data[114],
             "   :ID:         a71164883dcb44825f7de50f68b7ea881b1a5d23")
        self.assertEqual(
            data[115],
             "   :END:")
        self.assertEqual(
            data[116],
             "** <2012-04-08 Sun>--<2012-04-08 Sun> Easter")
        self.assertEqual(
            data[117],
             "   :PROPERTIES:")
        self.assertEqual(
            data[118],
             "   :ID:         7dcfbb563cd9300bf18f3c05965a1b0c7c6442b8")
        self.assertEqual(
            data[119],
             "   :END:")
        self.assertEqual(
            data[120],
             "** <2012-06-07 Thu>--<2012-06-07 Thu> Corpus Christi")
        self.assertEqual(
            data[121],
             "   :PROPERTIES:")
        self.assertEqual(
            data[122],
             "   :ID:         01cd602579e0774b020c3d13a760e8fa828c6aec")
        self.assertEqual(
            data[123],
             "   :END:")
        self.assertEqual(
            data[124],
             "** <2011-12-24 Sat>--<2011-12-24 Sat> Christmas Eve")
        self.assertEqual(
            data[125],
             "   :PROPERTIES:")
        self.assertEqual(
            data[126],
             "   :ID:         4b91f8eefc9723bb3022b2bedb4c4d098f7f9d39")
        self.assertEqual(
            data[127],
             "   :END:")
        self.assertEqual(
            data[128],
             "** <2010-12-24 Fri>--<2010-12-24 Fri> Christmas Eve")
        self.assertEqual(
            data[129],
             "   :PROPERTIES:")
        self.assertEqual(
            data[130],
             "   :ID:         b3b00147203e50aa69fdae2f6745b78d13a39231")
        self.assertEqual(
            data[131],
             "   :END:")
        self.assertEqual(
            data[132],
             "** <2012-12-24 Mon>--<2012-12-24 Mon> Christmas Eve")
        self.assertEqual(
            data[133],
             "   :PROPERTIES:")
        self.assertEqual(
            data[134],
             "   :ID:         23506451af37175457bfff7b113aff5ff75881e7")
        self.assertEqual(
            data[135],
             "   :END:")
        self.assertEqual(
            data[136:140], [u'** <2010-12-25 Sat>--<2010-12-25 Sat> Christmas',
                            u'   :PROPERTIES:',
                            u'   :ID:         ae52748d82d25b1ada9ef73e6c608519c0cecca5',
                            u'   :END:'])
        self.assertEqual(
            data[140:144], [u'** <2011-12-25 Sun>--<2011-12-25 Sun> Christmas',
                            u'   :PROPERTIES:',
                            u'   :ID:         802fb8acb3618909a6d7aaf605bf732a97a84d39',
                            u'   :END:'])
        self.assertEqual(
            data[144:148], [u'** <2012-12-25 Tue>--<2012-12-25 Tue> Christmas',
                            u'   :PROPERTIES:',
                            u'   :ID:         1dc9ebe2f8ff2c91ca155c30ae65a67db11cf8aa',
                            u'   :END:'])
        self.assertEqual(
            data[148:152], [u'** <2010-08-15 Sun>--<2010-08-15 Sun> Assumption',
                            u'   :PROPERTIES:',
                            u'   :ID:         c3e85e7c44c5cca95efa0751c7c52375640b43c2',
                            u'   :END:'])
        self.assertEqual(
            data[152:156], [u'** <2012-08-15 Wed>--<2012-08-15 Wed> Assumption',
                            u'   :PROPERTIES:',
                            u'   :ID:         52c49d4ca2a196e6409ac362183cedcd656975ef',
                            u'   :END:'])
        self.assertEqual(
            data[156:160], [u'** <2011-08-15 Mon>--<2011-08-15 Mon> Assumption',
                            u'   :PROPERTIES:',
                            u'   :ID:         be957e5083131794b874b06597cd1cc935d35408',
                            u'   :END:'])
        self.assertEqual(
            data[160:164], [u'** <2012-05-17 Thu>--<2012-05-17 Thu> Ascension Day',
                            u'   :PROPERTIES:',
                            u'   :ID:         f718e41128812a9864df1a1aa649c23c82f453f9',
                            u'   :END:'])
        self.assertEqual(
            data[164:168], [u"** <2011-11-02 Wed>--<2011-11-02 Wed> All Souls' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         f55d246b411fd4fe3d47205041538d04f56cac53',
                            u'   :END:'])
        self.assertEqual(
            data[168:172], [u"** <2010-11-02 Tue>--<2010-11-02 Tue> All Souls' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         62e1a6c16ce2c40e33d67961b6cec5c0a099b14d',
                            u'   :END:'])
        self.assertEqual(
            data[172:176], [u"** <2012-11-02 Fri>--<2012-11-02 Fri> All Souls' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         c9eae72e34489720698a1054cd03bb4cc8859e71',
                            u'   :END:'])
        self.assertEqual(
            data[176:180], [u"** <2010-11-01 Mon>--<2010-11-01 Mon> All Saints' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         b87bcffe87fda005047d738c07a31cd8c25f609c',
                            u'   :END:'])
        self.assertEqual(
            data[180:184], [u"** <2012-11-01 Thu>--<2012-11-01 Thu> All Saints' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         37b17e9da936c61a627101afd0cc87d28aafbe70',
                            u'   :END:'])
        self.assertEqual(
            data[184:188], [u"** <2011-11-01 Tue>--<2011-11-01 Tue> All Saints' Day",
                            u'   :PROPERTIES:',
                            u'   :ID:         fe605142ace6ab6268fc672fccece05219c17148',
                            u'   :END:'])
        self.assertEqual(
            data[188:194], [u'** <2011-08-22 Mon>-<9999-12-31 Fri> No end time/date',
                            u'   :PROPERTIES:',
                            u'   :DESCRIPTION: No end time/date',
                            u'   :ID:          62bf353bf19c0379faf4910741635dfd6a804b11',
                            u'   :END:'])
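
A side note on the :ID: values asserted above: each is a 40-character hex digest, consistent with a SHA-1 hash over some stable per-event key, though the exact hash input is not visible from this excerpt. A purely illustrative sketch of deriving such an org-mode drawer ID (the helper name and the choice of hashed fields are assumptions, and its output will not match the IDs asserted above):

import hashlib

def org_entry_id(summary, start, end):
    # Illustrative only: derive a stable 40-char hex ID from an
    # event's key fields, suitable for an org-mode :ID: property.
    key = u'%s|%s|%s' % (summary, start, end)
    return hashlib.sha1(key.encode('utf-8')).hexdigest()

# e.g. org_entry_id(u'Christmas', u'2010-12-25', u'2010-12-25')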

Example 156

Project: tempest Source File: test_subunit_describe_calls.py
Function: test_parse
    def test_parse(self):
        subunit_file = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            'sample_streams/calls.subunit')
        parser = subunit_describe_calls.parse(
            open(subunit_file), "pythonlogging", None)
        expected_result = {
            'bar': [{
                'name': 'AgentsAdminTestJSON:setUp',
                'request_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "common", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86_64-424013832", "os": "linux"}}',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "common", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86_64-424013832", "os": "linux", '
                '"agent_id": 1}}',
                'response_headers': "{'status': '200', 'content-length': "
                "'203', 'x-compute-request-id': "
                "'req-25ddaae2-0ef1-40d1-8228-59bd64a7e75b', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:00 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents',
                'verb': 'POST'}, {
                'name': 'AgentsAdminTestJSON:test_create_agent',
                'request_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "kvm", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86-252246646", "os": "win"}}',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "kvm", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86-252246646", "os": "win", '
                '"agent_id": 2}}',
                'response_headers': "{'status': '200', 'content-length': "
                "'195', 'x-compute-request-id': "
                "'req-b4136f06-c015-4e7e-995f-c43831e3ecce', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:00 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents',
                'verb': 'POST'}, {
                'name': 'AgentsAdminTestJSON:tearDown',
                'request_body': 'None',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '',
                'response_headers': "{'status': '200', 'content-length': "
                "'0', 'x-compute-request-id': "
                "'req-ee905fd6-a5b5-4da4-8c37-5363cb25bd9d', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:00 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents/1',
                'verb': 'DELETE'}, {
                'name': 'AgentsAdminTestJSON:_run_cleanups',
                'request_body': 'None',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_headers': "{'status': '200', 'content-length': "
                "'0', 'x-compute-request-id': "
                "'req-e912cac0-63e0-4679-a68a-b6d18ddca074', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:00 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents/2',
                'verb': 'DELETE'}],
            'foo': [{
                'name': 'AgentsAdminTestJSON:setUp',
                'request_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "common", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86_64-948635295", "os": "linux"}}',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '{"agent": {"url": "xxx://xxxx/xxx/xxx", '
                '"hypervisor": "common", "md5hash": '
                '"add6bb58e139be103324d04d82d8f545", "version": "7.0", '
                '"architecture": "tempest-x86_64-948635295", "os": "linux", '
                '"agent_id": 3}}',
                'response_headers': "{'status': '200', 'content-length': "
                "'203', 'x-compute-request-id': "
                "'req-ccd2116d-04b1-4ffe-ae32-fb623f68bf1c', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:01 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents',
                'verb': 'POST'}, {
                'name': 'AgentsAdminTestJSON:test_delete_agent',
                'request_body': 'None',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '',
                'response_headers': "{'status': '200', 'content-length': "
                "'0', 'x-compute-request-id': "
                "'req-6e7fa28f-ae61-4388-9a78-947c58bc0588', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:01 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents/3',
                'verb': 'DELETE'}, {
                'name': 'AgentsAdminTestJSON:test_delete_agent',
                'request_body': 'None',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_body': '{"agents": []}',
                'response_headers': "{'status': '200', 'content-length': "
                "'14', 'content-location': "
                "'http://23.253.76.97:8774/v2.1/"
                "cf6b1933fe5b476fbbabb876f6d1b924/os-agents', "
                "'x-compute-request-id': "
                "'req-e41aa9b4-41a6-4138-ae04-220b768eb644', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:01 GMT', 'content-type': "
                "'application/json'}",
                'service': 'Nova',
                'status_code': '200',
                'url': 'v2.1/<id>/os-agents',
                'verb': 'GET'}, {
                'name': 'AgentsAdminTestJSON:tearDown',
                'request_body': 'None',
                'request_headers': "{'Content-Type': 'application/json', "
                "'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}",
                'response_headers': "{'status': '404', 'content-length': "
                "'82', 'x-compute-request-id': "
                "'req-e297aeea-91cf-4f26-b49c-8f46b1b7a926', 'vary': "
                "'X-OpenStack-Nova-API-Version', 'connection': 'close', "
                "'x-openstack-nova-api-version': '2.1', 'date': "
                "'Tue, 02 Feb 2016 03:27:02 GMT', 'content-type': "
                "'application/json; charset=UTF-8'}",
                'service': 'Nova',
                'status_code': '404',
                'url': 'v2.1/<id>/os-agents/3',
                'verb': 'DELETE'}]}

        self.assertEqual(expected_result, parser.test_logs)
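
For the purposes of this page, the line to note in test_parse is the fixture lookup: os.path.dirname(os.path.abspath(__file__)) anchors sample_streams/calls.subunit to the test module's own directory, so the test passes regardless of the working directory it is launched from. A minimal standalone sketch of the same idiom (the data_path helper is illustrative, not part of tempest):

import os

# __file__ may be a relative path depending on how the module was
# loaded; abspath() normalizes it before dirname() strips the file
# name, yielding the absolute directory containing this module.
HERE = os.path.dirname(os.path.abspath(__file__))

def data_path(*parts):
    """Return an absolute path to a file shipped next to this module."""
    return os.path.join(HERE, *parts)

# Usage, mirroring the fixture lookup above:
#     with open(data_path('sample_streams', 'calls.subunit')) as stream:
#         ...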