os.unlink

Here are examples of the Python API os.unlink, taken from open source projects. os.unlink(path) deletes the file at path (os.remove is an identical function); it raises an error if the path does not exist or is a directory (directories are removed with os.rmdir or shutil.rmtree).

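A minimal sketch of the call itself before the project examples (not taken from any of the projects below):

import os
import tempfile

# Create a throwaway file so there is something to delete.
fd, path = tempfile.mkstemp()
os.close(fd)

os.unlink(path)                  # the file is gone
assert not os.path.exists(path)

try:
    os.unlink(path)              # deleting again: the path no longer exists
except FileNotFoundError:        # OSError with errno.ENOENT on Python 2
    pass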

Example 1

Project: GAE-Bulk-Mailer
Source File: makemessages.py
def write_po_file(pofile, potfile, domain, locale, verbosity, stdout,
                  copy_pforms, wrap, location, no_obsolete):
    """
    Creates or updates the :param pofile: PO file for :param domain: and
    :param locale:.  Uses contents of the existing :param potfile:.

    Uses msguniq, msgmerge, and msgattrib GNU gettext utilities.
    """
    msgs, errors, status = _popen('msguniq %s %s --to-code=utf-8 "%s"' %
                                    (wrap, location, potfile))
    if errors:
        if status != STATUS_OK:
            os.unlink(potfile)
            raise CommandError(
                "errors happened while running msguniq\n%s" % errors)
        elif verbosity > 0:
            stdout.write(errors)

    if os.path.exists(pofile):
        with open(potfile, 'w') as fp:
            fp.write(msgs)
        msgs, errors, status = _popen('msgmerge %s %s -q "%s" "%s"' %
                                        (wrap, location, pofile, potfile))
        if errors:
            if status != STATUS_OK:
                os.unlink(potfile)
                raise CommandError(
                    "errors happened while running msgmerge\n%s" % errors)
            elif verbosity > 0:
                stdout.write(errors)
    elif copy_pforms:
        msgs = copy_plural_forms(msgs, locale, domain, verbosity, stdout)
    msgs = msgs.replace(
        "#. #-#-#-#-#  %s.pot (PACKAGE VERSION)  #-#-#-#-#\n" % domain, "")
    with open(pofile, 'w') as fp:
        fp.write(msgs)
    os.unlink(potfile)
    if no_obsolete:
        msgs, errors, status = _popen(
            'msgattrib %s %s -o "%s" --no-obsolete "%s"' %
            (wrap, location, pofile, pofile))
        if errors:
            if status != STATUS_OK:
                raise CommandError(
                    "errors happened while running msgattrib\n%s" % errors)
            elif verbosity > 0:
                stdout.write(errors)
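
Note how write_po_file unlinks the intermediate potfile before raising
CommandError, so a failed run leaves no stale .pot file behind. A minimal,
self-contained sketch of that cleanup-before-raise pattern (the names below
are hypothetical, not from the project above):

import os

def write_checked(path, data):
    # Write data to path; if the sanity check fails, unlink the file
    # before raising so no stale artifact survives the failure.
    with open(path, 'w') as fp:
        fp.write(data)
    if not data.endswith('\n'):      # stand-in for a real validation step
        os.unlink(path)
        raise ValueError("refusing to keep malformed output: %r" % path)

write_checked('ok.txt', 'hello\n')   # file is kept
try:
    write_checked('bad.txt', 'hello')
except ValueError:
    assert not os.path.exists('bad.txt')
os.unlink('ok.txt')                  # tidy up the demo file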

Example 2

    @commands.command_add('agentupdate')
    def update_cmd(self, data):

        if isinstance(data, basestring):
            (url, md5sum) = data.split(',', 1)
        elif isinstance(data, dict):
            try:
                url = data['url']
                md5sum = data['md5sum']
            except KeyError:
                return (500,
                        "Missing URL or MD5 sum in dictionary arguments")
        else:
            return (500, "Invalid arguments")

        try:
            local_filename = self._get_to_local_file(url, md5sum)
        except AgentUpdateError, e:
            return (500, str(e))

        ext = local_filename[local_filename.rindex('.') + 1:]

        if ext != ".tar":
            dest_filename = "%s.%s" % (
                    DEST_FILE,
                    ext)
        else:
            dest_filename = "%s"

        try:
            t = tarfile.open(local_filename, 'r:*')
        except tarfile.TarError, e:
            os.unlink(local_filename)
            return (500, "%s: %s" % (local_filename, str(e)))

        found_installer = None
        for tarinfo in t.getmembers():
            name = tarinfo.name
            while name.startswith('../') or name.startswith('./') \
                    or name.startswith('/'):
                name = name[1:]
            # Check for 'installer.sh' in root of the tar or in a
            # subdirectory off of the root
            if name == "installer.sh" or (name.count('/') == 1 and
                    name.split('/')[1] == "installer.sh"):
                found_installer = name
                break

        pipe = subprocess.PIPE

        if found_installer:
            dest_path = "%s.%d" % (DEST_PATH, os.getpid())

            try:
                t.extractall(dest_path)
                t.close()
            except tarfile.TarError, e:
                os.unlink(local_filename)
                return (500, str(e))

            os.unlink(local_filename)

            p = subprocess.Popen(["%s/%s" % (dest_path, found_installer)],
                    stdin=pipe, stdout=pipe, stderr=pipe, env={})
            p.communicate(None)
            retcode = p.returncode

            shutil.rmtree(dest_path, ignore_errors=True)

            if retcode != 0:
                return (500, "Agent installer script failed: %d" % retcode)
        else:
            #
            # Old way, no installer
            #

            t.close()

            # Using shutil.move instead of os.rename() because we might be
            # moving across filesystems.
            shutil.move(local_filename, dest_filename)

        init_script = None
        for script in INIT_SCRIPTS:
            if os.path.exists(script):
                init_script = script
                break

        if not init_script:
            return (404, "No init script found to restart")

        try:
            p = subprocess.Popen(["sh", init_script, "restart"],
                    stdin=pipe, stdout=pipe, stderr=pipe, env={})
            p.communicate(None)
            retcode = p.returncode
        except OSError, e:
            return (500, "Couldn't restart the agent: %s" % str(e))

        if retcode != 0:
            return (500, "Couldn't restart the agent: %d" % retcode)

        return (0, "")

Example 3

Project: rbtools
Source File: plastic.py
    def _process_diffs(self, my_diff_entries):
        # Diff generation based on perforce client
        diff_lines = []

        empty_filename = make_tempfile()
        tmp_diff_from_filename = make_tempfile()
        tmp_diff_to_filename = make_tempfile()

        for f in my_diff_entries:
            f = f.strip()

            if not f:
                continue

            m = re.search(r'(?P<type>[ACMD]) (?P<file>.*) '
                          r'(?P<revspec>rev:revid:[-\d]+) '
                          r'(?P<parentrevspec>rev:revid:[-\d]+) '
                          r'src:(?P<srcpath>.*) '
                          r'dst:(?P<dstpath>.*)$',
                          f)
            if not m:
                die("Could not parse 'cm log' response: %s" % f)

            changetype = m.group("type")
            filename = m.group("file")

            if changetype == "M":
                # Handle moved files as a delete followed by an add.
                # Clunky, but at least it works
                oldfilename = m.group("srcpath")
                oldspec = m.group("revspec")
                newfilename = m.group("dstpath")
                newspec = m.group("revspec")

                self._write_file(oldfilename, oldspec, tmp_diff_from_filename)
                dl = self._diff_files(tmp_diff_from_filename, empty_filename,
                                      oldfilename, "rev:revid:-1", oldspec,
                                      changetype)
                diff_lines += dl

                self._write_file(newfilename, newspec, tmp_diff_to_filename)
                dl = self._diff_files(empty_filename, tmp_diff_to_filename,
                                      newfilename, newspec, "rev:revid:-1",
                                      changetype)
                diff_lines += dl

            else:
                newrevspec = m.group("revspec")
                parentrevspec = m.group("parentrevspec")

                logging.debug("Type %s File %s Old %s New %s"
                              % (changetype, filename, parentrevspec,
                                 newrevspec))

                old_file = new_file = empty_filename

                if (changetype in ['A'] or
                    (changetype in ['C'] and parentrevspec == "rev:revid:-1")):
                    # There's only one content to show
                    self._write_file(filename, newrevspec,
                                     tmp_diff_to_filename)
                    new_file = tmp_diff_to_filename
                elif changetype in ['C']:
                    self._write_file(filename, parentrevspec,
                                     tmp_diff_from_filename)
                    old_file = tmp_diff_from_filename
                    self._write_file(filename, newrevspec,
                                     tmp_diff_to_filename)
                    new_file = tmp_diff_to_filename
                elif changetype in ['D']:
                    self._write_file(filename, parentrevspec,
                                     tmp_diff_from_filename)
                    old_file = tmp_diff_from_filename
                else:
                    die("Don't know how to handle change type '%s' for %s" %
                        (changetype, filename))

                dl = self._diff_files(old_file, new_file, filename,
                                      newrevspec, parentrevspec, changetype)
                diff_lines += dl

        os.unlink(empty_filename)
        os.unlink(tmp_diff_from_filename)
        os.unlink(tmp_diff_to_filename)

        return ''.join(diff_lines)

Example 4

def fetch_server_certificate(host, port):

    def subproc(cmd):
        from subprocess import Popen, PIPE, STDOUT
        proc = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=True)
        status = proc.wait()
        output = proc.stdout.read()
        return status, output

    def strip_to_x509_cert(certfile_contents, outfile=None):
        m = re.search(br"^([-]+BEGIN CERTIFICATE[-]+[\r]*\n"
                      br".*[\r]*^[-]+END CERTIFICATE[-]+)$",
                      certfile_contents, re.MULTILINE | re.DOTALL)
        if not m:
            return None
        else:
            tn = tempfile.mktemp()
            fp = open(tn, "wb")
            fp.write(m.group(1) + b"\n")
            fp.close()
            try:
                tn2 = (outfile or tempfile.mktemp())
                status, output = subproc(r'openssl x509 -in "%s" -out "%s"' %
                                         (tn, tn2))
                if status != 0:
                    raise RuntimeError('OpenSSL x509 failed with status %s and '
                                       'output: %r' % (status, output))
                fp = open(tn2, 'rb')
                data = fp.read()
                fp.close()
                os.unlink(tn2)
                return data
            finally:
                os.unlink(tn)

    if sys.platform.startswith("win"):
        tfile = tempfile.mktemp()
        fp = open(tfile, "w")
        fp.write("quit\n")
        fp.close()
        try:
            status, output = subproc(
                'openssl s_client -connect "%s:%s" -showcerts < "%s"' %
                (host, port, tfile))
        finally:
            os.unlink(tfile)
    else:
        status, output = subproc(
            'openssl s_client -connect "%s:%s" -showcerts < /dev/null' %
            (host, port))
    if status != 0:
        raise RuntimeError('OpenSSL connect failed with status %s and '
                           'output: %r' % (status, output))
    certtext = strip_to_x509_cert(output)
    if not certtext:
        raise ValueError("Invalid response received from server at %s:%s" %
                         (host, port))
    return certtext
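
Example 4 pairs tempfile.mktemp with os.unlink in try/finally blocks.
tempfile.mktemp only returns a name and is documented as insecure, because
another process can create the file between the name being chosen and the
file being opened. A sketch of the same create-use-unlink flow built on
tempfile.mkstemp, which creates the file atomically:

import os
import tempfile

fd, tn = tempfile.mkstemp()
try:
    with os.fdopen(fd, 'wb') as fp:
        fp.write(b'certificate material goes here\n')
    # ... hand tn to an external tool such as openssl here ...
finally:
    os.unlink(tn)        # removed on success and failure alike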

Example 5

Project: tensor
Source File: test_logs.py
    def test_logfollow(self):
        try:
            os.unlink('test.log.lf')
            os.unlink('test.log')
        except:
            pass

        log = open('test.log', 'wt')
        log.write('foo\nbar\n')
        log.flush()

        f = follower.LogFollower('test.log', tmp_path=".", history=True)

        r = f.get()

        log.write('test')
        log.flush()

        r2 = f.get()

        log.write('ing\n')
        log.flush()

        r3 = f.get()

        self.assertEqual(r[0], 'foo')
        self.assertEqual(r[1], 'bar')

        self.assertEqual(r2, [])
        self.assertEqual(r3[0], 'testing')

        log.close()

        # Move inode
        os.rename('test.log', 'testold.log')

        log = open('test.log', 'wt')
        log.write('foo2\nbar2\n')
        log.close()

        r = f.get()

        self.assertEqual(r[0], 'foo2')
        self.assertEqual(r[1], 'bar2')

        # Go backwards
        log = open('test.log', 'wt')
        log.write('foo3\n')
        log.close()

        r = f.get()

        self.assertEqual(r[0], 'foo3')

        os.unlink('test.log')
        os.unlink('testold.log')
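
The bare try/except at the top of this test makes the initial cleanup
idempotent: it must succeed whether or not a previous run left test.log and
test.log.lf behind. On Python 3 the same intent can be written more narrowly
(a sketch; the test above is Python 2 style and swallows every exception):

import os
import contextlib

for name in ('test.log.lf', 'test.log'):
    with contextlib.suppress(FileNotFoundError):
        os.unlink(name)   # only a missing file is ignored, not e.g. EACCES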

Example 6

Project: esky
Source File: finder.py
    def _prepare_version(self,app,version,path):
        """Prepare the requested version from downloaded data.

        This method is responsible for unzipping downloaded versions, applying
        patches and so-forth, and making the result available as a local
        directory ready for renaming into the appdir.
        """
        uppath = tempfile.mkdtemp(dir=self._workdir(app,"unpack"))
        try:
            if not path:
                #  There's nothing to prepare, just copy the current version.
                self._copy_best_version(app,uppath)
            else:
                if path[0][0].endswith(".patch"):
                    #  We're directly applying a series of patches.
                    #  Copy the current version across and go from there.
                    try:
                        self._copy_best_version(app,uppath)
                    except EnvironmentError, e:
                        self.version_graph.remove_all_links(path[0][1])
                        err = "couldn't copy current version: %s" % (e,)
                        raise PatchError(err)
                    patches = path
                else:
                    #  We're starting from a zipfile.  Extract the first dir
                    #  containing more than a single item and go from there.
                    try:
                        deep_extract_zipfile(path[0][0],uppath)
                    except (zipfile.BadZipfile,zipfile.LargeZipFile):
                        self.version_graph.remove_all_links(path[0][1])
                        try:
                            os.unlink(path[0][0])
                        except EnvironmentError:
                            pass
                        raise
                    patches = path[1:]
                # TODO: remove compatibility hooks for ESKY_APPDATA_DIR="".
                # If a patch fails to apply because we've put an appdata dir
                # where it doesn't expect one, try again with old layout. 
                for _ in xrange(2):
                    #  Apply any patches in turn.
                    for (patchfile,patchurl) in patches:
                        try:
                            try:
                                with open(patchfile,"rb") as f:
                                    apply_patch(uppath,f)
                            except EnvironmentError, e:
                                if e.errno not in (errno.ENOENT,):
                                    raise
                                if not path[0][0].endswith(".patch"):
                                    raise
                                really_rmtree(uppath)
                                os.mkdir(uppath)
                                self._copy_best_version(app,uppath,False)
                                break
                        except (PatchError,EnvironmentError):
                            self.version_graph.remove_all_links(patchurl)
                            try:
                                os.unlink(patchfile)
                            except EnvironmentError:
                                pass
                            raise
                    else:
                        break
            # Find the actual version dir that we're unpacking.
            # TODO: remove compatibility hooks for ESKY_APPDATA_DIR=""
            vdir = join_app_version(app.name,version,app.platform)
            vdirpath = os.path.join(uppath,ESKY_APPDATA_DIR,vdir)
            if not os.path.isdir(vdirpath):
                vdirpath = os.path.join(uppath,vdir)
                if not os.path.isdir(vdirpath):
                    self.version_graph.remove_all_links(path[0][1])
                    err = version + ": version directory does not exist"
                    raise EskyVersionError(err)
            # Move anything that's not the version dir into "bootstrap" dir.
            ctrlpath = os.path.join(vdirpath,ESKY_CONTROL_DIR)
            bspath = os.path.join(ctrlpath,"bootstrap")
            if not os.path.isdir(bspath):
                os.makedirs(bspath)
            for nm in os.listdir(uppath):
                if nm != vdir and nm != ESKY_APPDATA_DIR:
                    really_rename(os.path.join(uppath,nm),
                                  os.path.join(bspath,nm))
            # Check that it has an esky-files/bootstrap-manifest.txt file
            bsfile = os.path.join(ctrlpath,"bootstrap-manifest.txt")
            if not os.path.exists(bsfile):
                self.version_graph.remove_all_links(path[0][1])
                err = version + ": version has no bootstrap-manifest.txt"
                raise EskyVersionError(err)
            # Make it available for upgrading, replacing anything
            # that we previously had available.
            rdpath = self._ready_name(app,version)
            tmpnm = None
            try:
                if os.path.exists(rdpath):
                    tmpnm = rdpath + ".old"
                    while os.path.exists(tmpnm):
                        tmpnm = tmpnm + ".old"
                    really_rename(rdpath,tmpnm)
                really_rename(vdirpath,rdpath)
            finally:
                if tmpnm is not None:
                    really_rmtree(tmpnm)
            #  Clean up any downloaded files now that we've used them.
            for (filenm,_) in path:
                os.unlink(filenm)
        finally:
            really_rmtree(uppath)

Example 7

Project: Ezhil-Lang
Source File: views.py
def evaluate( request ):
    progin = ''
    evaluated = False
    failed = False
    progout = ''
    exception = ''

    if request.method == "POST":
        print "received POST request"
        vars = {}
        vars['eval'] = request.POST['eval']
        vars['prog'] = request.POST['prog']
        progin = vars['prog']

        try:
            evaluated = True
            # 10s timeout and executor
            obj = EzhilFileExecuter( file_input = [vars['prog']],
                                     redirectop =  True, TIMEOUT = 10 )
            
            # actually run the process
            obj.run()
            
            # get executed output in 'progout' and name of the two tmp files to cleanup
            [tmpfile,filename,progout] = obj.get_output()
            os.unlink( tmpfile )
            os.unlink( filename )
            
            if obj.exitcode != 0 and EzhilWeb.error_qualifiers(progout):
                failed = True
            else:
                failed = False
            
        except Exception as e:
            exception = str(e)
            failed = True
            [tmpfile,filename,progout] = obj.get_output()

            try:
                os.unlink( tmpfile )
            except Exception:
                pass

            try:
                os.unlink( filename )
            except Exception:
                pass
            
            #traceback.print_tb(sys.exc_info()[2])
            #raise e #debug mode
    ctx_data = {'evaluated_flag':evaluated,
                   'failed_flag':failed,
                   'program_input':progin,
                   'program_output':progout,
                   'exception_message':exception,
                   'DOCROOT':settings.DOCROOT}
    ctx = Context(ctx_data)

    if request.is_ajax():
        json_data = simplejson.dumps( ctx_data )
        return HttpResponse( json_data )
    return StreamingHttpResponse( render(request,"ezplay/ezhil_eval.html", ctx ) )

Example 8

Project: pysymemu
Source File: test_memory.py
    def test_basic_mapping_with_mmapFile(self):
        mem = SMemory(Solver(), 32, 12)
        
        #start with no maps
        self.assertEqual(len(mem.mappings()), 0)
        
        rwx_file = tempfile.NamedTemporaryFile('w+b', delete=False)
        rwx_file.file.write('d'*0x1001)
        rwx_file.close()
        addr = mem.mmapFile(0, 0x1000, 'rwx', rwx_file.name)

        #One mapping
        self.assertEqual(len(mem.mappings()), 1)

        for i in xrange(addr, addr+0x1000):
            self.assertTrue(mem.isValid(i))
            self.assertTrue(mem.isReadable(i))
            self.assertTrue(mem.isWriteable(i))

        self.assertFalse(mem.isValid(addr-1))
        self.assertFalse(mem.isReadable(addr-1))
        self.assertFalse(mem.isWriteable(addr-1))
        self.assertFalse(mem.isValid(addr+0x1000))
        self.assertFalse(mem.isReadable(addr+0x1000))
        self.assertFalse(mem.isWriteable(addr+0x1000))

        self.assertEqual(len(mem.mappings()), 1)
        
        r_file = tempfile.NamedTemporaryFile('w+b', delete=False)
        r_file.file.write('b'*0x1000)
        r_file.close()
        mem.mmapFile(0, 0x1000, 'r', r_file.name)

        #Two mappings
        self.assertEqual(len(mem.mappings()), 2)

        rw_file = tempfile.NamedTemporaryFile('w+b', delete=False)
        rw_file.file.write('c'*0x1000)
        rw_file.close()
        mem.mmapFile(None, 0x1000, 'rw', rw_file.name)

        #Three mappings
        self.assertEqual(len(mem.mappings()), 3)
        
        size = 0x30000
        w_file = tempfile.NamedTemporaryFile('w+b', delete=False)
        w_file.file.write('a'*size)
        w_file.close()
        addr = mem.mmapFile(0x20000000, size, 'w', w_file.name)
        
        #Four mappings
        self.assertEqual(len(mem.mappings()), 4)

        #Okay unmap 
        mem.munmap(addr+size/3, size/3)

        #The middle unmap split the fourth map in two, so 5 maps now
        self.assertEqual(len(mem.mappings()), 5)

        #limits
        self.assertTrue(mem.isValid(addr))
        self.assertTrue(mem.isValid(addr+size/3-1))
        self.assertTrue(mem.isValid(addr+2*size/3))
        self.assertTrue(mem.isValid(addr+size-1))
        self.assertFalse(mem.isValid(addr-1))
        self.assertFalse(mem.isValid(addr+size/3))
        self.assertFalse(mem.isValid(addr+2*size/3-1))
        self.assertFalse(mem.isValid(addr+size))

        #re alloc: mem should be at the same address
        addr1 = mem.mmap(addr, size, 'rwx')
        self.assertEqual(addr1, addr)

        #Delete the temporary file
        os.unlink(rwx_file.name)
        os.unlink(r_file.name)
        os.unlink(w_file.name)

Example 9

Project: karesansui
Source File: write_conf.py
    def process(self):
        (opts, args) = getopts()
        chkopts(opts)
        self.up_progress(1)

        uniq_id = time.strftime("%Y%m%d%H%M%S", time.localtime())

        if opts.pre_command is not None:
            if opts.pre_command[0:4] == "b64:":
                command = base64_decode(opts.pre_command[4:])
            else:
                command = opts.pre_command
            self.logger.info("execute command - %s" % command)
            (_ret,_res) = execute_command(command.split())
            if _ret != 0:
                error_msg = "execute error - %s" % command
                self.logger.error(error_msg)
                #raise KssCommandOptException("ERROR: %s" % error_msg)

        dop = DictOp()
        modules = opts.module.split(":")
        files   = opts.file.split(":")

        source_files = []
        retval = True
        cnt = 0
        for _mod in modules:
            _file = files[cnt]
            try:
                exec("from karesansui.lib.parser.%s import %sParser as Parser" % (_mod,_mod,))

                self.up_progress(5)
                parser = Parser()

                # Add the parsed configuration to the dictionary operator
                self.up_progress(5)
                if opts.php is True:
                    conf_arr = php_array_to_python_dict(open(_file).read())
                else:
                    exec("conf_arr = %s" % open(_file).read())
                dop.addconf(_mod,conf_arr)

                """
                Manipulate the dictionary here if necessary. Normally this is
                not needed, because the manipulated dictionary has already been
                written to _file, e.g.:
                dop.add   (_mod,"foo","bar")
                dop.delete(_mod,"foo")
                """

                # Build the list of source config files (for backup)
                self.up_progress(5)
                source_file = parser.source_file()
                for _afile in source_file:
                    _bak_afile = "%s.%s" % (_afile,uniq_id)
                    copy_file(_afile,_bak_afile)
                source_files = source_files + source_file

                # Convert back to a dictionary
                self.up_progress(5)
                conf_arr = dop.getconf(_mod)
                #dop.preprint_r(_mod)

                # Write back to the config file
                self.up_progress(5)
                extra_args = {}
                extra_args["include"] = opts.include
                if opts.early_exit is True:
                    retval = retval and parser.write_conf(conf_arr,extra_args=extra_args)
                else:
                    retval = parser.write_conf(conf_arr,extra_args=extra_args) and retval

                if opts.delete is True:
                    os.unlink(_file)

            finally:
                cnt = cnt + 1

        if retval is False:
            for _afile in source_files:
                _bak_afile = "%s.%s" % (_afile,uniq_id)
                os.unlink(_afile)
                copy_file(_bak_afile,_afile)
                os.unlink(_bak_afile)
            raise KssCommandOptException("ERROR: write configure failure")

        for _afile in source_files:
            _bak_afile = "%s.%s" % (_afile,uniq_id)
            os.unlink(_bak_afile)

        if opts.post_command is not None:
            if opts.post_command[0:4] == "b64:":
                command = base64_decode(opts.post_command[4:])
            else:
                command = opts.post_command
            self.logger.info("execute command - %s" % command)
            (_ret,_res) = execute_command(command.split())
            if _ret != 0:
                error_msg = "execute error - %s" % command
                self.logger.error(error_msg)
                raise KssCommandOptException("ERROR: %s" % error_msg)

        self.up_progress(10)
        return True
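
The example above backs up each config file with copy_file before writing,
unlinks the broken file and restores the backup on failure, and unlinks the
backups once they are no longer needed. A compressed sketch of that
backup/rollback dance (the validation step is a stand-in, not from the
project):

import os
import shutil

def rewrite_with_rollback(path, new_text):
    # Back up path, write the new contents, and roll back from the
    # backup if validation fails; the backup is unlinked either way.
    bak = path + '.bak'
    shutil.copy2(path, bak)
    try:
        with open(path, 'w') as fp:
            fp.write(new_text)
        if not new_text:                 # stand-in for a real validation
            raise ValueError('empty config rejected')
    except Exception:
        os.unlink(path)                  # drop the broken version
        shutil.copy2(bak, path)          # restore the original
        raise
    finally:
        os.unlink(bak)                   # the backup is no longer needed

with open('app.conf', 'w') as fp:
    fp.write('key=value\n')
rewrite_with_rollback('app.conf', 'key=newvalue\n')
os.unlink('app.conf')                    # tidy up the demo file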

Example 10

Project: skytools
Source File: checker.py
    def do_repair(self, src_db, dst_db, where, pfx = 'repair', apply_fixes = False):
        """Actual comparison."""

        self.reset()

        src_curs = src_db.cursor()
        dst_curs = dst_db.cursor()

        self.apply_fixes = apply_fixes
        if apply_fixes:
            self.apply_cursor = dst_curs

        self.log.info('Checking %s' % self.table_name)

        copy_tbl = self.gen_copy_tbl(src_curs, dst_curs, where)

        dump_src = "%s.%s.src" % (pfx, self.table_name)
        dump_dst = "%s.%s.dst" % (pfx, self.table_name)
        fix = "%s.%s.fix" % (pfx, self.table_name)

        self.log.info("Dumping src table: %s" % self.table_name)
        self.dump_table(copy_tbl, src_curs, dump_src)
        src_db.commit()
        self.log.info("Dumping dst table: %s" % self.table_name)
        self.dump_table(copy_tbl, dst_curs, dump_dst)
        dst_db.commit()

        self.log.info("Sorting src table: %s" % self.table_name)
        self.do_sort(dump_src, dump_src + '.sorted')

        self.log.info("Sorting dst table: %s" % self.table_name)
        self.do_sort(dump_dst, dump_dst + '.sorted')

        self.dump_compare(dump_src + ".sorted", dump_dst + ".sorted", fix)

        os.unlink(dump_src)
        os.unlink(dump_dst)
        os.unlink(dump_src + ".sorted")
        os.unlink(dump_dst + ".sorted")

        if apply_fixes:
            dst_db.commit()

Example 11

Project: acousticbrainz-server
Source File: hl_calc.py
    def _calculate(self):
        """Invoke Essentia high-level extractor and return its JSON output."""

        try:
            f = tempfile.NamedTemporaryFile(delete=False)
            name = f.name
            f.write(self.ll_data.encode("utf-8"))
            f.close()
        except IOError:
            print("IO Error while writing temp file")
            # If we return early, remove the ll file we created
            os.unlink(name)
            return "{}"

        # Securely generate a temporary filename
        tmp_file = tempfile.mkstemp()
        out_file = tmp_file[1]
        os.close(tmp_file[0])

        fnull = open(os.devnull, 'w')
        try:
            subprocess.check_call([os.path.join(".", HIGH_LEVEL_EXTRACTOR_BINARY),
                                   name, out_file, PROFILE_CONF],
                                  stdout=fnull, stderr=fnull)
        except (subprocess.CalledProcessError, OSError):
            print("Cannot call high-level extractor")
            # If we return early, make sure we remove the temp
            # output file that we created
            os.unlink(out_file)
            return "{}"
        finally:
            # At this point we can remove the source file,
            # regardless of if we failed or if we succeeded
            fnull.close()
            os.unlink(name)

        try:
            f = open(out_file)
            hl_data = f.read()
            f.close()
        except IOError:
            print("IO Error while removing temp file")
            return "{}"
        finally:
            os.unlink(out_file)

        return hl_data
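
This extractor creates its temp files with delete=False and mkstemp because
they must be reopened by name, here by the external extractor binary, before
being removed; the unlink calls in the finally blocks then guarantee cleanup
on every path. A reduced sketch of handing a named temp file to a subprocess
(the command below is a placeholder, not the real extractor):

import os
import subprocess
import tempfile

f = tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False)
try:
    f.write('{"lowlevel": {}}')
    f.close()                               # flush so the tool sees the data
    subprocess.check_call(['cat', f.name])  # placeholder external tool
finally:
    os.unlink(f.name)                       # removed on success and failure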

Example 12

Project: totp-cgi
Source File: test.py
    def testPincodes(self):
        logger.debug('Running testPincodes')

        logger.debug('Testing in non-required mode')

        backends = getBackends()

        ga = totpcgi.GoogleAuthenticator(backends)
        gau = getValidUser()

        pincode   = 'wakkawakka'
        secret    = backends.secret_backend.get_user_secret(gau.user)
        tokencode = str(secret.get_totp_token()).zfill(6)

        token = pincode + tokencode

        if PINCODE_BACKEND == 'File':
            logger.debug('Testing without pincodes file')
            with self.assertRaisesRegexp(totpcgi.UserNotFound, 
                    'pincodes file not found'):
                ga.verify_user_token('valid', token)

            logger.debug('Testing with pincodes.db older than pincodes')
            setCustomPincode(pincode)
            setCustomPincode('blarg', makedb=False)

            with self.assertRaisesRegexp(totpcgi.UserPincodeError,
                'Pincode did not match'):
                ga.verify_user_token('valid', token)

            cleanState()

            logger.debug('Testing with fallback to pincodes')
            pincode_db_file = pincode_file + '.db'
            os.unlink(pincode_db_file)
            os.unlink(pincode_file)
            setCustomPincode('blarg', user='donotwant')
            os.unlink(pincode_file)
            setCustomPincode(pincode, user='valid', makedb=False)
            # Touch it, so it's newer than pincodes 
            os.utime(pincode_db_file, None)

            ret = ga.verify_user_token('valid', token)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

            logger.debug('Testing with junk at the end')
            setCustomPincode(pincode, makedb=False, addjunk=True)
            ret = ga.verify_user_token('valid', token)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

        if PINCODE_BACKEND in ('pgsql', 'mysql'):
            backends.pincode_backend.delete_user_hashcode('valid')
            logger.debug('Testing without a user pincode record present')
            with self.assertRaisesRegexp(totpcgi.UserNotFound, 
                    'no pincodes record'):
                ga.verify_user_token('valid', token)


        if PINCODE_BACKEND in ('pgsql', 'mysql', 'File'):
            logger.debug('Testing with 1-digit long pincode')
            setCustomPincode('1')
            ret = ga.verify_user_token('valid', '1'+tokencode)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

            logger.debug('Testing with 2-digit long pincode + valid tokencode')
            setCustomPincode('99')
            ret = ga.verify_user_token('valid', '99'+tokencode)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

            logger.debug('Testing with 2-digit long pincode + invalid tokencode')
            setCustomPincode('99')
            with self.assertRaisesRegexp(totpcgi.VerifyFailed,
                'TOTP token failed to verify'):
                ret = ga.verify_user_token('valid', '99'+'000000')

            cleanState()

            logger.debug('Testing with bcrypt')
            setCustomPincode(pincode, algo='bcrypt')
            ret = ga.verify_user_token('valid', token)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

            logger.debug('Testing with md5')
            setCustomPincode(pincode, algo='md5')
            ret = ga.verify_user_token('valid', token)
            self.assertEqual(ret, 'Valid TOTP token used')

            cleanState()

            setCustomPincode(pincode)

        if PINCODE_BACKEND == 'ldap':
            valid_user = os.environ['ldap_user']
            pincode    = os.environ['ldap_password']
            token      = pincode + tokencode
        else:
            valid_user = 'valid'
            pincode = 'wakkawakka'
            setCustomPincode(pincode)

        logger.debug('Testing with pincode+scratch-code')
        ret = ga.verify_user_token(valid_user, pincode+VALID_SCRATCH_TOKENS[0])
        self.assertEqual(ret, 'Scratch-token used')

        logger.debug('Testing with pincode+invalid-scratch-code')
        if PINCODE_BACKEND == 'ldap':
            raisedmsg = 'LDAP bind failed'
        else:
            raisedmsg = 'Pincode did not match'

        with self.assertRaisesRegexp(totpcgi.VerifyFailed, 'TOTP token failed to verify'):
            ret = ga.verify_user_token(valid_user, pincode+'00000000')

        cleanState()

        logger.debug('Turning on pincode enforcing')
        ga = totpcgi.GoogleAuthenticator(backends, require_pincode=True)

        logger.debug('Trying valid token without pincode')
        with self.assertRaisesRegexp(totpcgi.UserPincodeError,
            'Pincode is required'):
            ga.verify_user_token(valid_user, tokencode)

        cleanState()

        logger.debug('Trying valid scratch token without pincode')
        with self.assertRaisesRegexp(totpcgi.UserPincodeError,
            'Pincode is required'):
            ga.verify_user_token(valid_user, VALID_SCRATCH_TOKENS[0])

        cleanState()

        logger.debug('Trying valid token with pincode in enforcing')
        ret = ga.verify_user_token(valid_user, token)
        self.assertEqual(ret, 'Valid TOTP token used')
        
        cleanState()

        logger.debug('Testing valid pincode+scratch-code in enforcing')
        ret = ga.verify_user_token(valid_user, pincode+VALID_SCRATCH_TOKENS[0])
        self.assertEqual(ret, 'Scratch-token used')

        cleanState()

        logger.debug('Testing with valid token but invalid pincode')
        with self.assertRaisesRegexp(totpcgi.UserPincodeError, raisedmsg):
            ga.verify_user_token(valid_user, 'blarg'+tokencode)

        logger.debug('Testing again with valid token and valid pincode')
        with self.assertRaisesRegexp(totpcgi.VerifyFailed,
                'already been used'):
            ga.verify_user_token(valid_user, token)

        cleanState()

        logger.debug('Testing with valid pincode but invalid token')
        with self.assertRaisesRegexp(totpcgi.VerifyFailed,
            'TOTP token failed to verify'):
            ga.verify_user_token(valid_user, pincode+'555555')

Example 13

Project: neurosynth
Source File: test_base.py
    def test_selection_by_expression(self):
        """ Tests the expression-based search using the lexer/parser. This
        functionality is optional, so only run the test if ply is available.
        """
        try:
            import ply.lex as lex
            have_ply = True
        except:
            have_ply = False

        if have_ply:
            ids = self.dataset.get_studies(expression="* &~ (g*)", func=np.sum,
                                           frequency_threshold=0.003)
            self.assertEqual(sorted(ids), ['study3', 'study5'])
            ids = self.dataset.get_studies(
                expression="f* > 0.005", func=np.mean, frequency_threshold=0.0)
            self.assertEqual(ids, ['study3'])
            ids = self.dataset.get_studies(expression="f* < 0.05", func=np.sum,
                                           frequency_threshold=0.0)
            self.assertEqual(sorted(ids), ['study1', 'study2', 'study3',
                                           'study4', 'study5'])
            ids = self.dataset.get_studies(expression="f* | g*", func=np.mean,
                                           frequency_threshold=0.003)
            self.assertEqual(sorted(ids), ['study1', 'study2', 'study3',
                                           'study4'])
            ids = self.dataset.get_studies(expression="(f* & g*)", func=np.sum,
                                           frequency_threshold=0.001)
            self.assertEqual(sorted(ids), ['study1', 'study4'])
            # test N-gram feature handling
            self.dataset.feature_table.data.columns = ['f1', 'f2', 'my ngram',
                                                       'my ngram reprise', 'g1']
            ids = self.dataset.get_studies(expression="my ngram reprise")
            self.assertEqual(ids, ['study5'])
            ids = self.dataset.get_studies(expression="my ngram*",
                                           frequency_threshold=0.01)
            self.assertEqual(sorted(ids), ['study1', 'study4', 'study5'])
            try:
                os.unlink('lextab.py')
                os.unlink('parser.out')
                os.unlink('parsetab.py')
            except:
                pass

Example 14

Project: dodai-compute
Source File: images.py
def fetch_to_raw(context, image_href, path, user_id, project_id):
    path_tmp = "%s.part" % path
    metadata = fetch(context, image_href, path_tmp, user_id, project_id)

    def _qemu_img_info(path):

        out, err = utils.execute('env', 'LC_ALL=C', 'LANG=C',
            'qemu-img', 'info', path)

        # output of qemu-img is 'field: value'
        # the fields of interest are 'file format' and 'backing file'
        data = {}
        for line in out.splitlines():
            (field, val) = line.split(':', 1)
            if val[0] == " ":
                val = val[1:]
            data[field] = val

        return data

    data = _qemu_img_info(path_tmp)

    fmt = data.get("file format", None)
    if fmt is None:
        os.unlink(path_tmp)
        raise exception.ImageUnacceptable(
            reason=_("'qemu-img info' parsing failed."), image_id=image_href)

    if fmt != "raw":
        staged = "%s.converted" % path
        if "backing file" in data:
            backing_file = data['backing file']
            os.unlink(path_tmp)
            raise exception.ImageUnacceptable(image_id=image_href,
                reason=_("fmt=%(fmt)s backed by: %(backing_file)s") % locals())

        LOG.debug("%s was %s, converting to raw" % (image_href, fmt))
        out, err = utils.execute('qemu-img', 'convert', '-O', 'raw',
                                 path_tmp, staged)
        os.unlink(path_tmp)

        data = _qemu_img_info(staged)
        if data.get('file format', None) != "raw":
            os.unlink(staged)
            raise exception.ImageUnacceptable(image_id=image_href,
                reason=_("Converted to raw, but format is now %s") %
                data.get('file format', None))

        os.rename(staged, path)

    else:
        os.rename(path_tmp, path)

    return metadata

Example 20

Project: oh-mainline
Source File: makemessages.py
View license
def write_po_file(pofile, potfile, domain, locale, verbosity, stdout,
                  copy_pforms, wrap, location, no_obsolete):
    """
    Creates or updates the :param pofile: PO file for :param domain: and
    :param locale:. Uses the contents of the existing :param potfile:.

    Uses the msguniq, msgmerge, and msgattrib GNU gettext utilities.
    """
    msgs, errors, status = _popen('msguniq %s %s --to-code=utf-8 "%s"' %
                                    (wrap, location, potfile))
    if errors:
        if status != STATUS_OK:
            os.unlink(potfile)
            raise CommandError(
                "errors happened while running msguniq\n%s" % errors)
        elif verbosity > 0:
            stdout.write(errors)

    if os.path.exists(pofile):
        with open(potfile, 'w') as fp:
            fp.write(msgs)
        msgs, errors, status = _popen('msgmerge %s %s -q "%s" "%s"' %
                                        (wrap, location, pofile, potfile))
        if errors:
            if status != STATUS_OK:
                os.unlink(potfile)
                raise CommandError(
                    "errors happened while running msgmerge\n%s" % errors)
            elif verbosity > 0:
                stdout.write(errors)
    elif copy_pforms:
        msgs = copy_plural_forms(msgs, locale, domain, verbosity, stdout)
    msgs = msgs.replace(
        "#. #-#-#-#-#  %s.pot (PACKAGE VERSION)  #-#-#-#-#\n" % domain, "")
    with open(pofile, 'w') as fp:
        fp.write(msgs)
    os.unlink(potfile)
    if no_obsolete:
        msgs, errors, status = _popen(
            'msgattrib %s %s -o "%s" --no-obsolete "%s"' %
            (wrap, location, pofile, pofile))
        if errors:
            if status != STATUS_OK:
                raise CommandError(
                    "errors happened while running msgattrib\n%s" % errors)
            elif verbosity > 0:
                stdout.write(errors)

Example 21

Project: isso
Source File: __init__.py
View license
def main():

    parser = ArgumentParser(description="a blog comment hosting service")
    subparser = parser.add_subparsers(help="commands", dest="command")

    parser.add_argument('--version', action='version', version='%(prog)s ' + dist.version)
    parser.add_argument("-c", dest="conf", default="/etc/isso.conf",
            metavar="/etc/isso.conf", help="set configuration file")

    imprt = subparser.add_parser('import', help="import Disqus XML export")
    imprt.add_argument("dump", metavar="FILE")
    imprt.add_argument("-n", "--dry-run", dest="dryrun", action="store_true",
                       help="perform a trial run with no changes made")
    imprt.add_argument("-t", "--type", dest="type", default=None,
                       choices=["disqus", "wordpress"], help="export type")
    imprt.add_argument("--empty-id", dest="empty_id", action="store_true",
                       help="workaround for weird Disqus XML exports, #135")

    serve = subparser.add_parser("run", help="run server")

    args = parser.parse_args()
    conf = config.load(join(dist.location, dist.project_name, "defaults.ini"), args.conf)

    if args.command == "import":
        conf.set("guard", "enabled", "off")

        if args.dryrun:
            xxx = tempfile.NamedTemporaryFile()
            dbpath = xxx.name
        else:
            dbpath = conf.get("general", "dbpath")

        mydb = db.SQLite3(dbpath, conf)
        migrate.dispatch(args.type, mydb, args.dump, args.empty_id)

        sys.exit(0)

    if conf.get("general", "log-file"):
        handler = logging.FileHandler(conf.get("general", "log-file"))

        logger.addHandler(handler)
        logging.getLogger("werkzeug").addHandler(handler)

        logger.propagate = False
        logging.getLogger("werkzeug").propagate = False

    if not any(conf.getiter("general", "host")):
        logger.error("No website(s) configured, Isso won't work.")
        sys.exit(1)

    if conf.get("server", "listen").startswith("http://"):
        # urlsplit returns a 5-field result; take host and port explicitly
        parts = urlsplit(conf.get("server", "listen"))
        host, port = parts.hostname, parts.port
        try:
            from gevent.pywsgi import WSGIServer
            WSGIServer((host, port), make_app(conf)).serve_forever()
        except ImportError:
            run_simple(host, port, make_app(conf), threaded=True,
                       use_reloader=conf.getboolean('server', 'reload'))
    else:
        sock = conf.get("server", "listen").partition("unix://")[2]
        try:
            os.unlink(sock)
        except OSError as ex:
            if ex.errno != errno.ENOENT:
                raise
        wsgi.SocketHTTPServer(sock, make_app(conf)).serve_forever()
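
Before binding a unix socket, isso unlinks any stale socket file left by a previous run, treating only ENOENT as benign. A self-contained, POSIX-only sketch of the same idiom (the socket path is illustrative):

import errno
import os
import socket

SOCK = "/tmp/example.sock"   # illustrative path

# Remove a socket left over from a previous run; only "no such file" is OK.
try:
    os.unlink(SOCK)
except OSError as ex:
    if ex.errno != errno.ENOENT:
        raise

server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
server.bind(SOCK)
server.listen(1)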

Example 22

Project: wub-machine
Source File: dubstep.py
View license
    def remix(self):
        """
            Wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub wub.
        """
        self.log("Looking up track...", 5)
        self.getTag()
        self.processArt()

        self.log("Listening to %s..." % ('"%s"' % self.tag['title'] if 'title' in self.tag else 'song'), 5)
        self.original = audio.LocalAudioFile(self.infile, False)
        if 'title' not in self.tag:
            self.detectSong(self.original)
        self.st = FastModify()
        
        self.log("Choosing key and tempo...", 10)
        self.tonic = self.original.analysis.key['value']
        self.tempo = self.original.analysis.tempo['value']
        self.bars = self.original.analysis.bars
        self.beats = self.original.analysis.beats
        self.sections = self.original.analysis.sections
        self.tag['key'] = self.keys[self.tonic] if self.tonic >= 0 and self.tonic < 12 else '?'
        self.tag['tempo'] = self.template['tempo']

        self.log("Arranging intro...", 40.0/(len(self.sections) + 1))
        self.partialEncode(self.compileIntro())

        past_progress = 0
        hats  = audio.AudioData(self.sample_path + self.template['hats'], sampleRate=44100, numChannels=2, verbose=False)

        i = 0 # Required if there are no sections
        for i, section in enumerate(self.sections):
            self.log("Arranging section %s of %s..." % (i+1, len(self.sections)), 40.0/(len(self.sections) + 1))
            a, b = self.compileSection(i, section, hats)
            self.partialEncode(a)
            self.partialEncode(b)
            del a, b
        del hats
        self.original.unload()

        self.log("Adding ending...", 5)
        self.partialEncode(
            audio.AudioData(
                self.sample_path + self.template['splash_ends'][(i + 1) % len(self.template['splash_ends'])],
                sampleRate=44100,
                numChannels=2,
                verbose=False
            )
        )
        
        self.log("Mixing...", 5)
        self.mixwav(self.tempfile)

        if self.deleteOriginal:
            try:
                unlink(self.infile)
            except OSError:
                pass  # File could have been deleted by an eager cleanup script

        self.log("Mastering...", 5)
        self.lame(self.tempfile, self.outfile)
        unlink(self.tempfile)
        
        self.log("Adding artwork...", 20)
        self.updateTags(titleSuffix = " (Wub Machine Remix)")
        
        return self.outfile

Example 23

Project: DistrictBuilder
Source File: views.py
View license
@csrf_exempt
def getreport(request):
    """
    Get a BARD report.

    This view will write out an HTML-formatted BARD report to the directory
    given in the settings.
    
    Parameters:
        request -- An HttpRequest
    
    Returns:
        The HTML for use as a preview in the web application, along with 
        the web address of the BARD report.
    """
    global bardWorkSpaceLoaded
    if settings.DEBUG:
        print "Generating report. Is BARD loaded? %s" % bardWorkSpaceLoaded

    status = { 'status': 'failure' }
    stamp = request.POST.get('stamp','')

    # set up the temp dir and filename
    tempdir = settings.WEB_TEMP
    basename = '%s_p%d_v%d_%s' % (request.POST['plan_owner'], int(request.POST['plan_id']), int(request.POST['plan_version']), stamp)

    if not bardWorkSpaceLoaded:
        if settings.REPORTS_ENABLED != 'BARD':
            status['reason'] = 'Reports functionality is turned off.'

            if settings.DEBUG:
                print "Quitting request, because BARD is not ready."

            drop_error(tempdir, basename, 'BARD is not enabled.')
            return HttpResponse(json.dumps(status),mimetype='application/json')
        else:
            status['reason'] = 'Reports functionality is not ready. Please try again later.'
            loadbard(True)

            maxwait = 300
            while not bardWorkSpaceLoaded and maxwait > 0:
                if settings.DEBUG:
                    print 'Waiting for BARD to load...'
                maxwait -= 5
                time.sleep(5)

            if maxwait <= 0:
                status['reason'] = 'Waiting for BARD to load timed out.'
                drop_error(tempdir, basename, 'BARD load timed out.')
                return HttpResponse(json.dumps(status), mimetype='application/json') 
    # Get the variables from the request
    if request.method != 'POST':
        status['reason'] = 'Information for report wasn\'t sent via POST'
        if settings.DEBUG:
            print "Quitting request, because the request wasn't POSTed."
        drop_error(tempdir, basename, 'Requested items were not delivered via POST.')
        return HttpResponse(json.dumps(status),mimetype='application/json')

    sorted_district_list = request.POST.get('district_list').split(';')
    nseat_param = request.POST.get('nseats')
    mag_param = request.POST.get('district_mags').split(';')

    if settings.DEBUG:
        print "Got district list, getting other POSTed stuff."

    try:
        # Now we need an R Vector
        robjects.r.assign('block_ids',sorted_district_list)
        robjects.r.assign('num_seats',int(nseat_param))
        robjects.r.assign('magnitude',mag_param)
        robjects.r('bardplan = createAssignedPlan(bardmap, block_ids, nseats=num_seats, magnitude=magnitude)')
    except Exception as ex:
        status['reason'] = 'Could not create BARD plan from map.'
        if settings.DEBUG:
            print traceback.format_exc()
        drop_error(tempdir, basename, 'Could not create BARD plan from map.')
        return HttpResponse(json.dumps(status),mimetype='application/json')

    if settings.DEBUG:
        print "Created assigned plan."

    try: 
        # assign names to the districts
        robjects.r('sorted_name_list = vector()')
        names = request.POST.get('district_names').split(';')
        for district in names:
            robjects.r('sorted_name_list = c(sorted_name_list,"%s")' % district) 
        robjects.r('levels(bardplan) <- sorted_name_list')

        # Get the other report variables from the POST request.  We'll only add
        # them to the report if they're in the request
        popVar = request.POST.get('pop_var', None)
        if settings.DEBUG:
            print 'popVar',popVar
        get_named_vector(popVar,'popVar')

        popVarExtra = request.POST.get('pop_var_extra', None)
        if settings.DEBUG:
            print 'popVarExtra',popVarExtra
        get_named_vector(popVarExtra, 'popVarExtra')
        
        post_list = request.POST.get('ratio_vars').split(';')
        if settings.DEBUG:
            print 'post_list',post_list
        if len(post_list) > 0 and post_list[0] != '':
            robjects.r('ratioVars = vector()')
            # Each of the ratioVars should have been posted as a list of items separated by
            # double pipes
            for i, ratioVar in enumerate(post_list):
                ratioAttributes = ratioVar.split('||')
                get_named_vector(ratioAttributes[0], 'rden%d'%i)
                get_named_vector(ratioAttributes[2], 'rnum%d'%i)
                robjects.r("""
ratioVars = 
    c(ratioVars, 
      list("%s"=list(
        "denominator"=rden%d,
        "threshold"=%s,
        "numerators"=rnum%d)
      )
    )
""" % (ratioAttributes[3], i, ratioAttributes[1], i))
        else:
            robjects.r('ratioVars = NULL')

        splitVars = request.POST.get('split_vars', None)
        if settings.DEBUG:
            print 'splitVars',splitVars
        get_named_vector(splitVars, 'splitVars')
        
        repCompactness = request.POST.get('rep_comp', None)
        if settings.DEBUG:
            print 'repCompactness',repCompactness
        if 'true' == repCompactness:
            robjects.r('repCompactness = TRUE')
        else:
            robjects.r('repCompactness = FALSE')

        repCompactnessExtra = request.POST.get('rep_comp_extra', None)
        if settings.DEBUG:
            print 'repCompactnessExtra',repCompactnessExtra
        if 'true' == repCompactnessExtra:
            robjects.r('repCompactnessExtra = TRUE')
        else:
            robjects.r('repCompactnessExtra = FALSE')

        repSpatial = request.POST.get('rep_spatial', None)
        if settings.DEBUG:
            print 'repSpatial',repSpatial
        if 'true' == repSpatial:
            robjects.r('repSpatial = TRUE')
        else:
            robjects.r('repSpatial = FALSE')

        repSpatialExtra = request.POST.get('rep_spatial_extra', None)
        if settings.DEBUG:
            print 'repSpatialExtra',repSpatialExtra
        if 'true' == repSpatialExtra:
            robjects.r('repSpatialExtra = TRUE')
        else:
            robjects.r('repSpatialExtra = FALSE')
    except Exception as ex:
        if settings.DEBUG:
            print traceback.format_exc()
        status['reason'] = 'Exception: %s' % traceback.format_exc()
        drop_error(tempdir, basename, traceback.format_exc())
        return HttpResponse(json.dumps(status),mimetype='application/json')

    if settings.DEBUG:
        print "Variables loaded, starting BARD."

    try:
        robjects.r.assign('tempdir', tempdir)
        robjects.r('copyR2HTMLfiles(tempdir)')
        # Write to a temp file so that the reports-checker doesn't see it early
        robjects.r.assign('tempfiledir', tempfile.gettempdir())
        robjects.r.assign('filename', basename)
        robjects.r.assign('locale', translation.get_language())
        robjects.r('report = HTMLInitFile(tempfiledir, filename=filename, BackGroundColor="#BBBBEE", Title="Plan Analysis")')
        robjects.r('HTML.title("Plan Analysis", HR=2, file=report)')
        robjects.r("""PMPreport( bardplan, file=report, 
            popVar=popVar, 
            popVarExtra=popVarExtra, 
            ratioVars=ratioVars, 
            splitVars=splitVars, 
            repCompactness=repCompactness, 
            repCompactnessExtra=repCompactnessExtra,
            repSpatial=repSpatial, 
            repSpatialExtra=repSpatialExtra,
            locale=locale)""")
        robjects.r('HTMLEndFile()')

        # Now move the report back to the reports directory
        shutil.move('%s/%s.html' % (tempfile.gettempdir(), basename), tempdir)

        if settings.DEBUG:
            print "Removing pending file."

        try:
            os.unlink('%s/%s.pending' % (tempdir, basename))
        except:
            if settings.DEBUG:
                print "No pending file to remove - report finished"

        status['status'] = 'success'
        status['retval'] = '%s.html' % basename
    except Exception as ex:
        if settings.DEBUG:
            print traceback.format_exc()
        status['reason'] = 'Exception: %s' % ex
        drop_error(tempdir, basename, traceback.format_exc())

    return HttpResponse(json.dumps(status),mimetype='application/json')
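
The ".pending" file unlinked above is a marker telling pollers that the report is still being generated. A simplified, self-contained sketch of one way such a marker protocol can work — not DistrictBuilder's exact flow, and the helper name is ours:

import os

def publish_report(tempdir, basename, render):
    """Render a report, removing its '.pending' marker when done."""
    pending = os.path.join(tempdir, basename + ".pending")
    open(pending, "w").close()              # signal: not ready yet
    try:
        render(os.path.join(tempdir, basename + ".html"))
    finally:
        try:
            os.unlink(pending)              # marker may already be gone
        except FileNotFoundError:
            pass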

Example 24

Project: pymo
Source File: test_file.py
View license
    def testIteration(self):
        # Test the complex interaction when mixing file-iteration and the
        # various read* methods.
        dataoffset = 16384
        filler = b"ham\n"
        assert not dataoffset % len(filler), \
            "dataoffset must be multiple of len(filler)"
        nchunks = dataoffset // len(filler)
        testlines = [
            b"spam, spam and eggs\n",
            b"eggs, spam, ham and spam\n",
            b"saussages, spam, spam and eggs\n",
            b"spam, ham, spam and eggs\n",
            b"spam, spam, spam, spam, spam, ham, spam\n",
            b"wonderful spaaaaaam.\n"
        ]
        methods = [("readline", ()), ("read", ()), ("readlines", ()),
                   ("readinto", (array("b", b" "*100),))]

        try:
            # Prepare the testfile
            bag = self.open(TESTFN, "wb")
            bag.write(filler * nchunks)
            bag.writelines(testlines)
            bag.close()
            # Test for appropriate errors mixing read* and iteration
            for methodname, args in methods:
                f = self.open(TESTFN, 'rb')
                if next(f) != filler:
                    self.fail("Broken testfile")
                meth = getattr(f, methodname)
                meth(*args)  # This simply shouldn't fail
                f.close()

            # Test to see if harmless (by accident) mixing of read* and
            # iteration still works. This depends on the size of the internal
            # iteration buffer (currently 8192,) but we can test it in a
            # flexible manner.  Each line in the bag o' ham is 4 bytes
            # ("h", "a", "m", "\n"), so 4096 lines of that should get us
            # exactly on the buffer boundary for any power-of-2 buffersize
            # between 4 and 16384 (inclusive).
            f = self.open(TESTFN, 'rb')
            for i in range(nchunks):
                next(f)
            testline = testlines.pop(0)
            try:
                line = f.readline()
            except ValueError:
                self.fail("readline() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("readline() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            testline = testlines.pop(0)
            buf = array("b", b"\x00" * len(testline))
            try:
                f.readinto(buf)
            except ValueError:
                self.fail("readinto() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            line = buf.tostring()
            if line != testline:
                self.fail("readinto() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))

            testline = testlines.pop(0)
            try:
                line = f.read(len(testline))
            except ValueError:
                self.fail("read() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("read() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            try:
                lines = f.readlines()
            except ValueError:
                self.fail("readlines() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if lines != testlines:
                self.fail("readlines() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (lines, testlines))
            # Reading after iteration hit EOF shouldn't hurt either
            f = self.open(TESTFN, 'rb')
            try:
                for line in f:
                    pass
                try:
                    f.readline()
                    f.readinto(buf)
                    f.read()
                    f.readlines()
                except ValueError:
                    self.fail("read* failed after next() consumed file")
            finally:
                f.close()
        finally:
            os.unlink(TESTFN)
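
Both this test and the next wrap the entire body in try/finally so that os.unlink(TESTFN) always runs. Modern unittest code usually gets the same guarantee from addCleanup; a minimal sketch:

import os
import tempfile
import unittest

class UnlinkCleanupTest(unittest.TestCase):
    def setUp(self):
        fd, self.path = tempfile.mkstemp()
        os.close(fd)
        # Registered cleanups run even if the test body raises.
        self.addCleanup(os.unlink, self.path)

    def test_roundtrip(self):
        with open(self.path, "wb") as f:
            f.write(b"ham\n")
        with open(self.path, "rb") as f:
            self.assertEqual(f.read(), b"ham\n")

if __name__ == "__main__":
    unittest.main()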

Example 25

Project: pymo
Source File: test_file2k.py
View license
    def testIteration(self):
        # Test the complex interaction when mixing file-iteration and the
        # various read* methods. Ostensibly, the mixture could just be tested
        # to work when it should work according to the Python language,
        # instead of fail when it should fail according to the current CPython
        # implementation.  People don't always program Python the way they
        # should, though, and the implementation might change in subtle ways,
        # so we explicitly test for errors, too; the test will just have to
        # be updated when the implementation changes.
        dataoffset = 16384
        filler = "ham\n"
        assert not dataoffset % len(filler), \
            "dataoffset must be multiple of len(filler)"
        nchunks = dataoffset // len(filler)
        testlines = [
            "spam, spam and eggs\n",
            "eggs, spam, ham and spam\n",
            "saussages, spam, spam and eggs\n",
            "spam, ham, spam and eggs\n",
            "spam, spam, spam, spam, spam, ham, spam\n",
            "wonderful spaaaaaam.\n"
        ]
        methods = [("readline", ()), ("read", ()), ("readlines", ()),
                   ("readinto", (array("c", " "*100),))]

        try:
            # Prepare the testfile
            bag = open(TESTFN, "w")
            bag.write(filler * nchunks)
            bag.writelines(testlines)
            bag.close()
            # Test for appropriate errors mixing read* and iteration
            for methodname, args in methods:
                f = open(TESTFN)
                if f.next() != filler:
                    self.fail("Broken testfile")
                meth = getattr(f, methodname)
                try:
                    meth(*args)
                except ValueError:
                    pass
                else:
                    self.fail("%s%r after next() didn't raise ValueError" %
                                     (methodname, args))
                f.close()

            # Test to see if harmless (by accident) mixing of read* and
            # iteration still works. This depends on the size of the internal
            # iteration buffer (currently 8192,) but we can test it in a
            # flexible manner.  Each line in the bag o' ham is 4 bytes
            # ("h", "a", "m", "\n"), so 4096 lines of that should get us
            # exactly on the buffer boundary for any power-of-2 buffersize
            # between 4 and 16384 (inclusive).
            f = open(TESTFN)
            for i in range(nchunks):
                f.next()
            testline = testlines.pop(0)
            try:
                line = f.readline()
            except ValueError:
                self.fail("readline() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("readline() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            testline = testlines.pop(0)
            buf = array("c", "\x00" * len(testline))
            try:
                f.readinto(buf)
            except ValueError:
                self.fail("readinto() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            line = buf.tostring()
            if line != testline:
                self.fail("readinto() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))

            testline = testlines.pop(0)
            try:
                line = f.read(len(testline))
            except ValueError:
                self.fail("read() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("read() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            try:
                lines = f.readlines()
            except ValueError:
                self.fail("readlines() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if lines != testlines:
                self.fail("readlines() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (lines, testlines))
            # Reading after iteration hit EOF shouldn't hurt either
            f = open(TESTFN)
            try:
                for line in f:
                    pass
                try:
                    f.readline()
                    f.readinto(buf)
                    f.read()
                    f.readlines()
                except ValueError:
                    self.fail("read* failed after next() consumed file")
            finally:
                f.close()
        finally:
            os.unlink(TESTFN)

Example 26

Project: python-bugzilla
Source File: rw_functional.py
View license
    def _test8Attachments(self):
        """
        Get and set attachments for a bug
        """
        bz = self.bzclass(url=self.url)
        getallbugid = "663674"
        setbugid = "461686"
        cmd = "bugzilla attach "
        testfile = "../tests/data/bz-attach-get1.txt"

        # Add attachment as CLI option
        setbug = bz.getbug(setbugid, extra_fields=["attachments"])
        orignumattach = len(setbug.attachments)

        # Add attachment from CLI with mime guessing
        desc1 = "python-bugzilla cli upload %s" % datetime.datetime.today()
        out1 = tests.clicomm(cmd + "%s --description \"%s\" --file %s" %
                             (setbugid, desc1, testfile), bz)

        desc2 = "python-bugzilla cli upload %s" % datetime.datetime.today()
        out2 = tests.clicomm(cmd + "%s --file test --description \"%s\"" %
                             (setbugid, desc2), bz, stdin=open(testfile))

        # Expected output format:
        #   Created attachment <attachid> on bug <bugid>

        setbug.refresh()
        self.assertEquals(len(setbug.attachments), orignumattach + 2)
        self.assertEquals(setbug.attachments[-2]["description"], desc1)
        self.assertEquals(setbug.attachments[-2]["id"],
                          int(out1.splitlines()[2].split()[2]))
        self.assertEquals(setbug.attachments[-1]["description"], desc2)
        self.assertEquals(setbug.attachments[-1]["id"],
                          int(out2.splitlines()[2].split()[2]))
        attachid = setbug.attachments[-2]["id"]

        # Set attachment flags
        self.assertEquals(setbug.attachments[-1]["flags"], [])
        bz.updateattachmentflags(setbug.id, setbug.attachments[-1]["id"],
                                 "review", status="+")
        setbug.refresh()

        self.assertEquals(len(setbug.attachments[-1]["flags"]), 1)
        self.assertEquals(setbug.attachments[-1]["flags"][0]["name"], "review")
        self.assertEquals(setbug.attachments[-1]["flags"][0]["status"], "+")

        bz.updateattachmentflags(setbug.id, setbug.attachments[-1]["id"],
                                 "review", status="X")
        setbug.refresh()
        self.assertEquals(setbug.attachments[-1]["flags"], [])


        # Get attachment, verify content
        out = tests.clicomm(cmd + "--get %s" % attachid, bz).splitlines()

        # Expect format:
        #   Wrote <filename>
        fname = out[2].split()[1].strip()

        self.assertEquals(len(out), 3)
        self.assertEquals(fname, "bz-attach-get1.txt")
        self.assertEquals(open(fname).read(),
                          open(testfile).read())
        os.unlink(fname)

        # Get all attachments
        getbug = bz.getbug(getallbugid)
        getbug.autorefresh = True
        numattach = len(getbug.attachments)
        out = tests.clicomm(cmd + "--getall %s" % getallbugid, bz).splitlines()

        self.assertEquals(len(out), numattach + 2)
        fnames = [l.split(" ", 1)[1].strip() for l in out[2:]]
        self.assertEquals(len(fnames), numattach)
        for f in fnames:
            if not os.path.exists(f):
                raise AssertionError("filename '%s' not found" % f)
            os.unlink(f)

Example 27

Project: piku
Source File: piku.py
View license
def spawn_app(app, deltas={}):
    """Create all workers for an app"""
    
    app_path = join(APP_ROOT, app)
    procfile = join(app_path, 'Procfile')
    workers = parse_procfile(procfile)
    ordinals = defaultdict(lambda:1)
    worker_count = {k:1 for k in workers.keys()}

    # the Python virtualenv
    virtualenv_path = join(ENV_ROOT, app)
    # Settings shipped with the app
    env_file = join(APP_ROOT, app, 'ENV')
    # Custom overrides
    settings = join(ENV_ROOT, app, 'ENV')
    # Live settings
    live = join(ENV_ROOT, app, 'LIVE_ENV')
    # Scaling
    scaling = join(ENV_ROOT, app, 'SCALING')

    # Bootstrap environment
    env = {
        'APP': app,
        'LOG_ROOT': LOG_ROOT,
        'HOME': environ['HOME'],
        'USER': environ['USER'],
        'PATH': environ['PATH'],
        'PWD': dirname(env_file),
        'VIRTUAL_ENV': virtualenv_path,
    }
    
    # Load environment variables shipped with repo (if any)
    if exists(env_file):
        env.update(parse_settings(env_file, env))
    
    # Override with custom settings (if any)
    if exists(settings):
        env.update(parse_settings(settings, env))

    if 'web' in workers or 'wsgi' in workers:
        # Pick a port if none defined and we're not running under nginx
        if 'PORT' not in env and 'NGINX_SERVER_NAME' not in env:
            env['PORT'] = str(get_free_port())

        # Safe default for bind address            
        if 'BIND_ADDRESS' not in env:
            env['BIND_ADDRESS'] = '127.0.0.1'
                
        # Set up nginx if we have NGINX_SERVER_NAME set
        if 'NGINX_SERVER_NAME' in env:
            nginx = command_output("nginx -V")
            nginx_ssl = "443 ssl"
            if "--with-http_v2_module" in nginx:
                nginx_ssl += " http2"
            elif "--with-http_spdy_module" in nginx and "nginx/1.6.2" not in nginx: # avoid Raspbian bug
                nginx_ssl += " spdy"
        
            env.update({ 
                'NGINX_SSL': nginx_ssl,
                'NGINX_ROOT': NGINX_ROOT,
            })
            
            if 'wsgi' in workers:
                sock = join(NGINX_ROOT, "%s.sock" % app)
                env['NGINX_SOCKET'] = env['BIND_ADDRESS'] = "unix://" + sock
                if 'PORT' in env:
                    del env['PORT']
            else:
                env['NGINX_SOCKET'] = "%(BIND_ADDRESS)s:%(PORT)s" % env 
        
            domain = env['NGINX_SERVER_NAME'].split()[0]       
            key, crt = [join(NGINX_ROOT,'%s.%s' % (app,x)) for x in ['key','crt']]
            if not exists(key):
                call('openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=NY/L=New York/O=Piku/OU=Self-Signed/CN=%(domain)s" -keyout %(key)s -out %(crt)s' % locals(), shell=True)
            
            # restrict access to server from CloudFlare IP addresses
            acl = []
            if env.get('NGINX_CLOUDFLARE_ACL', 'false').lower() == 'true':
                try:
                    cf = loads(urlopen('https://api.cloudflare.com/client/v4/ips').read())
                except Exception, e:
                    cf = defaultdict()
                    echo("-----> Could not retrieve CloudFlare IP ranges: %s" % e.text, fg="red")
                if cf['success'] == True:
                    for i in cf['result']['ipv4_cidrs']:
                        acl.append("allow %s;" % i)
                    for i in cf['result']['ipv6_cidrs']:
                        acl.append("allow %s;" % i)
                    # allow access from controlling machine
                    if 'SSH_CLIENT' in environ:
                        remote_ip = environ['SSH_CLIENT'].split()[0]
                        echo("-----> Adding your IP (%s) to nginx ACL" % remote_ip)
                        acl.append("allow %s;" % remote_ip)
                    acl.extend(["allow 127.0.0.1;","deny all;"])
            env['NGINX_ACL'] = " ".join(acl)

            env['NGINX_STATIC_MAPPINGS'] = ''
            
            # Get a mapping of /url:path1,/url2:path2
            static_paths = env.get('NGINX_STATIC_PATHS','')
            if len(static_paths):
                try:
                    items = static_paths.split(',')
                    for item in items:
                        static_url, static_path = item.split(':')
                        if static_path[0] != '/':
                            static_path = join(app_path, static_path)
                        env['NGINX_STATIC_MAPPINGS'] = env['NGINX_STATIC_MAPPINGS'] + NGINX_STATIC_MAPPING % {'url': static_url, 'path': static_path}
                except Exception as e:
                    print "Error %s in static path spec: should be /url1:path1[,/url2:path2], ignoring." % e
                    env['NGINX_STATIC_MAPPINGS'] = ''

            buffer = expandvars(NGINX_TEMPLATE, env)
            echo("-----> Setting up nginx for '%s:%s'" % (app, env['NGINX_SERVER_NAME']))
            with open(join(NGINX_ROOT,"%s.conf" % app), "w") as h:
                h.write(buffer)            

    # Configured worker count
    if exists(scaling):
        worker_count.update({k: int(v) for k,v in parse_procfile(scaling).iteritems()})
    
    to_create = {}
    to_destroy = {}    
    for k, v in worker_count.iteritems():
        to_create[k] = range(1,worker_count[k] + 1)
        if k in deltas and deltas[k]:
            to_create[k] = range(1, worker_count[k] + deltas[k] + 1)
            if deltas[k] < 0:
                to_destroy[k] = range(worker_count[k], worker_count[k] + deltas[k], -1)
            worker_count[k] = worker_count[k]+deltas[k]

    # Save current settings
    write_config(live, env)
    write_config(scaling, worker_count, ':')
    
    # Create new workers
    for k, v in to_create.iteritems():
        for w in v:
            enabled = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, k, w))
            if not exists(enabled):
                echo("-----> Spawning '%s:%s.%d'" % (app, k, w), fg='green')
                spawn_worker(app, k, workers[k], env, w)
        
    # Remove unnecessary workers (leave logfiles)
    for k, v in to_destroy.iteritems():
        for w in v:
            enabled = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, k, w))
            if exists(enabled):
                echo("-----> Terminating '%s:%s.%d'" % (app, k, w), fg='yellow')
                unlink(enabled)
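
piku scales workers down simply by unlinking each per-worker uwsgi ini file; the uwsgi emperor notices the missing vassal config and stops the matching process. A minimal sketch of that scale-down step (the directory constant is illustrative and the helper name is ours):

import os
from os.path import exists, join

UWSGI_ENABLED = "/home/piku/.piku/uwsgi-enabled"   # illustrative path

def scale_down(app, kind, current, target):
    """Remove ini files for workers numbered target+1 .. current."""
    for w in range(current, target, -1):
        ini = join(UWSGI_ENABLED, '%s_%s.%d.ini' % (app, kind, w))
        if exists(ini):
            os.unlink(ini)   # the uwsgi emperor reaps the matching worker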

Example 28

Project: rdflib
Source File: test_graph_formula.py
View license
@nottest  # do not run on its own - only as part of generator
def testFormulaStore(store="default", configString=None):
    try:
        g = ConjunctiveGraph(store=store)
    except ImportError:
        raise SkipTest("Dependencies for store '%s' not available!" % store)

    if configString:
        g.destroy(configString)
        g.open(configString)
    else:
        if store == 'SQLite':
            _, path = mkstemp(prefix='test', dir='/tmp', suffix='.sqlite')
            g.open(path, create=True)
        else:
            g.open(mkdtemp(), create=True)

    g.parse(data=testN3, format="n3")
    try:
        for s, p, o in g.triples((None, implies, None)):
            formulaA = s
            formulaB = o

        assert type(formulaA) == QuotedGraph and type(formulaB) == QuotedGraph
        # a = URIRef('http://test/a')
        b = URIRef('http://test/b')
        c = URIRef('http://test/c')
        d = URIRef('http://test/d')
        v = Variable('y')

        universe = ConjunctiveGraph(g.store)

        #test formula as terms
        assert len(list(universe.triples((formulaA, implies, formulaB)))) == 1

        #test variable as term and variable roundtrip
        assert len(list(formulaB.triples((None, None, v)))) == 1
        for s, p, o in formulaB.triples((None, d, None)):
            if o != c:
                assert isinstance(o, Variable)
                assert o == v
        s = list(universe.subjects(RDF.type, RDFS.Class))[0]
        assert isinstance(s, BNode)
        assert len(list(universe.triples((None, implies, None)))) == 1
        assert len(list(universe.triples((None, RDF.type, None)))) == 1
        assert len(list(formulaA.triples((None, RDF.type, None)))) == 1
        assert len(list(formulaA.triples((None, None, None)))) == 2
        assert len(list(formulaB.triples((None, None, None)))) == 2
        assert len(list(universe.triples((None, None, None)))) == 3
        assert len(list(formulaB.triples(
            (None, URIRef('http://test/d'), None)))) == 2
        assert len(list(universe.triples(
            (None, URIRef('http://test/d'), None)))) == 1

        # #context tests
        # #test contexts with triple argument
        # assert len(list(universe.contexts((a, d, c)))) == 1, \
        #                     [ct for ct in universe.contexts((a, d, c))]

        # FAIL: test.test_graph_formula.testFormulaStores('SQLite',)
        # --------------------------------------------------------------------
        # Traceback (most recent call last):
        #   File ".../site-packages/nose/case.py", line 197, in runTest
        #     self.test(*self.arg)
        #   File ".../test_graph_formula.py", line 80, in testFormulaStore
        #     [ct for ct in universe.contexts((a, d, c))]
        # AssertionError: [
        #     <Graph identifier=N52fd4417ef7641089b2e4045ef19ad87
        #        (<class 'rdflib.graph.Graph'>)>,
        #     <Graph identifier=_:Formula16 (<class 'rdflib.graph.Graph'>)>
        #     ]

        #Remove test cases
        universe.remove((None, implies, None))
        assert len(list(universe.triples((None, implies, None)))) == 0
        assert len(list(formulaA.triples((None, None, None)))) == 2
        assert len(list(formulaB.triples((None, None, None)))) == 2

        formulaA.remove((None, b, None))
        assert len(list(formulaA.triples((None, None, None)))) == 1
        formulaA.remove((None, RDF.type, None))
        assert len(list(formulaA.triples((None, None, None)))) == 0

        universe.remove((None, RDF.type, RDFS.Class))

        #remove_context tests
        universe.remove_context(formulaB)
        assert len(list(universe.triples((None, RDF.type, None)))) == 0
        assert len(universe) == 1
        assert len(formulaB) == 0

        universe.remove((None, None, None))
        assert len(universe) == 0

        g.close()
        if store == 'SQLite':
            os.unlink(path)
        else:
            g.store.destroy(configString)
    except:
        g.close()
        if store == 'SQLite':
            os.unlink(path)
        else:
            g.store.destroy(configString)
        raise
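
The cleanup above — close the graph and unlink the temporary SQLite file — is duplicated in both the success path and the except handler. try/finally expresses it once; a minimal sketch:

import os
from tempfile import mkstemp

def with_sqlite_store(run):
    """Run a callable against a throwaway SQLite file, always cleaning up."""
    fd, path = mkstemp(prefix='test', suffix='.sqlite')
    os.close(fd)
    try:
        run(path)
    finally:
        os.unlink(path)   # executes on success and on any exception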

Example 29

Project: coccigrep
Source File: coccigrep.py
View license
    def run(self, files):
        """
        Run the search against the files and directories given as arguments.

        This function does the main job. It runs spatch with the correct
        parameters via subprocess, or uses multiprocessing if a concurrency
        level greater than 1 has been requested.

        :param files: list of filenames and directory names
        :type files: list of str
        :raise: :class:`CocciRunException` or :class:`CocciConfigException`
        """

        if len(files) == 0:
            raise CocciRunException("Can't use coccigrep without files "
                "to search")

        # get version of spatch
        if self.spatch_newer_than("1.0.0-rc6"):
            cocci_op = "=~"
        else:
            cocci_op = "~="
        # create tmp cocci file:
        tmp_cocci_file = NamedTemporaryFile(suffix=".cocci", delete=not self.verbose)
        tmp_cocci_file_name = tmp_cocci_file.name
        # open file with name matching operation
        cocci_file = open(self.operations[self.operation], 'r')
        # get the string and build template
        cocci_tmpl = cocci_file.read()
        cocci_smpl_tmpl = Template(cocci_tmpl)
        cocci_file.close()
        # do substitution
        cocci_smpl = cocci_smpl_tmpl.substitute(type=self.type,
            attribute=self.attribute, cocci_regexp_equal=cocci_op)
        cocci_grep = cocci_smpl + CocciGrep.cocci_python

        if sys.version < '3':
            tmp_cocci_file.write(cocci_grep)
        else:
            tmp_cocci_file.write(bytes(cocci_grep, 'UTF-8'))
        tmp_cocci_file.flush()

        # launch spatch
        output = "".encode('utf8')
        # Launch parallel spatch
        if self.ncpus > 1 and len(files) > 1:
            fseq = []
            splitsize = 1.0 / self.ncpus * len(files)
            for i in range(self.ncpus):
                start = int(round(i * splitsize))
                end = int(round((i + 1) * splitsize))
                rfiles = files[start:end]
                if len(rfiles) >= 1:
                    fseq.append(rfiles)
            for sub_files in fseq:
                cmd = [self.spatch]
                cmd += self.options
                cmd += ["-sp_file", tmp_cocci_file.name]
                cmd += sub_files
                sprocess = CocciProcess(cmd, self.verbose)
                sprocess.start()
                self.process.append(sprocess)
            for process in self.process:
                ret = process.recv().decode('utf8')
                process.join()
                if not ret.startswith('cexceptions\n'):
                    # CocciProcess return a serialized exception
                    # in case of exception
                    output += ret.encode('utf8')
                    continue
                import pickle
                err = pickle.loads(ret)
                _raise_run_err(err, cmd)
            tmp_cocci_file.close()
        # Fallback to one spatch
        else:
            cmd = [self.spatch]
            cmd += self.options
            cmd += ["-sp_file", tmp_cocci_file.name]
            cmd += files
            try:
                if self.verbose:
                    stderr.write("Running: %s.\n" % " ".join(cmd))
                    output = Popen(cmd, stdout=PIPE).communicate()[0]
                else:
                    output = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate()[0]
            except OSError as err:
                unlink(tmp_cocci_file_name)
                _raise_run_err(err, cmd)

            tmp_cocci_file.close()

        prevfile = None
        prevline = None
        self.matches = []
        for ematch in output.decode('utf8').split("\n"):
            try:
                (efile, eline, ecol, elinend, ecolend) = ematch.split(":")
                nmatch = CocciMatch(efile, eline, ecol, elinend, ecolend)
                # if there is equality then we will already display the line
                if (efile == prevfile) and (eline == prevline):
                    continue
                prevfile = efile
                prevline = eline
                self.matches.append(nmatch)
            except ValueError:
                pass
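
coccigrep relies on NamedTemporaryFile(delete=...) so the generated .cocci file is unlinked automatically on close unless verbose mode wants it kept for debugging. A minimal sketch of that toggle (the helper name is ours; keeping the file open while an external tool reads it assumes POSIX semantics):

import os
from tempfile import NamedTemporaryFile

def make_patch_file(contents, keep=False):
    """Write contents to a temporary .cocci file.

    With keep=False the file is unlinked automatically when closed;
    with keep=True it survives for post-mortem inspection.
    """
    tmp = NamedTemporaryFile(suffix=".cocci", delete=not keep)
    tmp.write(contents.encode("utf-8"))
    tmp.flush()        # make the bytes visible to the external tool
    return tmp         # caller closes it when the run is finished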

Example 30

Project: scons
Source File: dblite.py
View license
def _exercise():
  db = open("tmp", "n")
  assert len(db) == 0
  db["foo"] = "bar"
  assert db["foo"] == "bar"
  db[unicode("ufoo")] = unicode("ubar")
  assert db[unicode("ufoo")] == unicode("ubar")
  db.sync()
  db = open("tmp", "c")
  assert len(db) == 2, len(db)
  assert db["foo"] == "bar"
  db["bar"] = "foo"
  assert db["bar"] == "foo"
  db[unicode("ubar")] = unicode("ufoo")
  assert db[unicode("ubar")] == unicode("ufoo")
  db.sync()
  db = open("tmp", "r")
  assert len(db) == 4, len(db)
  assert db["foo"] == "bar"
  assert db["bar"] == "foo"
  assert db[unicode("ufoo")] == unicode("ubar")
  assert db[unicode("ubar")] == unicode("ufoo")
  try:
    db.sync()
  except IOError, e:
    assert str(e) == "Read-only database: tmp.dblite"
  else:
    raise RuntimeError("IOError expected.")
  db = open("tmp", "w")
  assert len(db) == 4
  db["ping"] = "pong"
  db.sync()
  try:
    db[(1,2)] = "tuple"
  except TypeError, e:
    assert str(e) == "key `(1, 2)' must be a string but is <type 'tuple'>", str(e)
  else:
    raise RuntimeError("TypeError exception expected")
  try:
    db["list"] = [1,2]
  except TypeError, e:
    assert str(e) == "value `[1, 2]' must be a string but is <type 'list'>", str(e)
  else:
    raise RuntimeError("TypeError exception expected")
  db = open("tmp", "r")
  assert len(db) == 5
  db = open("tmp", "n")
  assert len(db) == 0
  dblite._open("tmp.dblite", "w")
  db = open("tmp", "r")
  dblite._open("tmp.dblite", "w").write("x")
  try:
    db = open("tmp", "r")
  except pickle.UnpicklingError:
    pass
  else:
    raise RuntimeError("pickle exception expected.")
  global ignore_corrupt_dbfiles
  ignore_corrupt_dbfiles = 2
  db = open("tmp", "r")
  assert len(db) == 0
  os.unlink("tmp.dblite")
  try:
    db = open("tmp", "w")
  except IOError, e:
    assert str(e) == "[Errno 2] No such file or directory: 'tmp.dblite'", str(e)
  else:
    raise RuntimeError("IOError expected.")
  print "OK"

Example 31

Project: m2wsgi
Source File: base.py
View license
    def process_request(self,req):
        """Process the given Request object.

        This method is the guts of the Mongrel2 => WSGI gateway.  It translates
        the mongrel2 request into a WSGI environ, invokes the application and
        sends the resulting response back to Mongrel2.
        """
        #  Mongrel2 uses JSON requests internally.
        #  We don't want them in our WSGI.
        if req.headers.get("METHOD","") == "JSON":
            return
        #  OK, it's a legitimate full HTTP request.
        #  Route it through the WSGI app.
        environ = {}
        responder = self.ResponderClass(req)
        try:
            #  If there's an async upload in progress, we have two options.
            #  If they sent a Content-Length header then we can do a streaming
            #  read from the file as it is being uploaded.  If there's no
            #  Content-Length then we have to wait for it all to upload (as
            #  there's no guarantee that the same handler will get both the
            #  start and end events for any upload).
            if "x-mongrel2-upload-start" in req.headers:
                if req.headers.get("content-length",""):
                    #  We'll streaming read it on the -start event,
                    #  so ignore the -done event.
                    if "x-mongrel2-upload-done" in req.headers:
                        return
                else:
                    #  We have to wait for the -done event,
                    #  so ignore the -start event.
                    if "x-mongrel2-upload-done" not in req.headers:
                        return
            #  Grab the full WSGI environ.
            #  This might error out, e.g. if someone tries any funny business
            #  with the mongrel2 upload headers.
            environ = self.get_wsgi_environ(req,environ)
            #  Call the WSGI app.
            #  Write all non-empty chunks, then clean up.
            chunks = self.application(environ,responder.start_response)
            try:
                for chunk in chunks:
                    if chunk:
                        responder.write(chunk)
                responder.finish()
            finally:
                if hasattr(chunks,"close"):
                    chunks.close()
        except Exception:
            print >>sys.stderr, "------- request handling error -------"
            traceback.print_exc()
            sys.stderr.write(str(environ) + "\n\n")
            print >>sys.stderr, "------------------------------ -------"
            #  Send an error response if we can.
            #  Always close the connection on error.
            if not responder.has_started:
                responder.start_response("500 Server Error",[],sys.exc_info())
                responder.write("server error")
                responder.finish()
            req.disconnect()
        finally:
            #  Make sure that the upload file is cleaned up.
            #  Mongrel doesn't reap these files itself, because the handler
            #  might e.g. move them somewhere.  We just read from them.
            try:
                environ["wsgi.input"].close()
            except (KeyError, AttributeError):
                pass
            upload_file = req.headers.get("x-mongrel2-upload-start",None)
            if upload_file:
                upload_file2 = req.headers.get("x-mongrel2-upload-done",None)
                if upload_file == upload_file2:
                    try:
                        os.unlink(upload_file)
                    except EnvironmentError:
                        pass
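
Mongrel2 leaves async-upload spool files on disk for the handler to reap; the code above unlinks one only when the -start and -done headers name the same file, i.e. this handler saw the whole upload. A condensed sketch of that guard:

import os

def reap_upload(headers):
    """Unlink a Mongrel2 upload spool file once fully processed."""
    started = headers.get("x-mongrel2-upload-start")
    done = headers.get("x-mongrel2-upload-done")
    if started and started == done:
        try:
            os.unlink(started)
        except EnvironmentError:
            pass   # already moved or removed by someone else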

Example 32

Project: livecd-tools
Source File: creator.py
View license
    def install(self, repo_urls = {}):
        """Install packages into the install root.

        This function installs the packages listed in the supplied kickstart
        into the install root. By default, the packages are installed from the
        repository URLs specified in the kickstart.

        repo_urls -- a dict which maps a repository name to a repository URL;
                     if supplied, this causes any repository URLs specified in
                     the kickstart to be overridden.

        """
        yum_conf = self._mktemp(prefix = "yum.conf-")

        ayum = LiveCDYum(releasever=self.releasever, useplugins=self.useplugins)
        ayum.setup(yum_conf, self._instroot, cacheonly=self.cacheonly)

        for repo in kickstart.get_repos(self.ks, repo_urls):
            (name, baseurl, mirrorlist, proxy, inc, exc, cost, sslverify) = repo

            yr = ayum.addRepository(name, baseurl, mirrorlist)
            if inc:
                yr.includepkgs = inc
            if exc:
                yr.exclude = exc
            if proxy:
                yr.proxy = proxy
            if cost is not None:
                yr.cost = cost
            yr.sslverify = sslverify
        ayum.setup(yum_conf, self._instroot)

        if kickstart.exclude_docs(self.ks):
            rpm.addMacro("_excludedocs", "1")
        if not kickstart.selinux_enabled(self.ks):
            rpm.addMacro("__file_context_path", "%{nil}")
        if kickstart.inst_langs(self.ks) is not None:
            rpm.addMacro("_install_langs", kickstart.inst_langs(self.ks))

        try:
            self.__select_packages(ayum)
            self.__select_groups(ayum)
            self.__deselect_packages(ayum)

            ayum.runInstall()
        except yum.Errors.RepoError, e:
            raise CreatorError("Unable to download from repo : %s" % (e,))
        except yum.Errors.YumBaseError, e:
            raise CreatorError("Unable to install: %s" % (e,))
        finally:
            ayum.closeRpmDB()
            ayum.close()
            os.unlink(yum_conf)

        # do some clean up to avoid lvm info leakage.  this sucks.
        for subdir in ("cache", "backup", "archive"):
            lvmdir = self._instroot + "/etc/lvm/" + subdir
            try:
                for f in os.listdir(lvmdir):
                    os.unlink(lvmdir + "/" + f)
            except OSError:
                pass
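
The LVM cleanup loop above empties a directory file by file, tolerating a directory that does not exist. A Python 3 restatement (the helper name is ours):

import os

def clear_dir(path):
    """Unlink every regular file directly under path; ignore a missing dir."""
    try:
        names = os.listdir(path)
    except FileNotFoundError:
        return
    for name in names:
        full = os.path.join(path, name)
        if os.path.isfile(full):
            os.unlink(full)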

Example 33

Project: imagrium
Source File: test_file.py
View license
    def testIteration(self):
        # Test the complex interaction when mixing file-iteration and the
        # various read* methods. Ostensibly, the mixture could just be tested
        # to work when it should work according to the Python language,
        # instead of fail when it should fail according to the current CPython
        # implementation.  People don't always program Python the way they
        # should, though, and the implementation might change in subtle ways,
        # so we explicitly test for errors, too; the test will just have to
        # be updated when the implementation changes.
        dataoffset = 16384
        filler = "ham\n"
        assert not dataoffset % len(filler), \
            "dataoffset must be multiple of len(filler)"
        nchunks = dataoffset // len(filler)
        testlines = [
            "spam, spam and eggs\n",
            "eggs, spam, ham and spam\n",
            "saussages, spam, spam and eggs\n",
            "spam, ham, spam and eggs\n",
            "spam, spam, spam, spam, spam, ham, spam\n",
            "wonderful spaaaaaam.\n"
        ]
        methods = [("readline", ()), ("read", ()), ("readlines", ()),
                   ("readinto", (array("c", " "*100),))]

        try:
            # Prepare the testfile
            bag = open(TESTFN, "w")
            bag.write(filler * nchunks)
            bag.writelines(testlines)
            bag.close()
            # Test for appropriate errors mixing read* and iteration
            for methodname, args in methods:
                f = open(TESTFN)
                if f.next() != filler:
                    self.fail("Broken testfile")
                meth = getattr(f, methodname)
                try:
                    meth(*args)
                except ValueError:
                    pass
                else:
                    self.fail("%s%r after next() didn't raise ValueError" %
                                     (methodname, args))
                f.close()

            # Test to see if harmless (by accident) mixing of read* and
            # iteration still works. This depends on the size of the internal
            # iteration buffer (currently 8192,) but we can test it in a
            # flexible manner.  Each line in the bag o' ham is 4 bytes
            # ("h", "a", "m", "\n"), so 4096 lines of that should get us
            # exactly on the buffer boundary for any power-of-2 buffersize
            # between 4 and 16384 (inclusive).
            f = open(TESTFN)
            for i in range(nchunks):
                f.next()
            testline = testlines.pop(0)
            try:
                line = f.readline()
            except ValueError:
                self.fail("readline() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("readline() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            testline = testlines.pop(0)
            buf = array("c", "\x00" * len(testline))
            try:
                f.readinto(buf)
            except ValueError:
                self.fail("readinto() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            line = buf.tostring()
            if line != testline:
                self.fail("readinto() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))

            testline = testlines.pop(0)
            try:
                line = f.read(len(testline))
            except ValueError:
                self.fail("read() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("read() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            try:
                lines = f.readlines()
            except ValueError:
                self.fail("readlines() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if lines != testlines:
                self.fail("readlines() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            # Reading after iteration hit EOF shouldn't hurt either
            f = open(TESTFN)
            try:
                for line in f:
                    pass
                try:
                    f.readline()
                    f.readinto(buf)
                    f.read()
                    f.readlines()
                except ValueError:
                    self.fail("read* failed after next() consumed file")
            finally:
                f.close()
        finally:
            os.unlink(TESTFN)

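The common thread in the test example above is scratch-file hygiene: create TESTFN, exercise it, and guarantee removal with os.unlink() in a finally clause so a failing assertion cannot leak the file. A minimal sketch of just that shape (the TESTFN value here is a stand-in; only the try/finally/unlink structure is taken from the example):

import os

TESTFN = "@test_scratch"  # stand-in for the tests' TESTFN constant

with open(TESTFN, "w") as f:
    f.write("ham\n" * 4)
try:
    with open(TESTFN) as f:
        assert f.readline() == "ham\n"
finally:
    os.unlink(TESTFN)  # runs whether or not the assertion fails
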
Example 35

Project: imagrium
Source File: test_file2k.py
View license
    @unittest.skipIf(test_support.is_jython, "FIXME: Not working on Jython")
    def testIteration(self):
        # Test the complex interaction when mixing file-iteration and the
        # various read* methods. Ostensibly, the mixture could just be tested
        # to work when it should work according to the Python language,
        # instead of fail when it should fail according to the current CPython
        # implementation.  People don't always program Python the way they
        # should, though, and the implementation might change in subtle ways,
        # so we explicitly test for errors, too; the test will just have to
        # be updated when the implementation changes.
        dataoffset = 16384
        filler = "ham\n"
        assert not dataoffset % len(filler), \
            "dataoffset must be multiple of len(filler)"
        nchunks = dataoffset // len(filler)
        testlines = [
            "spam, spam and eggs\n",
            "eggs, spam, ham and spam\n",
            "saussages, spam, spam and eggs\n",
            "spam, ham, spam and eggs\n",
            "spam, spam, spam, spam, spam, ham, spam\n",
            "wonderful spaaaaaam.\n"
        ]
        methods = [("readline", ()), ("read", ()), ("readlines", ()),
                   ("readinto", (array("c", " "*100),))]

        try:
            # Prepare the testfile
            bag = open(TESTFN, "w")
            bag.write(filler * nchunks)
            bag.writelines(testlines)
            bag.close()
            # Test for appropriate errors mixing read* and iteration
            for methodname, args in methods:
                f = open(TESTFN)
                if f.next() != filler:
                    self.fail, "Broken testfile"
                meth = getattr(f, methodname)
                try:
                    meth(*args)
                except ValueError:
                    pass
                else:
                    self.fail("%s%r after next() didn't raise ValueError" %
                                     (methodname, args))
                f.close()

            # Test to see if harmless (by accident) mixing of read* and
            # iteration still works. This depends on the size of the internal
            # iteration buffer (currently 8192), but we can test it in a
            # flexible manner.  Each line in the bag o' ham is 4 bytes
            # ("h", "a", "m", "\n"), so 4096 lines of that should get us
            # exactly on the buffer boundary for any power-of-2 buffersize
            # between 4 and 16384 (inclusive).
            f = open(TESTFN)
            for i in range(nchunks):
                f.next()
            testline = testlines.pop(0)
            try:
                line = f.readline()
            except ValueError:
                self.fail("readline() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("readline() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            testline = testlines.pop(0)
            buf = array("c", "\x00" * len(testline))
            try:
                f.readinto(buf)
            except ValueError:
                self.fail("readinto() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            line = buf.tostring()
            if line != testline:
                self.fail("readinto() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))

            testline = testlines.pop(0)
            try:
                line = f.read(len(testline))
            except ValueError:
                self.fail("read() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("read() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            try:
                lines = f.readlines()
            except ValueError:
                self.fail("readlines() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if lines != testlines:
                self.fail("readlines() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            # Reading after iteration hit EOF shouldn't hurt either
            f = open(TESTFN)
            try:
                for line in f:
                    pass
                try:
                    f.readline()
                    f.readinto(buf)
                    f.read()
                    f.readlines()
                except ValueError:
                    self.fail("read* failed after next() consumed file")
            finally:
                f.close()
        finally:
            os.unlink(TESTFN)

Example 36

Project: batch-shipyard
Source File: crypto.py
View license
def generate_pem_pfx_certificates(config):
    # type: (dict) -> str
    """Generate a pem and a derived pfx file
    :param dict config: configuration dict
    :rtype: str
    :return: sha1 thumbprint of pfx
    """
    # gather input
    try:
        pemfile = config['batch_shipyard']['encryption']['public_key_pem']
    except KeyError:
        pemfile = None
    try:
        pfxfile = config['batch_shipyard']['encryption']['pfx']['filename']
    except KeyError:
        pfxfile = None
    try:
        passphrase = config['batch_shipyard']['encryption']['pfx'][
            'passphrase']
    except KeyError:
        passphrase = None
    if pemfile is None:
        pemfile = util.get_input('Enter public key PEM filename to create: ')
    if pfxfile is None:
        pfxfile = util.get_input('Enter PFX filename to create: ')
    if passphrase is None:
        while passphrase is None or len(passphrase) == 0:
            passphrase = getpass.getpass('Enter password for PFX: ')
            if len(passphrase) == 0:
                print('passphrase cannot be empty')
    privatekey = pemfile + '.key'
    # generate pem file with private key and no password
    f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
    f.close()
    try:
        subprocess.check_call(
            ['openssl', 'req', '-new', '-nodes', '-x509', '-newkey',
             'rsa:2048', '-keyout', privatekey, '-out', f.name, '-days', '730',
             '-subj', '/C=US/ST=None/L=None/O=None/CN=BatchShipyard']
        )
        # extract public key from private key
        subprocess.check_call(
            ['openssl', 'rsa', '-in', privatekey, '-pubout', '-outform',
             'PEM', '-out', pemfile]
        )
        logger.debug('created public key PEM file: {}'.format(pemfile))
        # convert pem to pfx for Azure Batch service
        subprocess.check_call(
            ['openssl', 'pkcs12', '-export', '-out', pfxfile, '-inkey',
             privatekey, '-in', f.name, '-certfile', f.name,
             '-passin', 'pass:', '-passout', 'pass:' + passphrase]
        )
        logger.debug('created PFX file: {}'.format(pfxfile))
    finally:
        # remove rsa private key file
        try:
            os.unlink(privatekey)
        except OSError:
            pass
        # remove temp cert pem
        os.unlink(f.name)
    # get sha1 thumbprint of pfx
    return get_sha1_thumbprint_pfx(pfxfile, passphrase)

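Two cleanup idioms appear in the finally block above: a best-effort unlink that tolerates a private-key file which may never have been created, and an unconditional unlink for the temp certificate known to exist. A sketch of the tolerant form (the helper name is made up for illustration):

import os

def unlink_quietly(path):
    # Best-effort removal: an earlier step may have failed before the
    # file was ever created, so a missing path is not an error here.
    try:
        os.unlink(path)
    except OSError:
        pass

On Python 3.8+, pathlib.Path(path).unlink(missing_ok=True) expresses the same intent directly.
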
Example 37

Project: babble
Source File: test_file.py
View license
    def testIteration(self):
        # Test the complex interaction when mixing file-iteration and the
        # various read* methods. Ostensibly, the mixture could just be tested
        # to work when it should work according to the Python language,
        # instead of fail when it should fail according to the current CPython
        # implementation.  People don't always program Python the way they
        # should, though, and the implementation might change in subtle ways,
        # so we explicitly test for errors, too; the test will just have to
        # be updated when the implementation changes.
        dataoffset = 16384
        filler = "ham\n"
        assert not dataoffset % len(filler), \
            "dataoffset must be multiple of len(filler)"
        nchunks = dataoffset // len(filler)
        testlines = [
            "spam, spam and eggs\n",
            "eggs, spam, ham and spam\n",
            "saussages, spam, spam and eggs\n",
            "spam, ham, spam and eggs\n",
            "spam, spam, spam, spam, spam, ham, spam\n",
            "wonderful spaaaaaam.\n"
        ]
        methods = [("readline", ()), ("read", ()), ("readlines", ()),
                   ("readinto", (array("c", " "*100),))]

        try:
            # Prepare the testfile
            bag = open(TESTFN, "w")
            bag.write(filler * nchunks)
            bag.writelines(testlines)
            bag.close()
            # Test for appropriate errors mixing read* and iteration
            for methodname, args in methods:
                f = open(TESTFN)
                if f.next() != filler:
                    self.fail, "Broken testfile"
                meth = getattr(f, methodname)
                try:
                    meth(*args)
                except ValueError:
                    pass
                else:
                    self.fail("%s%r after next() didn't raise ValueError" %
                                     (methodname, args))
                f.close()

            # Test to see if harmless (by accident) mixing of read* and
            # iteration still works. This depends on the size of the internal
            # iteration buffer (currently 8192), but we can test it in a
            # flexible manner.  Each line in the bag o' ham is 4 bytes
            # ("h", "a", "m", "\n"), so 4096 lines of that should get us
            # exactly on the buffer boundary for any power-of-2 buffersize
            # between 4 and 16384 (inclusive).
            f = open(TESTFN)
            for i in range(nchunks):
                f.next()
            testline = testlines.pop(0)
            try:
                line = f.readline()
            except ValueError:
                self.fail("readline() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("readline() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            testline = testlines.pop(0)
            buf = array("c", "\x00" * len(testline))
            try:
                f.readinto(buf)
            except ValueError:
                self.fail("readinto() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            line = buf.tostring()
            if line != testline:
                self.fail("readinto() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))

            testline = testlines.pop(0)
            try:
                line = f.read(len(testline))
            except ValueError:
                self.fail("read() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if line != testline:
                self.fail("read() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            try:
                lines = f.readlines()
            except ValueError:
                self.fail("readlines() after next() with supposedly empty "
                          "iteration-buffer failed anyway")
            if lines != testlines:
                self.fail("readlines() after next() with empty buffer "
                          "failed. Got %r, expected %r" % (line, testline))
            # Reading after iteration hit EOF shouldn't hurt either
            f = open(TESTFN)
            try:
                for line in f:
                    pass
                try:
                    f.readline()
                    f.readinto(buf)
                    f.read()
                    f.readlines()
                except ValueError:
                    self.fail("read* failed after next() consumed file")
            finally:
                f.close()
        finally:
            os.unlink(TESTFN)

Example 38

Project: pangyp
Source File: win_tool.py
View license
  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
                            mt, rc, intermediate_manifest, *manifests):
    """A wrapper for handling creating a manifest resource and then executing
    a link command."""
    # The 'normal' way to do manifests is to have link generate a manifest
    # based on gathering dependencies from the object files, then merge that
    # manifest with other manifests supplied as sources, convert the merged
    # manifest to a resource, and then *relink*, including the compiled
    # version of the manifest resource. This breaks incremental linking, and
    # is generally overly complicated. Instead, we merge all the manifests
    # provided (along with one that includes what would normally be in the
    # linker-generated one, see msvs_emulation.py), and include that into the
    # first and only link. We still tell link to generate a manifest, but we
    # only use that to assert that our simpler process did not miss anything.
    variables = {
      'python': sys.executable,
      'arch': arch,
      'out': out,
      'ldcmd': ldcmd,
      'resname': resname,
      'mt': mt,
      'rc': rc,
      'intermediate_manifest': intermediate_manifest,
      'manifests': ' '.join(manifests),
    }
    add_to_ld = ''
    if manifests:
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
      if embed_manifest == 'True':
        subprocess.check_call(
            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
          ' %(out)s.manifest.rc %(resname)s' % variables)
        subprocess.check_call(
            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
            '%(out)s.manifest.rc' % variables)
        add_to_ld = ' %(out)s.manifest.res' % variables
    subprocess.check_call(ldcmd + add_to_ld)

    # Run mt.exe on the theoretically complete manifest we generated, merging
    # it with the one the linker generated to confirm that the linker
    # generated one does not add anything. This is strictly unnecessary for
    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
    # used in a #pragma comment.
    if manifests:
      # Merge the intermediate one with ours to .assert.manifest, then check
      # that .assert.manifest is identical to ours.
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(out)s.manifest %(intermediate_manifest)s '
          '-out:%(out)s.assert.manifest' % variables)
      assert_manifest = '%(out)s.assert.manifest' % variables
      our_manifest = '%(out)s.manifest' % variables
      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
      # and sometimes doesn't unfortunately.
      with open(our_manifest, 'rb') as our_f:
        with open(assert_manifest, 'rb') as assert_f:
          our_data = our_f.read().translate(None, string.whitespace)
          assert_data = assert_f.read().translate(None, string.whitespace)
      if our_data != assert_data:
        os.unlink(out)
        def dump(filename):
          sys.stderr.write('%s\n-----\n' % filename)
          with open(filename, 'rb') as f:
            sys.stderr.write(f.read() + '\n-----\n')
        dump(intermediate_manifest)
        dump(our_manifest)
        dump(assert_manifest)
        sys.stderr.write(
            'Linker generated manifest "%s" added to final manifest "%s" '
            '(result in "%s"). '
            'Were /MANIFEST switches used in #pragma statements? ' % (
              intermediate_manifest, our_manifest, assert_manifest))
        return 1

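Note the role of os.unlink(out) above: when the manifest check fails, the linked output is deleted so an incremental build cannot treat the stale binary as up to date. A reduced sketch of that verify-or-discard shape (the names are illustrative, not gyp's API):

import os

def verify_or_discard(out_path, is_valid):
    # Delete the artifact when it fails verification so an incremental
    # build cannot mistake the stale file for an up-to-date one.
    if not is_valid(out_path):
        os.unlink(out_path)
        return 1  # non-zero mirrors the tool's failure convention
    return 0
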
Example 39

Project: s3ql
Source File: benchmark.py
View license
def main(args=None):
    if args is None:
        args = sys.argv[1:]

    options = parse_args(args)
    setup_logging(options)

    # /dev/urandom may be slow, so we cache the data first
    log.info('Preparing test data...')
    rnd_fh = tempfile.TemporaryFile()
    with open('/dev/urandom', 'rb', 0) as src:
        copied = 0
        while copied < 50 * 1024 * 1024:
            buf = src.read(BUFSIZE)
            rnd_fh.write(buf)
            copied += len(buf)

    log.info('Measuring throughput to cache...')
    backend_dir = tempfile.mkdtemp(prefix='s3ql-benchmark-')
    mnt_dir = tempfile.mkdtemp(prefix='s3ql-mnt')
    atexit.register(shutil.rmtree, backend_dir)
    atexit.register(shutil.rmtree, mnt_dir)

    block_sizes = [ 2**b for b in range(12, 18) ]
    for blocksize in block_sizes:
        write_time = 0
        size = 50 * 1024 * 1024
        while write_time < 3:
            log.debug('Write took %.3g seconds, retrying', write_time)
            subprocess.check_call([exec_prefix + 'mkfs.s3ql', '--plain', 'local://%s' % backend_dir,
                                   '--quiet', '--force', '--cachedir', options.cachedir])
            subprocess.check_call([exec_prefix + 'mount.s3ql', '--threads', '1', '--quiet',
                                   '--cachesize', '%d' % (2 * size / 1024), '--log',
                                   '%s/mount.log' % backend_dir, '--cachedir', options.cachedir,
                                   'local://%s' % backend_dir, mnt_dir])
            try:
                size *= 2
                with open('%s/bigfile' % mnt_dir, 'wb', 0) as dst:
                    rnd_fh.seek(0)
                    write_time = time.time()
                    copied = 0
                    while copied < size:
                        buf = rnd_fh.read(blocksize)
                        if not buf:
                            rnd_fh.seek(0)
                            continue
                        dst.write(buf)
                        copied += len(buf)

                write_time = time.time() - write_time
                os.unlink('%s/bigfile' % mnt_dir)
            finally:
                subprocess.check_call([exec_prefix + 'umount.s3ql', mnt_dir])

        fuse_speed = copied / write_time
        log.info('Cache throughput with %3d KiB blocks: %d KiB/sec',
                 blocksize / 1024, fuse_speed / 1024)

    # Upload random data to prevent effects of compression
    # on the network layer
    log.info('Measuring raw backend throughput...')
    try:
        backend = get_backend(options, raw=True)
    except DanglingStorageURLError as exc:
        raise QuietError(str(exc)) from None

    upload_time = 0
    size = 512 * 1024
    while upload_time < 10:
        size *= 2
        def do_write(dst):
            rnd_fh.seek(0)
            stamp = time.time()
            copied = 0
            while copied < size:
                buf = rnd_fh.read(BUFSIZE)
                if not buf:
                    rnd_fh.seek(0)
                    continue
                dst.write(buf)
                copied += len(buf)
            return (copied, stamp)
        (upload_size, upload_time) = backend.perform_write(do_write, 's3ql_testdata')
        upload_time = time.time() - upload_time
    backend_speed = upload_size / upload_time
    log.info('Backend throughput: %d KiB/sec', backend_speed / 1024)
    backend.delete('s3ql_testdata')

    src = options.file
    size = os.fstat(options.file.fileno()).st_size
    log.info('Test file size: %.2f MiB', (size / 1024 ** 2))

    in_speed = dict()
    out_speed = dict()
    for alg in ALGS:
        log.info('compressing with %s-6...', alg)
        backend = ComprencBackend(b'pass', (alg, 6), Backend('local://' + backend_dir, None, None))
        def do_write(dst): #pylint: disable=E0102
            src.seek(0)
            stamp = time.time()
            while True:
                buf = src.read(BUFSIZE)
                if not buf:
                    break
                dst.write(buf)
            return (dst, stamp)
        (dst_fh, stamp) = backend.perform_write(do_write, 's3ql_testdata')
        dt = time.time() - stamp
        in_speed[alg] = size / dt
        out_speed[alg] = dst_fh.get_obj_size() / dt
        log.info('%s compression speed: %d KiB/sec per thread (in)', alg, in_speed[alg] / 1024)
        log.info('%s compression speed: %d KiB/sec per thread (out)', alg, out_speed[alg] / 1024)

    print('')
    print('With %d KiB blocks, maximum performance for different compression'
          % (block_sizes[-1]/1024), 'algorithms and thread counts is:', '', sep='\n')

    threads = set([1,2,4,8])
    cores = os.sysconf('SC_NPROCESSORS_ONLN')
    if cores != -1:
        threads.add(cores)
    if options.threads:
        threads.add(options.threads)

    print('%-26s' % 'Threads:',
          ('%12d' * len(threads)) % tuple(sorted(threads)))

    for alg in ALGS:
        speeds = []
        limits = []
        for t in sorted(threads):
            if fuse_speed > t * in_speed[alg]:
                limit = 'CPU'
                speed = t * in_speed[alg]
            else:
                limit = 'S3QL/FUSE'
                speed = fuse_speed

            if speed / in_speed[alg] * out_speed[alg] > backend_speed:
                limit = 'uplink'
                speed = backend_speed * in_speed[alg] / out_speed[alg]

            limits.append(limit)
            speeds.append(speed / 1024)

        print('%-26s' % ('Max FS throughput (%s):' % alg),
              ('%7d KiB/s' * len(threads)) % tuple(speeds))
        print('%-26s' % '..limited by:',
              ('%12s' * len(threads)) % tuple(limits))

    print('')
    print('All numbers assume that the test file is representative and that',
          'there are enough processor cores to run all active threads in parallel.',
          'To compensate for network latency, you should use about twice as',
          'many upload threads as indicated by the above table.\n', sep='\n')

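In the benchmark above, os.unlink('%s/bigfile' % mnt_dir) removes the test file after each timed write so every iteration starts from an empty filesystem. A stripped-down sketch of that measure-then-remove loop (paths, payload, and sizes are illustrative):

import os
import time

def measure_write_speed(path, payload, repeat):
    # Time the writes, then remove the file so the next run starts clean.
    start = time.time()
    with open(path, "wb") as dst:
        for _ in range(repeat):
            dst.write(payload)
    elapsed = time.time() - start
    os.unlink(path)
    return repeat * len(payload) / elapsed  # bytes per second
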
Example 40

Project: threatshell
Source File: geoip.py
View license
    def update(self):

        log.debug("Requesting IP address")
        db_fnames = []
        resp = requests.get(self.ipaddr_url)
        if resp.status_code != 200:
            log.error(
                "Failed to get external IP from %s - [%d]: %s" % (
                    self.ipaddr_url,
                    resp.status_code,
                    resp.content
                )
            )
            return

        ip_addr = resp.content.strip()
        log.debug("IP Address is %s" % ip_addr)
        ip_key_md5 = md5(self.config.get("GeoIP", "LicenseKey"))
        ip_key_md5.update(ip_addr)
        ip_key_hash = ip_key_md5.hexdigest()
        log.debug("IP/Key hash is %s" % ip_key_hash)

        product_ids = self.config.get("GeoIP", "ProductIds").split(",")
        product_ids = [x.strip() for x in product_ids]

        fnames = {}
        for pid in product_ids:

            log.debug("Requesting name for product id %s..." % pid)
            resp = requests.get(self.fnames_url, params={"product_id": pid})

            if resp.status_code != 200:
                log.error(
                    "Failed to resolve %s - [%d]: %s" % (
                        self.fnames_url,
                        resp.status_code,
                        resp.content
                    )
                )
                continue

            fname = resp.content.strip()
            log.debug("Product name is %s" % fname)
            fnames[fname] = pid

        for fname in fnames.keys():

            db_hash = "0" * 32
            if self.db_hashes.get(fname) is not None:
                db_hash = self.db_hashes[fname]

            log.debug("Requesting db file %s" % fname)
            params = {
                "db_md5": db_hash,
                "challenge_md5": ip_key_hash,
                "user_id": self.config.get("GeoIP", "UserId"),
                "edition_id": fnames[fname]
            }
            resp = requests.get(self.update_url, params=params)
            if resp.status_code != 200:
                log.error(
                    "Failed to download new db file - [%d]: %s" % (
                        resp.status_code,
                        resp.content
                    )
                )
            else:
                log.debug("Downloading new db file...")
                chunk_size = 4096
                current_pattern = re.compile(
                    ".*?No new updates.*", re.IGNORECASE)
                with open("%s/%s.gz" % (self.db_path, fname), 'wb') as fd:
                    for chunk in resp.iter_content(chunk_size):
                        fd.write(chunk)

                header = open("%s/%s.gz" % (self.db_path, fname)).read(1024)
                if not current_pattern.match(header):

                    log.debug("Decompressing db file")
                    gz_istream = gzip.open(
                        "%s/%s.gz" % (self.db_path, fname), "rb")
                    ostream = open("%s/%s" % (self.db_path, fname), "wb")
                    buf = gz_istream.read(4096)
                    while buf != "":
                        ostream.write(buf)
                        buf = gz_istream.read(4096)

                    gz_istream.close()
                    ostream.close()
                    os.unlink("%s/%s.gz" % (self.db_path, fname))
                    db_fnames.append("%s/%s" % (self.db_path, fname))

                else:

                    log.debug("%s is up to date" % fname)
                    db_fnames.append("%s/%s" % (self.db_path, fname))
                    os.unlink("%s/%s.gz" % (self.db_path, fname))

        if self.g1_country_reader is None:
            self._init_readers()
        else:
            self._reset_readers()

        return db_fnames

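The update() method above follows a download-decompress-discard sequence: fetch the .gz, expand it next to itself, then os.unlink() the archive in both the fresh-download and the up-to-date branches. A compact sketch of the decompression step using the standard library (gzip and shutil are standard modules; the function name is made up):

import gzip
import os
import shutil

def decompress_and_discard(gz_path, out_path):
    # Expand the archive, then remove it so only the usable file remains.
    with gzip.open(gz_path, "rb") as src, open(out_path, "wb") as dst:
        shutil.copyfileobj(src, dst)
    os.unlink(gz_path)
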
Example 41

Project: circlator
Source File: mapping.py
View license
def bwa_mem(
      ref,
      reads,
      outfile,
      threads=1,
      bwa_options = '-x pacbio',
      verbose=False,
      index=None
    ):

    samtools = external_progs.make_and_check_prog('samtools', verbose=verbose)
    bwa = external_progs.make_and_check_prog('bwa', verbose=verbose)
    unsorted_bam = outfile + '.tmp.unsorted.bam'
    tmp_index = outfile + '.tmp.bwa_index'
    bwa_index(ref, outprefix=tmp_index, verbose=verbose, bwa=bwa)

    cmd = ' '.join([
        bwa.exe(), 'mem',
        bwa_options,
        '-t', str(threads),
        tmp_index,
        reads,
        '|',
        samtools.exe(), 'view',
        '-F 0x0800',
        '-T', ref,
        '-b',
        '-o', unsorted_bam,
        '-',
    ])

    common.syscall(cmd, verbose=verbose)
    bwa_index_clean(tmp_index)
    threads = min(4, threads)
    thread_mem = int(500 / threads)

    # here we have to check for the version of samtools, starting from 1.3 the
    # -o flag is used for specifying the samtools sort output-file.
    # Starting from 1.2 you can use the -o flag, but can't have
    # -o out.bam at the end of the call, so use new style from 1.3 onwards.

    outparam = ''

    if samtools.version_at_least('1.3'):
        outparam = '-o'
        samout = outfile
    else:
        samout = outfile[:-4]

    cmd = ' '.join([
        samtools.exe(), 'sort',
        '-@', str(threads),
        '-m', str(thread_mem) + 'M',
        unsorted_bam,
        outparam, samout
    ])

    common.syscall(cmd, verbose=verbose)
    os.unlink(unsorted_bam)

    cmd = samtools.exe() + ' index ' + outfile
    common.syscall(cmd, verbose=verbose)

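bwa_mem() keeps the unsorted BAM on disk until samtools sort has produced the final file, and only then calls os.unlink(unsorted_bam); assuming common.syscall() aborts on failure, the intermediate survives any error for inspection. A generic sketch of that transform-then-unlink shape (using coreutils sort as a stand-in command):

import os
import subprocess

def sort_and_clean(unsorted_path, sorted_path):
    # check_call() raises on a non-zero exit, so the unlink below is
    # skipped on failure and the intermediate file is kept for debugging.
    subprocess.check_call(["sort", "-o", sorted_path, unsorted_path])
    os.unlink(unsorted_path)
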
Example 42

Project: iva
Source File: mapping.py
View license
def map_reads(reads_fwd, reads_rev, ref_fa, out_prefix, index_k=15, index_s=3, threads=1, max_insert=1000, minid=0.5, verbose=0, required_flag=None, sort=False, exclude_flag=None, mate_ref=None, extra_smalt_map_ops=None):
    if extra_smalt_map_ops is None:
        extra_smalt_map_ops = ''
    map_index = out_prefix + '.map_index'
    clean_files = [map_index + '.' + x for x in ['smi', 'sma']]
    index_cmd = ' '.join([
        'smalt index',
        '-k', str(index_k),
        '-s', str(index_s),
        map_index,
        ref_fa
    ])

    map_cmd = 'smalt map ' + extra_smalt_map_ops + ' '

    # depending on OS, -n can break smalt, so only use -n if it's > 1.
    if threads > 1:
        map_cmd += '-n ' + str(threads) + ' -O '

    if reads_rev is None:
        map_cmd += ' '.join([
            '-y', str(minid),
            map_index,
            reads_fwd,
        ])
    else:
        map_cmd += ' '.join([
            '-i', str(max_insert),
            '-y', str(minid),
            map_index,
            reads_fwd,
            reads_rev,
        ])

    if mate_ref is not None:
        map_cmd += r''' | awk '$7=="''' + mate_ref + '"\''


    map_cmd += ' | samtools view'

    if required_flag is not None:
        map_cmd += ' -f ' + str(required_flag)

    if exclude_flag is not None:
        map_cmd += ' -F ' + str(exclude_flag)

    final_bam = out_prefix + '.bam'
    if sort:
        intermediate_bam = out_prefix + '.unsorted.bam'
    else:
        intermediate_bam = final_bam

    map_cmd += ' -bS -T ' + ref_fa + '  - > ' + intermediate_bam
    common.syscall(index_cmd)
    common.syscall(map_cmd)
    if verbose >= 2:
        print('        map reads. Index:  ', index_cmd)
        print('        map reads. Mapping:', map_cmd)

    if sort:
        threads = min(4, threads)
        thread_mem = int(500 / threads)
        if str(external_progs.get_version('samtools')) >= '1.2':
            sort_cmd = 'samtools sort -@ ' + str(threads) + ' -m ' + str(thread_mem) + 'M -o ' + final_bam + ' ' + intermediate_bam
        else:
            sort_cmd = 'samtools sort -@ ' + str(threads) + ' -m ' + str(thread_mem) + 'M ' + intermediate_bam + ' ' + out_prefix
        index_cmd = 'samtools index ' + final_bam
        if verbose >= 2:
            print('        map reads. sort:  ', sort_cmd)
        common.syscall(sort_cmd)
        if verbose >= 2:
            print('        map reads. index:  ', index_cmd)
        common.syscall(index_cmd)
    for fname in clean_files:
        os.unlink(fname)

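Here the cleanup targets are the index sidecar files: smalt index writes <prefix>.smi and <prefix>.sma, so map_reads() reconstructs those two names up front and unlinks them once mapping is done. A slightly defensive sketch of that convention-based cleanup (the exists() check is an addition, not in the original):

import os

def clean_smalt_index(index_prefix):
    # Rebuild the sidecar names the indexer used and remove them; the
    # exists() check tolerates an index that failed part-way.
    for suffix in (".smi", ".sma"):
        path = index_prefix + suffix
        if os.path.exists(path):
            os.unlink(path)
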
Example 43

Project: conary
Source File: changemail.py
View license
def doWork(repos, cfg, srcMap, pkgMap, grpMap, sourceuser, binaryuser, fromaddr, maxsize, argSet):
    exitCode = 0
    tmpfd, tmppath = tempfile.mkstemp('', 'changemail-')
    os.unlink(tmppath)
    tmpfile = os.fdopen(tmpfd)
    sys.stdout.flush()
    oldStdOut = os.dup(sys.stdout.fileno())
    os.dup2(tmpfd, 1)
    mailhost = argSet.pop('mailhost', 'localhost')

    if srcMap:
        sources = sorted(srcMap.keys())
        names = [ x.split(':')[0] for x in sources ]
        subjectList = []
        for sourceName in sources:
            for ver, shortver in srcMap[sourceName]:
                subjectList.append('%s=%s' %(
                    sourceName.split(':')[0], shortver))
        subject = 'Source: %s' %" ".join(subjectList)

        for sourceName in sources:
            for ver, shortver in srcMap[sourceName]:
                new = repos.findTrove(cfg.buildLabel, (sourceName, ver, None))
                newV = new[0][1]
                old, oldV = checkin.findRelativeVersion(repos, sourceName,
                                                        1, newV)
                if old:
                    old = ' (previous: %s)'%oldV.trailingRevision().asString()
                else:
                    old = ''
                print '================================'
                print '%s=%s%s' %(sourceName, shortver, old)
                print 'cvc rdiff %s -1 %s' %(sourceName[:-7], ver)
                print '================================'
                try:
                    checkin.rdiff(repos, cfg.buildLabel, sourceName, '-1', ver)
                except:
                    exitCode = 2
                    print 'rdiff failed for %s' %sourceName
                    try:
                        t, v, tb = sys.exc_info()
                        tbd = traceback.format_exception(t, v, tb)
                        sys.stdout.write(''.join(tbd[-min(2, len(tbd)):]))
                        sys.stderr.write(''.join(tbd))
                    except:
                        print 'Failed to print exception information'

                    print ''
                    print 'Please include a copy of this message in an issue'
                    print 'filed at https://issues.rpath.com/'
                print
        if sourceuser:
            print 'Committed by: %s' %sourceuser

        sendMail(tmpfile, subject, fromaddr, maxsize, argSet['email'], mailhost)

    if pkgMap or grpMap:
        # stdout is the tmpfile
        sys.stdout.flush()
        sys.stdout.seek(0)
        sys.stdout.truncate()

        binaries = sorted(pkgMap.keys())
        groups = sorted(grpMap.keys())
        subject = 'Binary: %s' %" ".join(binaries+groups)

        wrap = textwrap.TextWrapper(
            initial_indent='    ',
            subsequent_indent='        ',
        )

        if binaries:
            print "Binary package commits:"
            if binaryuser:
                print 'Committed by: %s' %binaryuser
        for package in binaries:
            for version in sorted(pkgMap[package].keys()):
                print '================================'
                print '%s=%s' %(package, version)
                flavorDict = pkgMap[package][version]
                for flavor in sorted(flavorDict.keys()):
                    print wrap.fill('%s:%s [%s]' %(package,
                        ' :'.join(sorted(flavorDict[flavor])),
                        ', '.join(flavor.split(','))))
                print

        if groups:
            print "Group commits:"
        for group in groups:
            for version in sorted(grpMap[group].keys()):
                print '================================'
                print '%s=%s' %(group, version)
                flavorSet = grpMap[group][version]
                for flavor in sorted(flavorSet):
                    print wrap.fill('[%s]' %
                        ', '.join(flavor.split(',')))
                print

        sendMail(tmpfile, subject, fromaddr, maxsize, argSet['email'], mailhost)
        os.dup2(oldStdOut, 1)

    return exitCode

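The opening lines of doWork() show a classic POSIX idiom: tempfile.mkstemp() followed immediately by os.unlink() on the path. The open descriptor keeps the data reachable while no directory entry exists, so the spool file can never be leaked, even on a crash. (This does not work on Windows, where an open file cannot be unlinked.) A minimal sketch:

import os
import tempfile

fd, path = tempfile.mkstemp(prefix="spool-")
os.unlink(path)               # no pathname remains, only the descriptor
spool = os.fdopen(fd, "w+")
spool.write("buffered output\n")
spool.seek(0)
print(spool.read())
spool.close()                 # the unnamed inode is reclaimed here
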
Example 44

Project: scalyr-agent-2
Source File: config_main.py
View license
def set_api_key(config, config_file_path, new_api_key):
    """Replaces the current api key in the file at 'config_file_path' with the value of 'new_api_key'.

    @param config: The Configuration object created by reading config_file_path.
    @param config_file_path: The full path to the configuration file. This file will be overwritten.
    @param new_api_key: The new value for the api key to write into the file.
    """
    # We essentially search through the current configuration file, looking for the current key's value
    # and rewrite it to be the new_api_key.
    current_key = config.api_key

    tmp_file = None
    original_file = None

    try:
        try:
            # Create a temporary file that we will write the new file into.  We will just rename it when we are done
            # to the original file name.
            tmp_file_path = '%s.tmp' % config_file_path
            tmp_file = open(tmp_file_path, 'w')

            # Open up the current file for reading.
            original_file = open(config_file_path)
            found = 0

            for s in original_file:
                # For a sanity check, make sure we only see the current key once in the file.  That guarantees that
                # we are replacing the correct thing.
                found += s.count(current_key)
                if found > 1:
                    print >>sys.stderr, 'The existing API key was found in more than one place.  Config file has been'
                    print >>sys.stderr, 'modified already.  Cannot safely update modified config file so failing.'
                    sys.exit(1)
                s = s.replace(current_key, new_api_key)
                print >>tmp_file, s,

            if found != 1:
                print >>sys.stderr, 'The existing API key could not be found in file, failing'
                sys.exit(1)

            # For Win32, we must make sure the files are closed before rename.
            tmp_file.close()
            tmp_file = None
            original_file.close()
            original_file = None

            if 'win32' == sys.platform:
                os.unlink(config_file_path)

            # Determine how to make the file have the same permissions as the original config file.  For now, it
            # does not matter since if this command is only run as part of the install process, the file should
            # be owned by root already.
            os.rename(tmp_file_path, config_file_path)
        except IOError, error:
            if error.errno == 13:
                print >>sys.stderr, 'You do not have permission to write to the file and directory required '
                print >>sys.stderr, 'to update the API key.  Ensure you can write to the file at path'
                print >>sys.stderr, '\'%s\' and create files in its parent directory.' % config_file_path
            else:
                print >>sys.stderr, 'Error attempting to update the key: %s' % str(error)
                print >>sys.stderr, traceback.format_exc()
            sys.exit(1)
        except Exception, err:
            print >>sys.stderr, 'Error attempting to update the key: %s' % str(err)
            print >> sys.stderr, traceback.format_exc()
            sys.exit(1)
    finally:
        if tmp_file is not None:
            tmp_file.close()
        if original_file is not None:
            original_file.close()

Example 45
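set_api_key() rewrites the config by writing a sibling .tmp file and renaming it over the original; on win32 it must os.unlink() the destination first, because os.rename() refuses to overwrite an existing file there. A sketch of the same swap (hypothetical helper name); note that os.replace() (Python 3.3+) overwrites atomically on both platforms and removes the need for the explicit unlink:

import os
import sys

def rewrite_file(path, transform):
    # Write the new contents beside the original, then swap into place.
    tmp_path = path + ".tmp"
    with open(path) as src, open(tmp_path, "w") as dst:
        dst.write(transform(src.read()))
    if hasattr(os, "replace"):
        os.replace(tmp_path, path)   # atomic overwrite, Python 3.3+
    else:
        if sys.platform == "win32":
            os.unlink(path)          # rename() cannot overwrite here
        os.rename(tmp_path, path)
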

Project: sdaps
Source File: add.py
View license
@script.connect(parser)
@script.logfile
def add(cmdline):
    import sys
    from sdaps.add import add_image, check_image
    from sdaps import image

    error = False
    survey = model.survey.Survey.load(cmdline['project'])

    filelist = []
    deletelist = []

    if not cmdline['convert']:
        for file in cmdline['images']:
            filelist.append(file)

            if not check_image(survey, file, cmdline['duplex'], cmdline['force'], message=True):
                error=True
        if error:
            return
    else:
        if not cmdline['copy']:
            log.error(_("The --no-copy option is not compatible with --convert!"))
            return 1

        try:
            from sdaps.convert import convert_images
        except:
            log.error("Need to convert the images to monochrome TIFF, however the conversion module cannot be imported. You are likely missing the OpenCV dependency.")
            return 1

        print _("Converting input files into a single temporary file.")

        tmp = tempfile.mktemp(suffix='.tif', prefix='sdaps-convert-')
        deletelist.append(tmp)
        filelist.append(tmp)

        # Run conversion
        # TODO: Allow 3D transformation here!
        try:
            convert_images(cmdline['images'], tmp, survey.defs.paper_width, survey.defs.paper_height, cmdline['transform'])

            if not check_image(survey, tmp, cmdline['duplex'], cmdline['force']):
                log.error(_("The page count of the created temporary file does not work with this survey."))
                raise AssertionError()

        except Exception, e:
            log.error(str(e))
            log.error(_("Running the conversion failed."))
            error = True

    if not error:
        for file in filelist:
            print _('Processing %s') % file

            add_image(survey, file, cmdline['duplex'], cmdline['force'], cmdline['copy'])

            print _('Done')

    for file in deletelist:
        os.unlink(file)

    if error:
        return 1
    else:
        survey.save()
        return 0

Example 46

Project: quicktill
Source File: till.py
View license
    @staticmethod
    def run(args):
        import sqlalchemy.engine.url
        import sqlalchemy
        import tempfile
        import subprocess
        url=sqlalchemy.engine.url.make_url(
            td.parse_database_name(tillconfig.database))
        try:
            current_schema=subprocess.check_output(
                ["pg_dump","-s"]+checkdb.connection_options(url))
        except OSError as e:
            print("Couldn't run pg_dump on current database; "
                  "is pg_dump installed?")
            print(e)
            return 1
        if args.createdb:
            engine=sqlalchemy.create_engine("postgresql+psycopg2:///postgres")
            conn=engine.connect()
            conn.execute('commit')
            conn.execute('create database "{}"'.format(args.tempdb))
            conn.close()
        try:
            engine=sqlalchemy.create_engine(
                "postgresql+psycopg2:///{}".format(args.tempdb))
            models.metadata.bind=engine
            models.metadata.create_all()
            try:
                pristine_schema=subprocess.check_output(
                    ["pg_dump","-s",args.tempdb])
            finally:
                models.metadata.drop_all()
                # If we don't explicitly close the connection to the
                # database here, we won't be able to drop it
                engine.dispose()
        finally:
            if args.createdb:
                engine=sqlalchemy.create_engine("postgresql+psycopg2:///postgres")
                conn=engine.connect()
                conn.execute('commit')
                conn.execute('drop database "{}"'.format(args.tempdb))
                conn.close()
        current=tempfile.NamedTemporaryFile(delete=False)
        current.write(current_schema)
        current.close()
        pristine=tempfile.NamedTemporaryFile(delete=False)
        pristine.write(pristine_schema)
        pristine.close()
        try:
            subprocess.check_call(["apgdiff", "--add-transaction",
                                   "--ignore-start-with",
                                   current.name, pristine.name])
        except OSError as e:
            print("Couldn't run apgdiff; is it installed?")
            print(e)
        finally:
            if args.keeptmp:
                print("Current database schema is in {}".format(current.name))
                print("Pristine database schema is in {}".format(pristine.name))
            else:
                os.unlink(current.name)
                os.unlink(pristine.name)

Example 47
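NamedTemporaryFile(delete=False) appears above because an external program (apgdiff) must open the dumps by name; the script therefore owns the cleanup and calls os.unlink() on both names unless --keeptmp asks to keep them. A sketch of that hand-off pattern (the 'diff' command is a stand-in for any external consumer):

import os
import subprocess
import tempfile

def diff_blobs(a, b):
    fa = tempfile.NamedTemporaryFile(delete=False)
    fb = tempfile.NamedTemporaryFile(delete=False)
    try:
        fa.write(a)
        fa.close()                # close before handing the name away
        fb.write(b)
        fb.close()
        return subprocess.call(["diff", fa.name, fb.name])
    finally:
        os.unlink(fa.name)
        os.unlink(fb.name)
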

Project: kevin
Source File: __main__.py
View license
def main():
    """ Falk service launch """

    cmd = argparse.ArgumentParser(
        description="Kevin CI Falk - VM provider")

    cmd.add_argument("-c", "--config", default="/etc/kevin/falk.conf",
                     help="file name of the configuration to use.")
    cmd.add_argument("-d", "--debug", action="store_true",
                     help="enable asyncio debugging")
    cmd.add_argument("-v", "--verbose", action="count", default=0,
                     help="increase program verbosity")
    cmd.add_argument("-q", "--quiet", action="count", default=0,
                     help="decrease program verbosity")

    args = cmd.parse_args()

    print("\x1b[1;32mFalk machine service initializing...\x1b[m")

    log_setup(args.verbose - args.quiet)

    loop = asyncio.get_event_loop()

    # enable asyncio debugging
    loop.set_debug(args.debug)

    # parse config
    CFG.load(args.config)

    try:
        os.unlink(CFG.control_socket)
    except OSError:
        if os.path.exists(CFG.control_socket):
            raise
        else:
            sockdir = os.path.dirname(CFG.control_socket)
            if not os.path.exists(sockdir):
                try:
                    logging.info("creating socket directory '%s'" % sockdir)
                    os.makedirs(sockdir, exist_ok=True)
                except PermissionError as exc:
                    raise exc from None

    logging.error("\x1b[1;32mstarting falk...\x1b[m")

    # state storage
    falk = Falk()

    logging.warn("listening on '%s'..." % CFG.control_socket)

    proto_tasks = set()

    def create_proto():
        """ creates the asyncio protocol instance """
        proto = FalkProto(falk)

        # create message "worker" task
        proto_task = loop.create_task(proto.process_messages())
        proto_tasks.add(proto_task)

        def conn_finished(fut):
            """ remove the task from the pending list """
            logging.debug("[proto] done")
            proto_tasks.remove(proto_task)

        proto_task.add_done_callback(conn_finished)

        return proto

    srv_coro = loop.create_unix_server(create_proto, CFG.control_socket)
    server = loop.run_until_complete(srv_coro)

    if CFG.control_socket_group:
        # this only works if the current user is a member of the
        # target group!
        shutil.chown(CFG.control_socket, None, CFG.control_socket_group)

    if CFG.control_socket_permissions:
        mode = int(CFG.control_socket_permissions, 8)
        os.chmod(CFG.control_socket, mode)

    try:
        loop.run_forever()
    except KeyboardInterrupt:
        print("\nexiting...")

    logging.warn("served %d connections" % falk.handle_id)

    logging.info("cleaning up...")

    for proto_task in proto_tasks:
        proto_task.cancel()

    # execute the cancellations
    loop.run_until_complete(asyncio.gather(*proto_tasks,
                                           return_exceptions=True))

    # server teardown
    server.close()
    loop.run_until_complete(server.wait_closed())

    loop.stop()
    loop.run_forever()
    loop.close()

    print("cya!")

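main() unlinks the control socket before binding it: a Unix-domain socket path outlives its server process, so bind() would fail on the stale entry. The except branch re-raises only if the path still exists, i.e. unlink failed for a reason other than absence. A condensed sketch of that pattern (POSIX-only, since AF_UNIX is unavailable on Windows before recent builds):

import os
import socket

def bind_unix_socket(path):
    try:
        os.unlink(path)           # clear a stale socket left by a dead server
    except OSError:
        if os.path.exists(path):
            raise                 # it exists but could not be removed
    srv = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    srv.bind(path)
    srv.listen(1)
    return srv
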
Example 48

Project: baruwa2
Source File: accounts.py
View license
@task(name='import-accounts')
def importaccounts(domid, filename, skipfirst, userid):
    "import accounts"
    logger = importaccounts.get_logger()
    results = dict(rows=[], global_error=[])
    keys = tuple(ACCOUNTFIELDS + ADDRESSFIELDS)
    translator = _get_translator(None)
    pylons.translator._push_object(translator)
    try:
        with open(filename, 'rU') as handle:
            dialect = csv.Sniffer().sniff(handle.read(1024))
            handle.seek(0)
            rows = csv.DictReader(handle, fieldnames=keys, dialect=dialect)
            query = Session.query(Domain).filter(Domain.id == domid)
            domain = query.one()
            requester = Session.query(User).get(userid)
            logger.info("Importing accounts from file: %s for: %s" %
                        (filename, domain.name))
            try:
                count = 1
                for row in rows:
                    if skipfirst and (count == 1):
                        count += 1
                        continue
                    result = dict(id=None,
                                username=row['username'],
                                imported=False,
                                error=None)
                    try:
                        session_dict = {}
                        token = make_csrf(session_dict)
                        fields = getkeys(row)
                        post_data = dict2mdict(fields)
                        post_data.add('password2', row['password1'])
                        post_data.add('domains', domid)
                        post_data.add('csrf_token', token)
                        form = AddUserForm(post_data,
                                csrf_context=session_dict)
                        form.domains.query = query
                        if form.validate():
                            # db insert
                            if domain.name != form.email.data.split('@')[1]:
                                raise TypeError(
                                    'Cannot import: %s into domain: %s' %
                                    (form.email.data, domain.name))
                            user = User(form.username.data, form.email.data)
                            for attr in ['firstname', 'lastname', 'email',
                                'active', 'account_type', 'send_report',
                                'spam_checks', 'low_score', 'high_score',
                                'timezone']:
                                setattr(user, attr, getattr(form, attr).data)
                            user.local = True
                            user.set_password(form.password1.data)
                            if user.is_peleb:
                                user.domains = [domain]
                            Session.add(user)
                            Session.commit()
                            result['id'] = user.id
                            result['imported'] = True
                            logger.info("Imported account: %s" %
                                        row['username'])
                            # address add
                            add_address(row, user, requester)
                        else:
                            logger.info("Import failed account: %s" %
                                        row['username'])
                            if isinstance(form.errors, dict):
                                errors = []
                                for field in form.errors:
                                    themsg = u'%s: %s' % (field,
                                            unicode(form.errors[field][0]))
                                    errors.append(themsg)
                                result['error'] = u', '.join(errors)
                            else:
                                result['error'] = form.errors
                    except TypeError, err:
                        logger.info("Import failed account: %s" %
                                    row['username'])
                        result['error'] = str(err)
                    except IntegrityError, err:
                        Session.rollback()
                        logger.info("Import failed account: %s" %
                                    row['username'])
                        result['error'] = 'Account already exists'
                    finally:
                        count += 1
                        results['rows'].append(result)
            except csv.Error, err:
                logger.info("Import failure error: %s on line no: %s" %
                            (err, rows.line_num))
                errormsg = 'Error: %s on line no: %d' % (err, rows.line_num)
                results['global_error'] = errormsg
            logger.info("Processed file: %s" % filename)
    except (csv.Error, IOError), err:
        results['global_error'] = str(err)
        logger.info("Error: %s, processing %s" % (str(err), filename))
    finally:
        Session.close()
    try:
        os.unlink(filename)
    except OSError:
        pass
    pylons.translator._pop_object()
    return results

Example 49

Project: baruwa2
Source File: organizations.py
View license
@task(name='import-domains')
def importdomains(orgid, filename, skipfirst):
    "Import domains"
    logger = importdomains.get_logger()
    results = dict(rows=[], global_error=[])
    keys = tuple(DOMAINFIELDS + DAFIELDS + DSFIELDS + ASFIELDS)
    translator = _get_translator(None)
    pylons.translator._push_object(translator)
    try:
        with open(filename, 'rU') as handle:
            dialect = csv.Sniffer().sniff(handle.read(1024))
            handle.seek(0)
            rows = csv.DictReader(handle, fieldnames=keys, dialect=dialect)
            query = Session.query(Group).filter(Group.id == orgid)
            org = query.one()
            logger.info("Importing domains from file: %s for: %s" %
                        (filename, org.name))
            try:
                count = 1
                for row in rows:
                    if skipfirst and (count == 1):
                        count += 1
                        continue
                    result = dict(id=None,
                                name=row['name'],
                                imported=False,
                                error=None)
                    try:
                        session_dict = {}
                        token = make_csrf(session_dict)
                        fields = getkeys(row)
                        post_data = dict2mdict(fields)
                        post_data.add('csrf_token', token)
                        form = AddDomainForm(post_data,
                                            csrf_context=session_dict)
                        form.organizations.query = query
                        if form.validate():
                            # insert to db
                            domain = Domain()
                            for field in form:
                                if field.name != 'csrf_token':
                                    setattr(domain, field.name, field.data)
                            domain.organizations.append(org)
                            Session.add(domain)
                            Session.commit()
                            result['id'] = domain.id
                            result['imported'] = True
                            logger.info("Imported domain: %s" % row['name'])
                            # process other data
                            process_aux(row, domain.id)
                        else:
                            logger.info("Import failed domain: %s" %
                                        row['name'])
                            if isinstance(form.errors, dict):
                                errors = []
                                for field in form.errors:
                                    themsg = u'%s: %s' % (field,
                                                unicode(form.errors[field][0]))
                                    errors.append(themsg)
                                result['error'] = u', '.join(errors)
                            else:
                                result['error'] = form.errors
                    except TypeError, err:
                        logger.info("Import failed domain: %s" % row['name'])
                        result['error'] = str(err)
                    except IntegrityError, err:
                        Session.rollback()
                        logger.info("Import failed domain: %s" % row['name'])
                        result['error'] = 'Domain already exists'
                    finally:
                        count += 1
                        results['rows'].append(result)
            except csv.Error, err:
                logger.info("Import failure error: %s on line no: %s" %
                            (err, rows.line_num))
                errormsg = 'Error: %s on line no: %d' % (err, rows.line_num)
                results['global_error'] = errormsg
        logger.info("Processed file: %s" % filename)
    except (csv.Error, IOError), err:
        results['global_error'] = str(err)
        logger.info("Error: %s, processing %s" % (str(err), filename))
    finally:
        Session.close()
    try:
        os.unlink(filename)
    except OSError:
        pass
    pylons.translator._pop_object()
    return results

Example 50

Project: gyp
Source File: win_tool.py
View license
  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
                            mt, rc, intermediate_manifest, *manifests):
    """A wrapper for handling creating a manifest resource and then executing
    a link command."""
    # The 'normal' way to do manifests is to have link generate a manifest
    # based on gathering dependencies from the object files, then merge that
    # manifest with other manifests supplied as sources, convert the merged
    # manifest to a resource, and then *relink*, including the compiled
    # version of the manifest resource. This breaks incremental linking, and
    # is generally overly complicated. Instead, we merge all the manifests
    # provided (along with one that includes what would normally be in the
    # linker-generated one, see msvs_emulation.py), and include that into the
    # first and only link. We still tell link to generate a manifest, but we
    # only use that to assert that our simpler process did not miss anything.
    variables = {
      'python': sys.executable,
      'arch': arch,
      'out': out,
      'ldcmd': ldcmd,
      'resname': resname,
      'mt': mt,
      'rc': rc,
      'intermediate_manifest': intermediate_manifest,
      'manifests': ' '.join(manifests),
    }
    add_to_ld = ''
    if manifests:
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
      if embed_manifest == 'True':
        subprocess.check_call(
            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
            ' %(out)s.manifest.rc %(resname)s' % variables)
        subprocess.check_call(
            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
            '%(out)s.manifest.rc' % variables)
        add_to_ld = ' %(out)s.manifest.res' % variables
    subprocess.check_call(ldcmd + add_to_ld)

    # Run mt.exe on the theoretically complete manifest we generated, merging
    # it with the one the linker generated to confirm that the linker
    # generated one does not add anything. This is strictly unnecessary for
    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
    # used in a #pragma comment.
    if manifests:
      # Merge the intermediate one with ours to .assert.manifest, then check
      # that .assert.manifest is identical to ours.
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(out)s.manifest %(intermediate_manifest)s '
          '-out:%(out)s.assert.manifest' % variables)
      assert_manifest = '%(out)s.assert.manifest' % variables
      our_manifest = '%(out)s.manifest' % variables
      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
      # and sometimes doesn't, unfortunately.
      with open(our_manifest, 'rb') as our_f:
        with open(assert_manifest, 'rb') as assert_f:
          our_data = our_f.read().translate(None, string.whitespace)
          assert_data = assert_f.read().translate(None, string.whitespace)
      if our_data != assert_data:
        os.unlink(out)
        def dump(filename):
          sys.stderr.write('%s\n-----\n' % filename)
          with open(filename, 'rb') as f:
            sys.stderr.write(f.read() + '\n-----\n')
        dump(intermediate_manifest)
        dump(our_manifest)
        dump(assert_manifest)
        sys.stderr.write(
            'Linker generated manifest "%s" added to final manifest "%s" '
            '(result in "%s"). '
            'Were /MANIFEST switches used in #pragma statements? ' % (
              intermediate_manifest, our_manifest, assert_manifest))
        return 1
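
The mismatch branch above shows a second common role for os.unlink: deleting a partially built output so that a failed verification cannot leave behind a stale binary that later build steps would mistake for a good one. A minimal sketch of that verify-or-discard pattern, written against the example's Python 2 idiom of str.translate(None, deletechars); the verify_or_discard name and its arguments are illustrative, not part of gyp:

import os
import string

def verify_or_discard(out, ours_path, theirs_path):
    """Compare two generated files ignoring whitespace; unlink out on mismatch."""
    with open(ours_path, 'rb') as f:
        ours = f.read().translate(None, string.whitespace)
    with open(theirs_path, 'rb') as f:
        theirs = f.read().translate(None, string.whitespace)
    if ours != theirs:
        os.unlink(out)  # remove the suspect output so nothing consumes it
        return 1        # non-zero exit, mirroring the tool's convention
    return 0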