Here are examples of the Python API `sys.dont_write_bytecode`, taken from open-source projects. By voting up, you can indicate which examples are most useful and appropriate.
65 Examples
5
Example 1
def test_dont_write_bytecode(self):
    """No bytecode may be recorded while sys.dont_write_bytecode is true.

    The skipIf class decorator guarantees the flag starts out False, so it
    is simply reset to False afterwards.
    """
    sys.dont_write_bytecode = True
    try:
        self.loader.bytecode_path = "<does not exist>"
        self.loader.get_code(self.name)
        self.assertNotIn(self.cached, self.loader.written)
    finally:
        sys.dont_write_bytecode = False
5
Example 2
Project: ironpython3 Source File: test_install_lib.py
def test_dont_write_bytecode(self):
    """install_lib.byte_compile must be a no-op (with a log notice) when
    bytecode writing is globally disabled."""
    cmd = install_lib(self.create_dist()[1])
    cmd.compile = cmd.optimize = 1
    saved = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = saved
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
5
Example 3
def test_dont_write_bytecode(self):
    """byte_compile must raise DistutilsByteCompileError while
    sys.dont_write_bytecode is in effect."""
    saved = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        self.assertRaises(DistutilsByteCompileError, byte_compile, [])
    finally:
        sys.dont_write_bytecode = saved
5
Example 4
Project: datafari Source File: test_install_lib.py
def test_dont_write_bytecode(self):
    """install_lib.byte_compile must skip (and log) when the interpreter
    forbids bytecode writing."""
    pkg_dir, dist = self.create_dist()
    cmd = install_lib(dist)
    cmd.compile = cmd.optimize = 1
    saved_flag = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = saved_flag
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
5
Example 5
Project: imagrium Source File: test_build_py.py
def test_dont_write_bytecode(self):
    """build_py.byte_compile must be a no-op (logging a notice) when
    sys.dont_write_bytecode is set."""
    pkg_dir, dist = self.create_dist()
    cmd = build_py(dist)
    cmd.compile = 1
    cmd.optimize = 1
    old_dont_write_bytecode = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = old_dont_write_bytecode
    # assertIn produces a far clearer failure message than
    # assertTrue('...' in ...), and matches the sibling examples here.
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
5
Example 6
Project: TrustRouter Source File: util.py
def writes_bytecode_files(fxn):
    """Decorator to protect sys.dont_write_bytecode from mutation and to skip
    tests that require it to be set to False."""
    if sys.dont_write_bytecode:
        # Bytecode writing is disabled interpreter-wide: skip by returning
        # a do-nothing stand-in.
        return lambda *args, **kwargs: None
    @functools.wraps(fxn)
    def wrapper(*args, **kwargs):
        saved = sys.dont_write_bytecode
        sys.dont_write_bytecode = False
        try:
            return fxn(*args, **kwargs)
        finally:
            sys.dont_write_bytecode = saved
    return wrapper
5
Example 7
Project: pymo Source File: test_install_lib.py
def test_dont_write_bytecode(self):
    """install_lib.byte_compile must be a no-op (logging a notice) when
    sys.dont_write_bytecode is set."""
    pkg_dir, dist = self.create_dist()
    cmd = install_lib(dist)
    cmd.compile = 1
    cmd.optimize = 1
    old_dont_write_bytecode = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = old_dont_write_bytecode
    # assertIn produces a far clearer failure message than
    # assertTrue('...' in ...), and matches the sibling examples here.
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
5
Example 8
Project: ironpython3 Source File: test_build_py.py
def test_dont_write_bytecode(self):
    """build_py.byte_compile must skip (and log) when bytecode writing is
    globally disabled."""
    cmd = build_py(self.create_dist()[1])
    cmd.compile = cmd.optimize = 1
    saved = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = saved
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
5
Example 9
Project: pymo Source File: test_build_py.py
def test_dont_write_bytecode(self):
    """byte_compile is skipped, with a log entry, while bytecode writing
    is disabled interpreter-wide."""
    pkg_dir, dist = self.create_dist()
    cmd = build_py(dist)
    cmd.compile = cmd.optimize = 1
    previous = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        cmd.byte_compile([])
    finally:
        sys.dont_write_bytecode = previous
    self.assertIn('byte-compiling is disabled', self.logs[0][1])
3
Example 10
@unittest.skipUnless(os.name == 'posix',
                     "test meaningful only on posix systems")
@unittest.skipIf(sys.dont_write_bytecode,
                 "test meaningful only when writing bytecode")
def test_execute_bit_not_copied(self):
    # Issue 6070: under posix .pyc files got their execute bit set if
    # the .py file had the execute bit set, but they aren't executable.
    # NOTE(review): Python 2 octal literal (022 == 0o22). The listing
    # truncates the snippet here -- the rest of the test body (and the
    # umask restoration) is missing.
    oldmask = os.umask(022)
3
Example 11
def test_directory_compiled(self):
    """A directory whose __main__ exists only as a legacy .pyc must still
    be runnable (unless bytecode writing is disabled)."""
    with temp_dir() as script_dir:
        mod_name = '__main__'
        source = self._make_test_script(script_dir, mod_name)
        py_compile.compile(source, doraise=True)
        os.remove(source)
        if not sys.dont_write_bytecode:
            legacy_pyc = make_legacy_pyc(source)
            self._check_script(script_dir, "<run_path>", legacy_pyc,
                               script_dir)
3
Example 12
Project: datafari Source File: test_import.py
@unittest.skipIf(sys.dont_write_bytecode,
                 "test meaningful only when writing bytecode")
def test_rewrite_pyc_with_read_only_source(self):
    # Issue 6074: a long time ago on posix, and more recently on Windows,
    # a read only source file resulted in a read only pyc file, which
    # led to problems with updating it later
    sys.path.insert(0, os.curdir)
    fname = TESTFN + os.extsep + "py"
    try:
        # Write a Python file, make it read-only and import it
        with open(fname, 'w') as f:
            f.write("x = 'original'\n")
        # Tweak the mtime of the source to ensure pyc gets updated later
        s = os.stat(fname)
        os.utime(fname, (s.st_atime, s.st_mtime-100000000))
        os.chmod(fname, 0400)
        # NOTE(review): Python 2 octal literal (0400 == 0o400). The listing
        # truncates the snippet here -- the import/rewrite steps and the
        # except/finally cleanup of this 'try' block are missing.
3
Example 13
@source_util.writes_bytecode_files
def run_test(self, dont_write_bytecode):
    """Load a module through the mock loader under the given flag setting
    and check that bytecode was produced exactly when the flag was False."""
    name = 'mod'
    loader = PyPycLoaderMock({name: os.path.join('path', 'to', 'mod')})
    sys.dont_write_bytecode = dont_write_bytecode
    with util.uncache(name):
        loader.load_module(name)
    self.assertIsNot(name in loader.module_bytecode, dont_write_bytecode)
3
Example 14
Project: PySide Source File: pyside_reload_test.py
def increment_module_value():
    """Append a 'Sentinel.value += 1' line to the module source at *dst* and
    drop any stale bytecode so the next (re)load sees the change."""
    with open(dst, 'a') as modfile:
        modfile.write('Sentinel.value += 1' + os.linesep)
        modfile.flush()
    if not sys.dont_write_bytecode:
        if py3k.IS_PY3K:
            import imp
            cache_file = imp.cache_from_source(dst)
        else:
            # Python 2 places the bytecode right next to the source.
            cache_file = dst + 'c'
        os.remove(cache_file)
3
Example 15
Project: ironpython3 Source File: test_build_py.py
@unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
def test_byte_compile_optimized(self):
    """build_py with optimize=1 (and compile=0) emits only a .pyo file."""
    project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
    os.chdir(project_dir)
    self.write_file('boiledeggs.py', 'import antigravity')
    cmd = build_py(dist)
    cmd.compile, cmd.optimize = 0, 1
    cmd.build_lib = 'here'
    cmd.finalize_options()
    cmd.run()
    entries = sorted(os.listdir(cmd.build_lib))
    self.assertEqual(entries, ['__pycache__', 'boiledeggs.py'])
    cached = sorted(os.listdir(os.path.join(cmd.build_lib, '__pycache__')))
    self.assertEqual(cached,
                     ['boiledeggs.%s.pyo' % sys.implementation.cache_tag])
3
Example 16
Project: ironpython3 Source File: test_install_lib.py
@unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
def test_byte_compile(self):
    """install_lib.byte_compile produces both .pyc and .pyo for a module
    when compile and optimize are enabled."""
    project_dir, dist = self.create_dist()
    os.chdir(project_dir)
    cmd = install_lib(dist)
    cmd.compile = cmd.optimize = 1
    source = os.path.join(project_dir, 'foo.py')
    self.write_file(source, '# python file')
    cmd.byte_compile([source])
    # debug_override=True -> .pyc path, False -> .pyo path.
    for debug in (True, False):
        compiled = importlib.util.cache_from_source('foo.py',
                                                    debug_override=debug)
        self.assertTrue(os.path.exists(compiled))
3
Example 17
Project: ironpython3 Source File: test_build_py.py
@unittest.skipIf(sys.dont_write_bytecode, 'byte-compile disabled')
def test_byte_compile(self):
    """build_py with compile=1 drops a tagged .pyc into __pycache__."""
    project_dir, dist = self.create_dist(py_modules=['boiledeggs'])
    os.chdir(project_dir)
    self.write_file('boiledeggs.py', 'import antigravity')
    cmd = build_py(dist)
    cmd.compile = 1
    cmd.build_lib = 'here'
    cmd.finalize_options()
    cmd.run()
    self.assertEqual(sorted(os.listdir(cmd.build_lib)),
                     ['__pycache__', 'boiledeggs.py'])
    self.assertEqual(os.listdir(os.path.join(cmd.build_lib, '__pycache__')),
                     ['boiledeggs.%s.pyc' % sys.implementation.cache_tag])
3
Example 18
Project: pymel Source File: __init__.py
def _deleteOldBatchGuiPyc():
import sys
if __file__.lower().endswith('.py') and not sys.dont_write_bytecode:
import os.path
folder = os.path.dirname(__file__)
for f in ('batch.pyc', 'gui.pyc'):
path = os.path.join(folder, f)
if os.path.isfile(path):
try:
os.remove(path)
except Exception:
pass
3
Example 19
@classmethod
def init_classes(cls, classpath):
    """Scan *classpath* directories for datasource_*.py plugin modules and
    register each module's __ds_ident__ function in cls.class_factory.

    Bytecode writing is disabled globally so plugin scans leave no .pyc files.
    """
    sys.dont_write_bytecode = True
    for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
        for module, path in [(item, p) for item in os.listdir(p)
                             if item[-3:] == ".py" and item.startswith('datasource_')]:
            # Pre-bind fp: otherwise the finally clause raises NameError
            # (masking the real failure) when imp.find_module itself raises.
            fp = None
            try:
                #print "try ds", module, path
                path = os.path.abspath(path)
                fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
                toplevel = imp.load_source(module.replace(".py", ""), filename)
                for cl in inspect.getmembers(toplevel, inspect.isfunction):
                    if cl[0] == "__ds_ident__":
                        cls.class_factory.append([path, module, cl[1]])
            except Exception as exp:  # 'as' spelling works on Python 2.6+ and 3
                logger.critical("could not load datasource %s from %s: %s" % (module, path, exp))
            finally:
                if fp:
                    fp.close()
3
Example 20
@classmethod
def init_classes(cls, classpath):
    """Scan *classpath* directories for detail_*.py plugin modules and
    register each module's __detail_ident__ function in cls.class_factory.

    Bytecode writing is disabled globally so plugin scans leave no .pyc files.
    """
    sys.dont_write_bytecode = True
    for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
        for module, path in [(item, p) for item in os.listdir(p)
                             if item[-3:] == ".py" and item.startswith('detail_')]:
            # Pre-bind fp: otherwise the finally clause raises NameError
            # (masking the real failure) when imp.find_module itself raises.
            fp = None
            try:
                path = os.path.abspath(path)
                fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
                toplevel = imp.load_source(module.replace(".py", ""), filename)
                for cl in inspect.getmembers(toplevel, inspect.isfunction):
                    if cl[0] == "__detail_ident__":
                        cls.class_factory.append([path, module, cl[1]])
            except Exception as e:  # 'as' spelling works on Python 2.6+ and 3
                print(e)
            finally:
                if fp:
                    fp.close()
3
Example 21
@classmethod
def init_classes(cls, classpath):
    """Scan *classpath* directories for datarecipient_*.py plugin modules and
    register each module's __ds_ident__ function in cls.class_factory.

    Bytecode writing is disabled globally so plugin scans leave no .pyc files.
    """
    sys.dont_write_bytecode = True
    for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
        for module, path in [(item, p) for item in os.listdir(p)
                             if item[-3:] == ".py" and item.startswith('datarecipient_')]:
            # Pre-bind fp: otherwise the finally clause raises NameError
            # (masking the real failure) when imp.find_module itself raises.
            fp = None
            try:
                #print "try dr", module, path
                path = os.path.abspath(path)
                fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
                toplevel = imp.load_source(module.replace(".py", ""), filename)
                for cl in inspect.getmembers(toplevel, inspect.isfunction):
                    if cl[0] == "__ds_ident__":
                        cls.class_factory.append([path, module, cl[1]])
            except Exception as exp:  # 'as' spelling works on Python 2.6+ and 3
                logger.critical("could not load datarecipient %s from %s: %s" % (module, path, exp))
            finally:
                if fp:
                    fp.close()
3
Example 22
@classmethod
def init_classes(cls, classpath):
    """Scan *classpath* directories for app_*.py / os_*.py plugin modules and
    register each module's __mi_ident__ function in cls.class_factory.

    Bytecode writing is disabled globally so plugin scans leave no .pyc files.
    """
    sys.dont_write_bytecode = True
    for p in [p for p in reversed(classpath) if os.path.exists(p) and os.path.isdir(p)]:
        for module, path in [(item, p) for item in os.listdir(p)
                             if item[-3:] == ".py" and (item.startswith('app_') or item.startswith('os_'))]:
            # Pre-bind fp: otherwise the finally clause raises NameError
            # (masking the real failure) when imp.find_module itself raises.
            fp = None
            try:
                path = os.path.abspath(path)
                fp, filename, data = imp.find_module(module.replace('.py', ''), [path])
                toplevel = imp.load_source(module.replace(".py", ""), filename)
                for cl in inspect.getmembers(toplevel, inspect.isfunction):
                    if cl[0] == "__mi_ident__":
                        cls.class_factory.append([path, module, cl[1]])
            except Exception as e:  # 'as' spelling works on Python 2.6+ and 3
                print(e)
            finally:
                if fp:
                    fp.close()
3
Example 23
Project: TrustRouter Source File: test_install_lib.py
@unittest.skipUnless(not sys.dont_write_bytecode,
                     'byte-compile not supported')
def test_byte_compile(self):
    """byte_compile on one source file leaves foo.pyc and foo.pyo
    alongside it."""
    pkg_dir, dist = self.create_dist()
    cmd = install_lib(dist)
    cmd.compile = cmd.optimize = 1
    source = os.path.join(pkg_dir, 'foo.py')
    self.write_file(source, '# python file')
    cmd.byte_compile([source])
    for compiled in ('foo.pyc', 'foo.pyo'):
        self.assertTrue(os.path.exists(os.path.join(pkg_dir, compiled)))
3
Example 24
Project: TrustRouter Source File: benchmark.py
def source_wo_bytecode(seconds, repeat):
    """Source w/o bytecode: simple"""
    # Save the caller's setting so the finally clause restores whatever was
    # in effect, instead of unconditionally clobbering the flag to False
    # (which broke callers that already had bytecode writing disabled).
    original = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
    try:
        name = '__importlib_test_benchmark__'
        # Clears out sys.modules and puts an entry at the front of sys.path.
        with source_util.create_modules(name) as mapping:
            assert not os.path.exists(imp.cache_from_source(mapping[name]))
            for result in bench(name, lambda: sys.modules.pop(name),
                                repeat=repeat, seconds=seconds):
                yield result
    finally:
        sys.dont_write_bytecode = original
3
Example 25
Project: TrustRouter Source File: build_py.py
def byte_compile(self, files):
    """Byte-compile *files* under build_lib, honouring the interpreter-wide
    sys.dont_write_bytecode switch and the compile/optimize options."""
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return
    from distutils.util import byte_compile
    prefix = self.build_lib
    if prefix[-1] != os.sep:
        prefix += os.sep
    # XXX this code is essentially the same as the 'byte_compile()
    # method of the "install_lib" command, except for the determination
    # of the 'prefix' string. Hmmm.
    if self.compile:
        byte_compile(files, optimize=0, force=self.force,
                     prefix=prefix, dry_run=self.dry_run)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, force=self.force,
                     prefix=prefix, dry_run=self.dry_run)
3
Example 26
def setUp(self):
    """Remember the interpreter-wide bytecode setting and switch writing
    off for the duration of these module-(re)writing tests."""
    self.modules = {}
    self._dont_write_bytecode = sys.dont_write_bytecode
    sys.dont_write_bytecode = True
3
Example 27
@unittest.skipIf(sys.dont_write_bytecode, 'byte-compile not enabled')
def test_byte_compile(self):
    """Depending on the -O flag, byte-compilation yields foo.pyc or foo.pyo."""
    pkg_dir = self._setup_byte_compile()
    compiled = 'foo.pyc' if sys.flags.optimize < 1 else 'foo.pyo'
    self.assertTrue(os.path.exists(os.path.join(pkg_dir, compiled)))
2
Example 28
Project: staticsite Source File: core.py
def load(self, pathname):
    """
    Load settings from a python file, importing only uppercase symbols
    """
    saved_flag = sys.dont_write_bytecode
    try:
        # Keep the settings file from leaving a .pyc behind.
        sys.dont_write_bytecode = True
        # Importing a module from an explicit path changed across Python
        # releases, hence the version branch below.
        # http://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
        if sys.version_info >= (3, 5):
            import importlib.util
            spec = importlib.util.spec_from_file_location("staticsite.settings", pathname)
            settings_mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(settings_mod)
        else:
            from importlib.machinery import SourceFileLoader
            settings_mod = SourceFileLoader("staticsite.settings", pathname).load_module()
    finally:
        sys.dont_write_bytecode = saved_flag
    self.add_module(settings_mod)
2
Example 29
Project: autotest Source File: file_module_loader_unittest.py
@staticmethod
def test_load_module_from_file():
    """load_module_from_file must import the module without leaving the
    global sys.dont_write_bytecode flag changed for the caller."""
    tmp_fd, tmp_path = mkstemp()
    try:
        # Write a tiny (Python 2) module into the temp file; the exact
        # source text below is what the loader is expected to execute.
        with os.fdopen(tmp_fd, "w") as tmpfile:
            tmpfile.write("""
import sys
some_value = 'some_value'
print sys.dont_write_bytecode
bytecode_val = sys.dont_write_bytecode
""")
            tmpfile.flush()
            tmpfile.seek(0)
        assert not sys.dont_write_bytecode
        loaded = file_module_loader.load_module_from_file(tmp_path)
        assert loaded.some_value == 'some_value'
        assert loaded.bytecode_val
        assert not sys.dont_write_bytecode
    finally:
        os.remove(tmp_path)
0
Example 30
Project: brython Source File: abc.py
def get_code(self, fullname):
    """Get a code object from source or bytecode."""
    # NOTE(review): indentation was lost when this listing was scraped and
    # has been reconstructed here; verify against importlib/abc.py upstream.
    warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
                  "removal in Python 3.4; use SourceLoader instead. "
                  "If Python 3.1 compatibility is required, see the "
                  "latest docuementation for PyLoader.",
                  DeprecationWarning)
    source_timestamp = self.source_mtime(fullname)
    # Try to use bytecode if it is available.
    bytecode_path = self.bytecode_path(fullname)
    if bytecode_path:
        data = self.get_data(bytecode_path)
        try:
            # Legacy .pyc layout: 4-byte magic, 4-byte mtime, 4-byte source
            # size, then the marshalled code object.
            magic = data[:4]
            if len(magic) < 4:
                raise ImportError(
                    "bad magic number in {}".format(fullname),
                    name=fullname, path=bytecode_path)
            raw_timestamp = data[4:8]
            if len(raw_timestamp) < 4:
                raise EOFError("bad timestamp in {}".format(fullname))
            pyc_timestamp = _bootstrap._r_long(raw_timestamp)
            raw_source_size = data[8:12]
            if len(raw_source_size) != 4:
                raise EOFError("bad file size in {}".format(fullname))
            # Source size is unused as the ABC does not provide a way to
            # get the size of the source ahead of reading it.
            bytecode = data[12:]
            # Verify that the magic number is valid.
            if imp.get_magic() != magic:
                raise ImportError(
                    "bad magic number in {}".format(fullname),
                    name=fullname, path=bytecode_path)
            # Verify that the bytecode is not stale (only matters when
            # there is source to fall back on.
            if source_timestamp:
                if pyc_timestamp < source_timestamp:
                    raise ImportError("bytecode is stale", name=fullname,
                                      path=bytecode_path)
        except (ImportError, EOFError):
            # If source is available give it a shot.
            if source_timestamp is not None:
                pass
            else:
                raise
        else:
            # Bytecode seems fine, so try to use it.
            return marshal.loads(bytecode)
    elif source_timestamp is None:
        raise ImportError("no source or bytecode available to create code "
                          "object for {0!r}".format(fullname),
                          name=fullname)
    # Use the source.
    source_path = self.source_path(fullname)
    if source_path is None:
        message = "a source path must exist to load {0}".format(fullname)
        raise ImportError(message, name=fullname)
    source = self.get_data(source_path)
    code_object = compile(source, source_path, 'exec', dont_inherit=True)
    # Generate bytecode and write it out.
    if not sys.dont_write_bytecode:
        data = bytearray(imp.get_magic())
        data.extend(_bootstrap._w_long(source_timestamp))
        data.extend(_bootstrap._w_long(len(source) & 0xFFFFFFFF))
        data.extend(marshal.dumps(code_object))
        self.write_bytecode(fullname, data)
    return code_object
0
Example 31
Project: autotest Source File: autotest_firewalld_add_service_unittest.py
Function: _load_module_no_bytecode
@preserve_value(sys, 'dont_write_bytecode')
def _load_module_no_bytecode(filename, module_file, module_file_path,
                             py_source_description):
    """
    Helper function to load a module while setting sys.dont_write_bytecode
    to prevent bytecode files from being generated (the decorator restores
    the flag afterwards). For example, if the module name is 'foo', then
    python would otherwise write 'fooc' as the bytecode.
    :type filename: str
    :type module_file: open
    :type module_file_path: str
    :type py_source_description: tuple
    :return: imported module
    :rtype: module
    """
    sys.dont_write_bytecode = 1
    module_name = os.path.splitext(filename)[0].replace("-", "_")
    return imp.load_module(module_name, module_file, module_file_path,
                           py_source_description)
0
Example 32
def _check_module(self, depth, alter_sys=False):
    # Build a throwaway package 'depth' levels deep holding the example
    # module, then run it twice: from source, and from its legacy .pyc.
    # NOTE(review): indentation reconstructed; listing had lost it.
    pkg_dir, mod_fname, mod_name = (
        self._make_pkg(example_source, depth))
    forget(mod_name)
    expected_ns = example_namespace.copy()
    expected_ns.update({
        "__name__": mod_name,
        "__file__": mod_fname,
        "__package__": mod_name.rpartition(".")[0],
    })
    if alter_sys:
        # run_module(alter_sys=True) also patches sys.argv[0] and
        # sys.modules, so expect those markers too.
        expected_ns.update({
            "run_argv0": mod_fname,
            "run_name_in_sys_modules": True,
            "module_in_sys_modules": True,
        })
    def create_ns(init_globals):
        return run_module(mod_name, init_globals, alter_sys=alter_sys)
    try:
        if verbose > 1: print("Running from source:", mod_name)
        self.check_code_execution(create_ns, expected_ns)
        importlib.invalidate_caches()
        # Import once so a cached .pyc exists, then delete the source and
        # fall back to a legacy-location bytecode file.
        __import__(mod_name)
        os.remove(mod_fname)
        if not sys.dont_write_bytecode:
            make_legacy_pyc(mod_fname)
        unload(mod_name)  # In case loader caches paths
        importlib.invalidate_caches()
        if verbose > 1: print("Running from compiled:", mod_name)
        self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
        self.check_code_execution(create_ns, expected_ns)
    finally:
        self._del_pkg(pkg_dir, depth, mod_name)
    if verbose > 1: print("Module executed successfully")
0
Example 33
def _check_package(self, depth, alter_sys=False):
    # Build a throwaway package 'depth' levels deep whose __main__ holds
    # the example source, then execute the *package* twice: from source,
    # and from a legacy-location .pyc.
    # NOTE(review): indentation reconstructed; listing had lost it.
    pkg_dir, mod_fname, mod_name = (
        self._make_pkg(example_source, depth, "__main__"))
    pkg_name = mod_name.rpartition(".")[0]
    forget(mod_name)
    expected_ns = example_namespace.copy()
    expected_ns.update({
        "__name__": mod_name,
        "__file__": mod_fname,
        "__package__": pkg_name,
    })
    if alter_sys:
        # run_module(alter_sys=True) also patches sys.argv[0] and
        # sys.modules, so expect those markers too.
        expected_ns.update({
            "run_argv0": mod_fname,
            "run_name_in_sys_modules": True,
            "module_in_sys_modules": True,
        })
    def create_ns(init_globals):
        return run_module(pkg_name, init_globals, alter_sys=alter_sys)
    try:
        if verbose > 1: print("Running from source:", pkg_name)
        self.check_code_execution(create_ns, expected_ns)
        importlib.invalidate_caches()
        # Import once so a cached .pyc exists, then delete the source and
        # fall back to a legacy-location bytecode file.
        __import__(mod_name)
        os.remove(mod_fname)
        if not sys.dont_write_bytecode:
            make_legacy_pyc(mod_fname)
        unload(mod_name)  # In case loader caches paths
        if verbose > 1: print("Running from compiled:", pkg_name)
        importlib.invalidate_caches()
        self._fix_ns_for_legacy_pyc(expected_ns, alter_sys)
        self.check_code_execution(create_ns, expected_ns)
    finally:
        self._del_pkg(pkg_dir, depth, pkg_name)
    if verbose > 1: print("Package executed successfully")
0
Example 34
Project: ochonetes Source File: main.py
def go():
    """
    Entry point for the ochopod CLI tool-suite. This script will look for python modules in the /tools
    sub-directory.
    """
    # NOTE(review): indentation reconstructed; listing had lost it.
    #
    # - start by simplifying a bit the console logger to look more CLI-ish
    #
    for handler in logger.handlers:
        handler.setFormatter(logging.Formatter('%(message)s'))
    try:
        def _import(where, funcs):
            # Load every *.py under 'where'; each module exposing a callable
            # go() that returns a Template gets registered under its tag.
            try:
                for script in [f for f in listdir(where) if isfile(join(where, f)) and f.endswith('.py')]:
                    try:
                        module = imp.load_source(script[:-3], join(where, script))
                        if hasattr(module, 'go') and callable(module.go):
                            tool = module.go()
                            assert isinstance(tool, Template), 'boo'
                            assert tool.tag, ''
                            funcs[tool.tag] = tool
                    except Exception as failure:
                        logger.warning('failed to import %s (%s)' % (script, diagnostic(failure)))
            except OSError:
                pass
        #
        # - disable .pyc generation
        # - scan for tools to import
        # - each .py module must have a go() callable as well as a COMMAND attribute
        # - the COMMAND attribute tells us what the command-line invocation looks like
        #
        tools = {}
        sys.dont_write_bytecode = True
        _import('%s/commands' % dirname(__file__), tools)
        def _usage():
            return 'available commands -> %s' % ', '.join(sorted(tools.keys()))
        parser = argparse.ArgumentParser(description='', prefix_chars='+', usage=_usage())
        parser.add_argument('command', type=str, help='command (e.g ls for instance)')
        parser.add_argument('extra', metavar='extra arguments', type=str, nargs='*', help='zero or more arguments')
        args = parser.parse_args()
        total = [args.command] + args.extra
        if args.command == 'help':
            logger.info(_usage())
            exit(0)
        def _sub(sub):
            # 1 when the token list 'sub' occurs as a contiguous slice of
            # the full command line 'total', else 0.
            for i in range(len(total)-len(sub)+1):
                if sub == total[i:i+len(sub)]:
                    return 1
            return 0
        matched = [tool for tool in tools.keys() if _sub(tool.split(' '))]
        if not matched:
            logger.info('unknown command (%s)' % _usage())
        elif len(matched) > 1:
            logger.info('more than one command were matched (%s)' % _usage())
        else:
            #
            # - simply invoke the tool
            # - remove the command tokens first and pass the rest as arguments
            # - each tool will parse its own commandline
            #
            picked = matched[0]
            tokens = len(picked.split(' ')) - 1
            exit(tools[picked].run(args.extra[tokens:]))
    except AssertionError as failure:
        logger.error('shutting down <- %s' % failure)
    except Exception as failure:
        logger.error('shutting down <- %s' % diagnostic(failure))
        exit(1)
0
Example 35
Project: PyClassLessons Source File: wheel.py
def install(self, paths, maker, **kwargs):
    """
    Install a wheel to the specified paths. If kwarg ``warner`` is
    specified, it should be a callable, which will be called with two
    tuples indicating the wheel version of this software and the wheel
    version in the file, if there is a discrepancy in the versions.
    This can be used to issue any warnings to raise any exceptions.
    If kwarg ``lib_only`` is True, only the purelib/platlib files are
    installed, and the headers, scripts, data and dist-info metadata are
    not written.
    The return value is a :class:`InstalledDistribution` instance unless
    ``options.lib_only`` is True, in which case the return value is ``None``.
    """
    # NOTE(review): indentation was lost when this listing was scraped and
    # has been reconstructed here; verify against distlib's wheel.py.
    dry_run = maker.dry_run
    warner = kwargs.get('warner')
    lib_only = kwargs.get('lib_only', False)
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver
    metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
    wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
    record_name = posixpath.join(info_dir, 'RECORD')
    wrapper = codecs.getreader('utf-8')
    with ZipFile(pathname, 'r') as zf:
        # Check the wheel format version and warn on mismatch.
        with zf.open(wheel_metadata_name) as bwf:
            wf = wrapper(bwf)
            message = message_from_file(wf)
        wv = message['Wheel-Version'].split('.', 1)
        file_version = tuple([int(i) for i in wv])
        if (file_version != self.wheel_version) and warner:
            warner(self.wheel_version, file_version)
        if message['Root-Is-Purelib'] == 'true':
            libdir = paths['purelib']
        else:
            libdir = paths['platlib']
        # Index RECORD rows by archive path for size/digest verification.
        records = {}
        with zf.open(record_name) as bf:
            with CSVReader(stream=bf) as reader:
                for row in reader:
                    p = row[0]
                    records[p] = row
        data_pfx = posixpath.join(data_dir, '')
        info_pfx = posixpath.join(info_dir, '')
        script_pfx = posixpath.join(data_dir, 'scripts', '')
        # make a new instance rather than a copy of maker's,
        # as we mutate it
        fileop = FileOperator(dry_run=dry_run)
        fileop.record = True  # so we can rollback if needed
        bc = not sys.dont_write_bytecode  # Double negatives. Lovely!
        outfiles = []  # for RECORD writing
        # for script copying/shebang processing
        workdir = tempfile.mkdtemp()
        # set target dir later
        # we default add_launchers to False, as the
        # Python Launcher should be used instead
        maker.source_dir = workdir
        maker.target_dir = None
        try:
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # The signature file won't be in RECORD,
                # and we don't currently don't do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    # Verify the RECORD digest before installing the entry.
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)
                if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                    logger.debug('lib_only: skipping %s', u_arcname)
                    continue
                is_script = (u_arcname.startswith(script_pfx)
                             and not u_arcname.endswith('.exe'))
                if u_arcname.startswith(data_pfx):
                    _, where, rp = u_arcname.split('/', 2)
                    outfile = os.path.join(paths[where], convert_path(rp))
                else:
                    # meant for site-packages.
                    if u_arcname in (wheel_metadata_name, record_name):
                        continue
                    outfile = os.path.join(libdir, convert_path(u_arcname))
                if not is_script:
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, outfile)
                    outfiles.append(outfile)
                    # Double check the digest of the written file
                    if not dry_run and row[1]:
                        with open(outfile, 'rb') as bf:
                            data = bf.read()
                        _, newdigest = self.get_hash(data, kind)
                        if newdigest != digest:
                            raise DistlibException('digest mismatch '
                                                   'on write for '
                                                   '%s' % outfile)
                    if bc and outfile.endswith('.py'):
                        try:
                            pyc = fileop.byte_compile(outfile)
                            outfiles.append(pyc)
                        except Exception:
                            # Don't give up if byte-compilation fails,
                            # but log it and perhaps warn the user
                            logger.warning('Byte-compilation failed',
                                           exc_info=True)
                else:
                    # Scripts are staged to workdir, then rewritten (shebang
                    # processing) by the maker into their final location.
                    fn = os.path.basename(convert_path(arcname))
                    workname = os.path.join(workdir, fn)
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, workname)
                    dn, fn = os.path.split(outfile)
                    maker.target_dir = dn
                    filenames = maker.make(fn)
                    fileop.set_executable_mode(filenames)
                    outfiles.extend(filenames)
            if lib_only:
                logger.debug('lib_only: returning None')
                dist = None
            else:
                # Generate scripts
                # Try to get pydist.json so we can see if there are
                # any commands to generate. If this fails (e.g. because
                # of a legacy wheel), log a warning but don't give up.
                commands = None
                file_version = self.info['Wheel-Version']
                if file_version == '1.0':
                    # Use legacy info
                    ep = posixpath.join(info_dir, 'entry_points.txt')
                    try:
                        with zf.open(ep) as bwf:
                            epdata = read_exports(bwf)
                        commands = {}
                        for key in ('console', 'gui'):
                            k = '%s_scripts' % key
                            if k in epdata:
                                commands['wrap_%s' % key] = d = {}
                                for v in epdata[k].values():
                                    s = '%s:%s' % (v.prefix, v.suffix)
                                    if v.flags:
                                        s += ' %s' % v.flags
                                    d[v.name] = s
                    except Exception:
                        logger.warning('Unable to read legacy script '
                                       'metadata, so cannot generate '
                                       'scripts')
                else:
                    try:
                        with zf.open(metadata_name) as bwf:
                            wf = wrapper(bwf)
                            commands = json.load(wf).get('commands')
                    except Exception:
                        logger.warning('Unable to read JSON metadata, so '
                                       'cannot generate scripts')
                if commands:
                    console_scripts = commands.get('wrap_console', {})
                    gui_scripts = commands.get('wrap_gui', {})
                    if console_scripts or gui_scripts:
                        script_dir = paths.get('scripts', '')
                        if not os.path.isdir(script_dir):
                            raise ValueError('Valid script path not '
                                             'specified')
                        maker.target_dir = script_dir
                        for k, v in console_scripts.items():
                            script = '%s = %s' % (k, v)
                            filenames = maker.make(script)
                            fileop.set_executable_mode(filenames)
                        if gui_scripts:
                            options = {'gui': True }
                            for k, v in gui_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script, options)
                                fileop.set_executable_mode(filenames)
                p = os.path.join(libdir, info_dir)
                dist = InstalledDistribution(p)
                # Write SHARED
                paths = dict(paths)  # don't change passed in dict
                del paths['purelib']
                del paths['platlib']
                paths['lib'] = libdir
                p = dist.write_shared_locations(paths, dry_run)
                if p:
                    outfiles.append(p)
                # Write RECORD
                dist.write_installed_files(outfiles, paths['prefix'],
                                           dry_run)
            return dist
        except Exception:  # pragma: no cover
            logger.exception('installation failed.')
            fileop.rollback()
            raise
        finally:
            shutil.rmtree(workdir)
0
Example 36
Project: pip Source File: wheel.py
def install(self, paths, maker, **kwargs):
    """
    Install a wheel to the specified paths. If kwarg ``warner`` is
    specified, it should be a callable, which will be called with two
    tuples indicating the wheel version of this software and the wheel
    version in the file, if there is a discrepancy in the versions.
    This can be used to issue any warnings or raise any exceptions.
    If kwarg ``lib_only`` is True, only the purelib/platlib files are
    installed, and the headers, scripts, data and dist-info metadata are
    not written.
    The return value is a :class:`InstalledDistribution` instance unless
    ``options.lib_only`` is True, in which case the return value is ``None``.
    """
    dry_run = maker.dry_run
    warner = kwargs.get('warner')
    lib_only = kwargs.get('lib_only', False)
    # Archive path and the PEP 427 member names inside it
    # (<name>-<ver>.dist-info / <name>-<ver>.data).
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver
    metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
    wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
    record_name = posixpath.join(info_dir, 'RECORD')
    wrapper = codecs.getreader('utf-8')  # zf.open yields bytes; wrap as UTF-8 text
    with ZipFile(pathname, 'r') as zf:
        with zf.open(wheel_metadata_name) as bwf:
            wf = wrapper(bwf)
            message = message_from_file(wf)  # WHEEL file is RFC 822-style
        wv = message['Wheel-Version'].split('.', 1)
        file_version = tuple([int(i) for i in wv])
        # Let the caller decide how to react to a wheel-version mismatch.
        if (file_version != self.wheel_version) and warner:
            warner(self.wheel_version, file_version)
        if message['Root-Is-Purelib'] == 'true':
            libdir = paths['purelib']
        else:
            libdir = paths['platlib']
        records = {}
        with zf.open(record_name) as bf:
            with CSVReader(stream=bf) as reader:
                for row in reader:
                    p = row[0]
                    records[p] = row  # row holds (path, digest, size)
        data_pfx = posixpath.join(data_dir, '')
        info_pfx = posixpath.join(info_dir, '')
        script_pfx = posixpath.join(data_dir, 'scripts', '')
        # make a new instance rather than a copy of maker's,
        # as we mutate it
        fileop = FileOperator(dry_run=dry_run)
        fileop.record = True  # so we can rollback if needed
        bc = not sys.dont_write_bytecode  # Double negatives. Lovely!
        outfiles = []  # for RECORD writing
        # for script copying/shebang processing
        workdir = tempfile.mkdtemp()
        # set target dir later
        # we default add_launchers to False, as the
        # Python Launcher should be used instead
        maker.source_dir = workdir
        maker.target_dir = None
        try:
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # The signature file won't be in RECORD,
                # and we don't currently do anything with it
                if u_arcname.endswith('/RECORD.jws'):
                    continue
                row = records[u_arcname]
                # Verify size and digest against RECORD before installing.
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)
                if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                    logger.debug('lib_only: skipping %s', u_arcname)
                    continue
                is_script = (u_arcname.startswith(script_pfx)
                             and not u_arcname.endswith('.exe'))
                if u_arcname.startswith(data_pfx):
                    # Member of the .data tree: route by its category
                    # directory (scripts/headers/data/...).
                    _, where, rp = u_arcname.split('/', 2)
                    outfile = os.path.join(paths[where], convert_path(rp))
                else:
                    # meant for site-packages.
                    if u_arcname in (wheel_metadata_name, record_name):
                        continue
                    outfile = os.path.join(libdir, convert_path(u_arcname))
                if not is_script:
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, outfile)
                    outfiles.append(outfile)
                    # Double check the digest of the written file
                    if not dry_run and row[1]:
                        with open(outfile, 'rb') as bf:
                            data = bf.read()
                            _, newdigest = self.get_hash(data, kind)
                            if newdigest != digest:
                                raise DistlibException('digest mismatch '
                                                       'on write for '
                                                       '%s' % outfile)
                    if bc and outfile.endswith('.py'):
                        try:
                            pyc = fileop.byte_compile(outfile)
                            outfiles.append(pyc)
                        except Exception:
                            # Don't give up if byte-compilation fails,
                            # but log it and perhaps warn the user
                            logger.warning('Byte-compilation failed',
                                           exc_info=True)
                else:
                    # Script: stage in workdir, then let maker rewrite
                    # the shebang and install into the target directory.
                    fn = os.path.basename(convert_path(arcname))
                    workname = os.path.join(workdir, fn)
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, workname)
                    dn, fn = os.path.split(outfile)
                    maker.target_dir = dn
                    filenames = maker.make(fn)
                    fileop.set_executable_mode(filenames)
                    outfiles.extend(filenames)
            if lib_only:
                logger.debug('lib_only: returning None')
                dist = None
            else:
                # Generate scripts
                # Try to get pydist.json so we can see if there are
                # any commands to generate. If this fails (e.g. because
                # of a legacy wheel), log a warning but don't give up.
                commands = None
                file_version = self.info['Wheel-Version']
                if file_version == '1.0':
                    # Use legacy info
                    ep = posixpath.join(info_dir, 'entry_points.txt')
                    try:
                        with zf.open(ep) as bwf:
                            epdata = read_exports(bwf)
                        commands = {}
                        for key in ('console', 'gui'):
                            k = '%s_scripts' % key
                            if k in epdata:
                                commands['wrap_%s' % key] = d = {}
                                for v in epdata[k].values():
                                    s = '%s:%s' % (v.prefix, v.suffix)
                                    if v.flags:
                                        s += ' %s' % v.flags
                                    d[v.name] = s
                    except Exception:
                        logger.warning('Unable to read legacy script '
                                       'metadata, so cannot generate '
                                       'scripts')
                else:
                    try:
                        with zf.open(metadata_name) as bwf:
                            wf = wrapper(bwf)
                            commands = json.load(wf).get('extensions')
                            if commands:
                                commands = commands.get('python.commands')
                    except Exception:
                        logger.warning('Unable to read JSON metadata, so '
                                       'cannot generate scripts')
                if commands:
                    console_scripts = commands.get('wrap_console', {})
                    gui_scripts = commands.get('wrap_gui', {})
                    if console_scripts or gui_scripts:
                        script_dir = paths.get('scripts', '')
                        if not os.path.isdir(script_dir):
                            raise ValueError('Valid script path not '
                                             'specified')
                        maker.target_dir = script_dir
                        for k, v in console_scripts.items():
                            script = '%s = %s' % (k, v)
                            filenames = maker.make(script)
                            fileop.set_executable_mode(filenames)
                        if gui_scripts:
                            options = {'gui': True }
                            for k, v in gui_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script, options)
                                fileop.set_executable_mode(filenames)
                p = os.path.join(libdir, info_dir)
                dist = InstalledDistribution(p)
                # Write SHARED
                paths = dict(paths)  # don't change passed in dict
                del paths['purelib']
                del paths['platlib']
                paths['lib'] = libdir
                p = dist.write_shared_locations(paths, dry_run)
                if p:
                    outfiles.append(p)
                # Write RECORD
                dist.write_installed_files(outfiles, paths['prefix'],
                                           dry_run)
            return dist
        except Exception:  # pragma: no cover
            logger.exception('installation failed.')
            fileop.rollback()  # undo everything copied so far
            raise
        finally:
            shutil.rmtree(workdir)
0
Example 37
def _parse(self, argv):
    """
    Parse ``argv`` into core options and per-task contexts.

    Side effects: sets ``sys.dont_write_bytecode``, may enable debug
    logging, and populates ``self.core``/``self.collection``/``self.tasks``.
    Raises ``Exit`` on the early-exit flags (--version, --help, --list)
    and ``ParseError`` when a per-task --help target is unknown.
    """
    debug("argv given to Program.run: {0!r}".format(argv))
    self.normalize_argv(argv)
    # Obtain core args (sets self.core)
    self.parse_core_args()
    debug("Finished parsing core args")
    # Set interpreter bytecode-writing flag
    sys.dont_write_bytecode = not self.args['write-pyc'].value
    # Enable debugging from here on out, if debug flag was given.
    # (Prior to this point, debugging requires setting INVOKE_DEBUG).
    if self.args.debug.value:
        enable_logging()
    # Print version & exit if necessary
    if self.args.version.value:
        debug("Saw --version, printing version & exiting")
        self.print_version()
        raise Exit
    # Load a collection of tasks unless one was already set.
    if self.namespace is not None:
        debug("Program was given a default namespace, skipping collection loading")  # noqa
        self.collection = self.namespace
    else:
        debug("No default namespace provided, trying to load one from disk")  # noqa
        # If no bundled namespace & --help was given, just print it and
        # exit. (If we did have a bundled namespace, core --help will be
        # handled *after* the collection is loaded & parsing is done.)
        if self.args.help.value is True:
            debug("No bundled namespace & bare --help given; printing help and exiting.")  # noqa
            self.print_help()
            raise Exit
        self.load_collection()
    # Parse remainder into task contexts (sets
    # self.parser/collection/tasks)
    self.parse_tasks()
    halp = self.args.help.value
    # Core (no value given) --help output (only when bundled namespace)
    if halp is True:
        debug("Saw bare --help, printing help & exiting")
        self.print_help()
        raise Exit
    # Print per-task help, if necessary
    if halp:
        if halp in self.parser.contexts:
            msg = "Saw --help <taskname>, printing per-task help & exiting"
            debug(msg)
            self.print_task_help()
            raise Exit
        else:
            # TODO: feels real dumb to factor this out of Parser, but...we
            # should?
            raise ParseError("No idea what '{0}' is!".format(halp))
    # Print discovered tasks if necessary
    if self.args.list.value:
        self.list_tasks()
        raise Exit
    # Print completion helpers if necessary
    if self.args.complete.value:
        complete(self.core, self.initial_context, self.collection)
    # Fallback behavior if no tasks were given & no default specified
    # (mostly a subroutine for overriding purposes)
    # NOTE: when there is a default task, Executor will select it when no
    # tasks were found in CLI parsing.
    if not self.tasks and not self.collection.default:
        self.no_tasks_given()
0
Example 38
Project: pytest Source File: rewrite.py
def find_module(self, name, path=None):
    """
    PEP 302 finder hook: locate module *name*, rewrite its asserts, and
    cache the rewritten bytecode under ``__pycache__``.

    Returns ``self`` (acting as the loader) when the module was rewritten,
    or ``None`` to defer to the normal import machinery.
    """
    state = self.config._assertstate
    state.trace("find_module called for: %s" % name)
    names = name.rsplit(".", 1)
    lastname = names[-1]  # unqualified module name within its package
    pth = None
    if path is not None:
        # Starting with Python 3.3, path is a _NamespacePath(), which
        # causes problems if not converted to list.
        path = list(path)
        if len(path) == 1:
            pth = path[0]
    if pth is None:
        # Fall back to imp's search to resolve the source file.
        try:
            fd, fn, desc = imp.find_module(lastname, path)
        except ImportError:
            return None
        if fd is not None:
            fd.close()
        tp = desc[2]
        if tp == imp.PY_COMPILED:
            # Only a .pyc was found; map it back to its source file.
            if hasattr(imp, "source_from_cache"):
                fn = imp.source_from_cache(fn)
            else:
                fn = fn[:-1]
        elif tp != imp.PY_SOURCE:
            # Don't know what this is.
            return None
    else:
        fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
    fn_pypath = py.path.local(fn)
    if not self._should_rewrite(name, fn_pypath, state):
        return None
    self._rewritten_names.add(name)
    # The requested module looks like a test file, so rewrite it. This is
    # the most magical part of the process: load the source, rewrite the
    # asserts, and load the rewritten source. We also cache the rewritten
    # module code in a special pyc. We must be aware of the possibility of
    # concurrent pytest processes rewriting and loading pycs. To avoid
    # tricky race conditions, we maintain the following invariant: The
    # cached pyc is always a complete, valid pyc. Operations on it must be
    # atomic. POSIX's atomic rename comes in handy.
    write = not sys.dont_write_bytecode
    cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
    if write:
        try:
            os.mkdir(cache_dir)
        except OSError:
            e = sys.exc_info()[1].errno
            if e == errno.EEXIST:
                # Either the __pycache__ directory already exists (the
                # common case) or it's blocked by a non-dir node. In the
                # latter case, we'll ignore it in _write_pyc.
                pass
            elif e in [errno.ENOENT, errno.ENOTDIR]:
                # One of the path components was not a directory, likely
                # because we're in a zip file.
                write = False
            elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
                state.trace("read only directory: %r" % fn_pypath.dirname)
                write = False
            else:
                raise
    cache_name = fn_pypath.basename[:-3] + PYC_TAIL
    pyc = os.path.join(cache_dir, cache_name)
    # Notice that even if we're in a read-only directory, I'm going
    # to check for a cached pyc. This may not be optimal...
    co = _read_pyc(fn_pypath, pyc, state.trace)
    if co is None:
        state.trace("rewriting %r" % (fn,))
        source_stat, co = _rewrite_test(self.config, fn_pypath)
        if co is None:
            # Probably a SyntaxError in the test.
            return None
        if write:
            _make_rewritten_pyc(state, source_stat, pyc, co)
    else:
        state.trace("found cached rewritten pyc for %r" % (fn,))
    self.modules[name] = co, pyc
    return self
0
Example 39
Project: imagrium Source File: util.py
# NOTE: this is Python 2-era distutils code (``raise ValueError, ...``
# syntax, ``string.join``) from a Jython-aware fork; it will not compile
# on Python 3 and is preserved byte-for-byte.
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory. 'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            if sys.platform.startswith('java'):
                # Jython writes class files instead of .pyc/.pyo.
                cfile = file[:-3] + '$py.class'
            else:
                cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
0
Example 40
Project: PokemonGo-Bot-Desktop Source File: install_lib.py
def byte_compile(self, files):
    """Byte-compile *files*, honoring the interpreter-wide opt-out.

    If ``sys.dont_write_bytecode`` is set, warn and do nothing.
    Otherwise run unoptimized compilation when ``self.compile`` is set,
    and optimized compilation when ``self.optimize`` > 0.
    """
    # The interpreter-wide switch trumps the command's own options.
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return

    from distutils.util import byte_compile

    # Get the "--root" directory supplied to the "install" command,
    # and use it as a prefix to strip off the purported filename
    # encoded in bytecode files. This is far from complete, but it
    # should at least generate usable bytecode in RPM distributions.
    root = self.get_finalized_command('install').root

    if self.compile:
        byte_compile(files, optimize=0, force=self.force,
                     prefix=root, dry_run=self.dry_run)

    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, force=self.force,
                     prefix=root, verbose=self.verbose,
                     dry_run=self.dry_run)
0
Example 41
Project: ochothon Source File: main.py
def go():
    """
    Entry point for the portal tool-set. This script will look for python modules in the /commands sub-directory. This
    is what is invoked from within the portal's flask endpoint (e.g when the user types something in the cli)
    """
    #
    # - start by simplifying a bit the console logger to look more CLI-ish
    #
    for handler in logger.handlers:
        handler.setFormatter(logging.Formatter('%(message)s'))
    try:
        def _import(where, funcs):
            # Load every .py file found in *where* and register any module
            # whose go() returns a Template into *funcs*, keyed by tool.tag.
            # A missing directory is silently ignored (OSError from listdir).
            try:
                for script in [f for f in listdir(where) if isfile(join(where, f)) and f.endswith('.py')]:
                    try:
                        module = imp.load_source(script[:-3], join(where, script))
                        if hasattr(module, 'go') and callable(module.go):
                            tool = module.go()
                            assert isinstance(tool, Template), 'wrong sub-class (invalid tool code ?)'
                            assert tool.tag, 'tag left undefined (invalid tool code ?)'
                            funcs[tool.tag] = tool
                    except Exception as failure:
                        logger.warning('failed to import %s (%s)' % (script, diagnostic(failure)))
            except OSError:
                pass

        #
        # - disable .pyc generation
        # - scan for tools to import
        # - each .py module must have a go() callable as well as a COMMAND attribute
        # - the COMMAND attribute tells us what the command-line invocation looks like
        #
        tools = {}
        sys.dont_write_bytecode = True
        _import('%s/commands' % dirname(__file__), tools)

        def _usage():
            return 'available commands -> %s' % ', '.join(sorted(tools.keys()))

        parser = ArgumentParser(description='', prefix_chars='+', usage=_usage())
        parser.add_argument('command', type=str, help='command (e.g ls for instance)')
        parser.add_argument('extra', metavar='extra arguments', type=str, nargs='*', help='zero or more arguments')
        args = parser.parse_args()
        total = [args.command] + args.extra
        if args.command == 'help':
            logger.info(_usage())
            exit(0)

        def _sub(sub):
            # 1 if *sub* appears as a contiguous sub-sequence of the typed
            # tokens, else 0 — used to match multi-word tool tags.
            for i in range(len(total)-len(sub)+1):
                if sub == total[i:i+len(sub)]:
                    return 1
            return 0

        matched = [tool for tool in tools.keys() if _sub(tool.split(' '))]
        if not matched:
            logger.info('unknown command (%s)' % _usage())
        else:
            #
            # - simply invoke the tool
            # - remove the command tokens first and pass the rest as arguments
            # - each tool will parse its own commandline
            # - if the tool does not define an exit code default to 0 (success)
            #
            picked = matched[0]
            tokens = len(picked.split(' ')) - 1
            code = tools[picked].run(args.extra[tokens:])
            exit(0 if code is None else code)

    except AssertionError as failure:
        logger.error('shutting down <- %s' % failure)
    except Exception as failure:
        logger.error('shutting down <- %s' % diagnostic(failure))
        exit(1)
0
Example 42
Project: invoke Source File: program.py
def bytecode_skipped_by_default(self):
    # With no --write-pyc flag, the program should leave bytecode
    # writing disabled (sys.dont_write_bytecode stays True).
    expect('-c foo mytask')
    eq_(sys.dont_write_bytecode, True)
0
Example 43
Project: bep Source File: install.py
def install_cmd(args, packages_file, packages_file_path, noise, install_dirs, installed_pkgs_dir):
    ''' Installs package(s) for either cmdline install interface or from .bep_packages file install

    Parameters
    ----------
    args: a class inst of the argparse namespace with the arguments parsed to use during the install.
    packages_file: the user's .bep_packages file.
    packages_file_path: the absolute path to the packages_file.
    noise: noise class inst with the verbosity level for the amount of output to deliver to stdout.
    install_dirs: dict of install locations for installed pkgs and install logs.
    installed_pkgs_dir: the absolute path to the where the downloaded and built pkgs are stored.
    '''
    ##### install from packages file # FIXME -- this is hacky, fix this
    #if ('pkg_type' in args) and (args.pkg_type == "packages"):
    if args.pkg_type == "packages":
        try:  # bring in the packages file
            sys.dont_write_bytecode = True  # to avoid writing a .pyc files (for the packages file)
            pkgs_module = imp.load_source(packages_file, packages_file_path)  # used to import a hidden file (really hackey)
        except (ImportError, IOError):
            print("No {0} file installed for use.".format(packages_file))
            if not os.path.isfile(packages_file_path):  # create packages file if one doesn't already exist.
                #shutil.copy(join('data', packages_file), packages_file_path) # create a template packages file
                #print("So created template {0} file for installation of packages.".format(packages_file))
                open(packages_file_path, 'a').close()  # creates an empty packages file
                print("So created empty {0} file for installation of packages.".format(packages_file))
            raise SystemExit("Now add the desired packages to the {} file and re-run install.".format(packages_file))

        def raise_problem(pkg_to_install):
            # Print the expected entry syntax for the packages file, then bail.
            # NOTE(review): 'name' below is not defined in this scope —
            # presumably a module-level global; verify before relying on it.
            print("\nError: cannot process entry in {}:".format(packages_file))
            print("\t{}\n".format(pkg_to_install))
            print("Item needs to be specified like such:")
            print("\t{} [language-->]repoType+userName/packageName[^branch]".format(name))
            print("\nNote: language and branch are both optional, and repoType only needs")
            print("to be specified if it's not ambigious given where the package comes from:")
            print("\teg. for a github install: ipython/ipython")
            print("\teg. for a github install: python3.3-->ipython/ipython")
            print("\teg. for a bitbucket install: hg+mchaput/whoosh")
            print("\teg. for a local install: git+/home/username/path/to/repo")
            raise SystemExit

        for pkg_type, pkgs_from_pkgs_file in pkgs_module.packages.items():
            utils.when_not_quiet_mode(utils.status('\t\tInstalling {0} packages'.format(pkg_type)), noise.quiet)
            if pkgs_from_pkgs_file:
                #####################################################################################################
                # FIXME need to refractor what the packages file is (or refractor this here)
                # Each entry looks like: [language-->][repoType+]user/pkg[^branch].
                # The nested if/elif ladders below peel those pieces off one
                # delimiter at a time ('-->', then '+', then '^').
                for pkg_to_install_entry in pkgs_from_pkgs_file:
                    lang_N_repo_type_N_pkg_to_install_N_branch = pkg_to_install_entry.split('-->')  # to see if a language is given
                    if len(lang_N_repo_type_N_pkg_to_install_N_branch) == 2:
                        lang_arg, repo_type_N_pkg_to_install_N_branch = lang_N_repo_type_N_pkg_to_install_N_branch
                        repo_type_N_pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch.split('+')  # to see if repo_type given # NOTE this won't work for pypi pkgs b/c there won't be a repo
                        if len(repo_type_N_pkg_to_install_N_branch) == 2:
                            repo_type, pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch
                            pkg_to_install_N_branch = pkg_to_install_N_branch.split('^')  # to see if branch is given
                            if len(pkg_to_install_N_branch) == 2:
                                pkg_to_install, branch = pkg_to_install_N_branch
                                legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                if legit_pkg_name:
                                    args = Args(repo_type, pkg_type, pkg_to_install, language=lang_arg, branch=branch)
                                else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                    raise_problem(pkg_to_install_entry)
                            elif len(pkg_to_install_N_branch) == 1:  # if branch not given, then get default #NOTE won't work for pypi installs
                                pkg_to_install = pkg_to_install_N_branch[0]
                                legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                if legit_pkg_name:
                                    branch = utils.get_default_branch(repo_type)
                                    args = Args(repo_type, pkg_type, pkg_to_install, language=lang_arg, branch=branch)  # use default branch
                                else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                    raise_problem(pkg_to_install_entry)
                            else:  # if too many ^ given
                                raise_problem(pkg_to_install_entry)
                        elif len(repo_type_N_pkg_to_install_N_branch) == 1:  # if repo_type not given
                            pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch[0]
                            if pkg_type in ['github']:
                                repo_type = 'git'
                                pkg_to_install_N_branch = pkg_to_install_N_branch.split('^')  # to see if branch is given
                                if len(pkg_to_install_N_branch) == 2:
                                    pkg_to_install, branch = pkg_to_install_N_branch
                                    legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                    if legit_pkg_name:
                                        args = Args(repo_type, pkg_type, pkg_to_install, branch=branch, language=lang_arg)
                                    else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                        raise_problem(pkg_to_install_entry)
                                elif len(pkg_to_install_N_branch) == 1:  # if branch not given, then get default #NOTE won't work for pypi installs
                                    pkg_to_install = pkg_to_install_N_branch[0]
                                    legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                    if legit_pkg_name:
                                        branch = utils.get_default_branch(repo_type)
                                        args = Args(repo_type, pkg_type, pkg_to_install, language=lang_arg, branch=branch)  # use default branch
                                    else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                        raise_problem(pkg_to_install_entry)
                                else:  # if too many ^ given
                                    raise_problem(pkg_to_install_entry)
                            else:  # if ambiguous repo_type (w/ more than one repo_type possible)
                                raise_problem(pkg_to_install_entry)
                        else:  # if too many '+' given
                            raise_problem(pkg_to_install_entry)
                    elif len(lang_N_repo_type_N_pkg_to_install_N_branch) == 1:  # language not given, use system default lang
                        # NOTE(review): the Args(...) calls below read
                        # args.language while also rebinding 'args'; after the
                        # first entry 'args' is an Args instance — presumably
                        # Args also exposes .language. Verify.
                        repo_type_N_pkg_to_install_N_branch = lang_N_repo_type_N_pkg_to_install_N_branch[0]
                        repo_type_N_pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch.split('+')  # to see if repo_type given # FIXME this won't work for pypi pkgs b/c there won't be a repo
                        if len(repo_type_N_pkg_to_install_N_branch) == 2:
                            repo_type, pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch
                            pkg_to_install_N_branch = pkg_to_install_N_branch.split('^')  # to see if branch is given
                            if len(pkg_to_install_N_branch) == 2:
                                pkg_to_install, branch = pkg_to_install_N_branch
                                legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                if legit_pkg_name:
                                    args = Args(repo_type, pkg_type, pkg_to_install, language=args.language, branch=branch)  # use default language
                                else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                    raise_problem(pkg_to_install_entry)
                            elif len(pkg_to_install_N_branch) == 1:  # if branch not given, then get default #FIXME won't work for pypi installs
                                pkg_to_install = pkg_to_install_N_branch[0]
                                legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                if legit_pkg_name:
                                    branch = utils.get_default_branch(repo_type)
                                    args = Args(repo_type, pkg_type, pkg_to_install, language=args.language, branch=branch)  # use default language & branch,
                                else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                    raise_problem(pkg_to_install_entry)
                            else:  # if too many ^ given
                                raise_problem(pkg_to_install_entry)
                        elif len(repo_type_N_pkg_to_install_N_branch) == 1:  # if repo_type not given
                            pkg_to_install_N_branch = repo_type_N_pkg_to_install_N_branch[0]
                            if pkg_type in ['github']:
                                repo_type = 'git'
                                pkg_to_install_N_branch = pkg_to_install_N_branch.split('^')  # to see if branch is given
                                if len(pkg_to_install_N_branch) == 2:
                                    pkg_to_install, branch = pkg_to_install_N_branch
                                    legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                    if legit_pkg_name:
                                        args = Args(repo_type, pkg_type, pkg_to_install, language=args.language, branch=branch)  # use default language
                                    else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                        raise_problem(pkg_to_install_entry)
                                elif len(pkg_to_install_N_branch) == 1:  # if branch not given, then get default #FIXME won't work for pypi installs
                                    pkg_to_install = pkg_to_install_N_branch[0]
                                    legit_pkg_name = utils.check_if_valid_pkg_to_install(pkg_to_install, pkg_type)
                                    if legit_pkg_name:
                                        branch = utils.get_default_branch(repo_type)
                                        args = Args(repo_type, pkg_type, pkg_to_install, language=args.language, branch=branch)  # use default language & branch,
                                    else:  # not a legit pkg_to_install in the pkg_to_install_entry
                                        raise_problem(pkg_to_install_entry)
                                else:  # if too many ^ given
                                    raise_problem(pkg_to_install_entry)
                            else:  # if ambiguous repo_type (w/ more than one repo_type possible)
                                raise_problem(pkg_to_install_entry)
                        else:  # if too many '+' given
                            raise_problem(pkg_to_install_entry)
                    else:  # if not one or two items after "-->" split
                        raise_problem(pkg_to_install_entry)
                    #####################################################################################################
                    # important to see what has previously been installed, so as to not turn on a 2nd version of a package.
                    everything_already_installed = utils.all_pkgs_and_branches_for_all_pkg_types_already_installed(installed_pkgs_dir)
                    pkg_inst = package.create_pkg_inst(args.language, args.pkg_type, install_dirs, args=args)  # args are created from the Args class here, unlike where args are the cmdline options for every other action
                    pkg_inst.install(args.pkg_to_install, args, noise, everything_already_installed=everything_already_installed)
            else:
                utils.when_not_quiet_mode('\nNo {0} packages specified in {1} to install.'.format(pkg_type, packages_file), noise.quiet)

    #### install w/ command line arg(s)
    #if 'pkg_to_install' in args:
    else:
        utils.when_not_quiet_mode(utils.status('\t\tInstalling {0} package'.format(args.pkg_type)), noise.quiet)
        pkg_inst = package.create_pkg_inst(args.language, args.pkg_type, install_dirs, args=args)
        # important to keep this here so it can be known what has previously been installed, so as to not turn on a 2nd version of a package.
        everything_already_installed = utils.all_pkgs_and_branches_for_all_pkg_types_already_installed(installed_pkgs_dir)
        pkg_inst.install(args.pkg_to_install, args, noise, everything_already_installed=everything_already_installed)
0
Example 44
Project: invoke Source File: program.py
def write_pyc_explicitly_enables_bytecode_writing(self):
    # Passing --write-pyc should flip the interpreter flag back on
    # (sys.dont_write_bytecode becomes False).
    expect('--write-pyc -c foo mytask')
    eq_(sys.dont_write_bytecode, False)
0
Example 45
@preserve_value(sys, 'dont_write_bytecode')
def _load_module_no_bytecode(filename, module_file, module_file_path, py_source_description):
    """
    Helper function to load a module while setting sys.dont_write_bytecode to prevent bytecode files from being
    generated.

    For example, if the module name is 'foo', then python would write 'foo.pyc' as the bytecode. This is not desirable.

    WARNING: dont_write_bytecode doesn't exist in Python 2.4 so you will get bytecode files.

    :type filename: str
    :type module_file: open file object
    :type module_file_path: str
    :type py_source_description: tuple
    :return: imported module
    :rtype: module
    """
    # The @preserve_value decorator restores the original value of
    # sys.dont_write_bytecode after this call returns.
    sys.dont_write_bytecode = 1
    new_module = imp.load_module(
        # Hyphens are not valid in module names; normalize to underscores.
        os.path.splitext(filename)[0].replace("-", "_"),
        module_file, module_file_path, py_source_description)
    return new_module
0
Example 46
Project: pymo Source File: test_build_py.py
def test_package_data(self):
    """Package data files are copied into the build but never byte-compiled."""
    sources = self.mkdtemp()
    with open(os.path.join(sources, "__init__.py"), "w") as f:
        f.write("# Pretend this is a package.")
    with open(os.path.join(sources, "README.txt"), "w") as f:
        f.write("Info about this package")

    destination = self.mkdtemp()
    dist = Distribution({"packages": ["pkg"],
                         "package_dir": {"pkg": sources}})
    # script_name need not exist, it just needs to be initialized
    dist.script_name = os.path.join(sources, "setup.py")
    dist.command_obj["build"] = support.DummyCommand(
        force=0,
        build_lib=destination)
    dist.packages = ["pkg"]
    dist.package_data = {"pkg": ["README.txt"]}
    dist.package_dir = {"pkg": sources}

    cmd = build_py(dist)
    cmd.compile = 1
    cmd.ensure_finalized()
    self.assertEqual(cmd.package_data, dist.package_data)
    cmd.run()

    # The outputs must include byte-compiled files for Python modules but
    # not for package data files (there shouldn't *be* byte-code files for
    # those!).
    self.assertEqual(len(cmd.get_outputs()), 3)
    pkgdest = os.path.join(destination, "pkg")
    files = os.listdir(pkgdest)
    self.assertIn("__init__.py", files)
    self.assertIn("README.txt", files)
    # XXX even with -O, distutils writes pyc, not pyo; bug?
    if sys.dont_write_bytecode:
        self.assertNotIn("__init__.pyc", files)
    else:
        self.assertIn("__init__.pyc", files)
0
Example 47
Project: TrustRouter Source File: util.py
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.

    'py_files' is a list of files to compile; any files that don't end in
    ".py" are silently skipped.  'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would affect
    the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.

    :raises DistutilsByteCompileError: if sys.dont_write_bytecode is set.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            # Very old Pythons lack mkstemp; fall back to the racy mktemp.
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(",\n".join(map(repr, py_files)) + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
prefix=%r, base_dir=%r,
verbose=%r, dry_run=0,
direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        # Re-run this same function in a child interpreter launched with the
        # optimization flags matching the requested 'optimize' level.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError("invalid prefix: filename %r doesn't start with %r"
                                     % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
0
Example 48
Project: PokemonGo-Bot-Desktop Source File: build_py.py
def byte_compile(self, files):
    """Byte-compile 'files' under build_lib per the compile/optimize options."""
    # Respect the interpreter-wide switch: nothing to do when bytecode
    # writing is disabled.
    if sys.dont_write_bytecode:
        self.warn('byte-compiling is disabled, skipping.')
        return

    from distutils.util import byte_compile

    # The prefix (stripped from the filenames encoded in the bytecode) is
    # build_lib with a guaranteed trailing separator.
    prefix = self.build_lib
    if prefix[-1] != os.sep:
        prefix += os.sep

    # XXX this code is essentially the same as the 'byte_compile()'
    # method of the "install_lib" command, except for the determination
    # of the 'prefix' string.  Hmmm.
    common = dict(force=self.force, prefix=prefix, dry_run=self.dry_run)
    if self.compile:
        byte_compile(files, optimize=0, **common)
    if self.optimize > 0:
        byte_compile(files, optimize=self.optimize, **common)
0
Example 49
Project: PokemonGo-Bot-Desktop Source File: util.py
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.

    NOTE: this is the Python 2 variant of distutils' byte_compile (it uses
    string.join and the old raise syntax); it is not valid Python 3.

    'py_files' is a list of files to compile; any files that don't end in
    ".py" are silently skipped.  'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would affect
    the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.

    :raises DistutilsByteCompileError: if sys.dont_write_bytecode is set.
    """
    # nothing is done if sys.dont_write_bytecode is True
    if sys.dont_write_bytecode:
        raise DistutilsByteCompileError('byte-compiling is disabled.')

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            # Very old Pythons lack mkstemp; fall back to the racy mktemp.
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
prefix=%r, base_dir=%r,
verbose=%r, dry_run=0,
direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        # Re-run this same function in a child interpreter launched with the
        # optimization flags matching the requested 'optimize' level.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
0
Example 50
def get_code(self, fullname):
    """Get a code object from source or bytecode.

    Prefers bytecode when it exists and is valid (magic number matches and
    it is not older than the source); otherwise compiles the source and,
    unless sys.dont_write_bytecode is set, writes fresh bytecode back via
    self.write_bytecode().

    :raises ImportError: if neither usable bytecode nor source exists.
    """
    # Typo fix: the original message said "docuementation".
    warnings.warn("importlib.abc.PyPycLoader is deprecated and slated for "
                  "removal in Python 3.4; use SourceLoader instead. "
                  "If Python 3.1 compatibility is required, see the "
                  "latest documentation for PyLoader.",
                  PendingDeprecationWarning)
    source_timestamp = self.source_mtime(fullname)
    # Try to use bytecode if it is available.
    bytecode_path = self.bytecode_path(fullname)
    if bytecode_path:
        data = self.get_data(bytecode_path)
        try:
            # Layout of a classic .pyc: 4-byte magic, 4-byte mtime, marshal data.
            magic = data[:4]
            if len(magic) < 4:
                raise ImportError("bad magic number in {}".format(fullname))
            raw_timestamp = data[4:8]
            if len(raw_timestamp) < 4:
                raise EOFError("bad timestamp in {}".format(fullname))
            pyc_timestamp = marshal._r_long(raw_timestamp)
            bytecode = data[8:]
            # Verify that the magic number is valid.
            if imp.get_magic() != magic:
                raise ImportError("bad magic number in {}".format(fullname))
            # Verify that the bytecode is not stale (only matters when
            # there is source to fall back on).
            if source_timestamp:
                if pyc_timestamp < source_timestamp:
                    raise ImportError("bytecode is stale")
        except (ImportError, EOFError):
            # If source is available give it a shot; otherwise re-raise.
            if source_timestamp is not None:
                pass
            else:
                raise
        else:
            # Bytecode seems fine, so try to use it.
            return marshal.loads(bytecode)
    elif source_timestamp is None:
        raise ImportError("no source or bytecode available to create code "
                          "object for {0!r}".format(fullname))
    # Use the source.
    source_path = self.source_path(fullname)
    if source_path is None:
        message = "a source path must exist to load {0}".format(fullname)
        raise ImportError(message)
    source = self.get_data(source_path)
    code_object = compile(source, source_path, 'exec', dont_inherit=True)
    # Generate bytecode and write it out.
    if not sys.dont_write_bytecode:
        data = bytearray(imp.get_magic())
        data.extend(marshal._w_long(source_timestamp))
        data.extend(marshal.dumps(code_object))
        self.write_bytecode(fullname, data)
    return code_object