2025-07-01
This commit is contained in:
@@ -0,0 +1,126 @@
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import contextlib
|
||||
from importlib import import_module
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import _util
|
||||
|
||||
|
||||
# Root directory that holds the vendored projects; by default this is the
# directory containing this file (debugpy/_vendored/).
VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__))
# TODO: Move the "pydevd" git submodule to the debugpy/_vendored directory
# and then drop the following fallback.
if "pydevd" not in os.listdir(VENDORED_ROOT):
    # Fall back to the parent directory while the "pydevd" submodule still
    # lives one level up rather than under debugpy/_vendored.
    VENDORED_ROOT = os.path.dirname(VENDORED_ROOT)
|
||||
|
||||
|
||||
def list_all(resolve=False):
    """Return the names of all vendored projects.

    If *resolve* is true, each entry is the absolute root directory of the
    project instead of its bare name.
    """
    # TODO: Derive from os.listdir(VENDORED_ROOT)?
    names = ["pydevd"]
    if resolve:
        return [project_root(each) for each in names]
    return names
|
||||
|
||||
|
||||
def project_root(project):
    """Return the path the root dir of the vendored project.

    If "project" is an empty string then the path prefix for vendored
    projects (e.g. "debugpy/_vendored/") will be returned.
    """
    # Normalize falsy values (None, "") so os.path.join never sees None.
    return os.path.join(VENDORED_ROOT, project or "")
|
||||
|
||||
|
||||
def iter_project_files(project, relative=False, **kwargs):
    """Yield (dirname, basename, filename) for all files in the project."""
    if not relative:
        # Walk with absolute paths rooted at the project directory.
        root = project_root(project)
        yield from _util.iter_all_files(root, **kwargs)
        return
    # Walk with paths relative to the vendored root; chdir for the duration
    # of the iteration (this is a generator, so the cwd stays switched while
    # the caller consumes it).
    with _util.cwd(VENDORED_ROOT):
        yield from _util.iter_all_files(project, **kwargs)
|
||||
|
||||
|
||||
def iter_packaging_files(project):
    """Yield the filenames for all files in the project.

    The filenames are relative to "debugpy/_vendored". This is most
    useful for the "package data" in a setup.py.
    """
    # TODO: Use default filters? __pycache__ and .pyc?
    prune_dir = None
    exclude_file = None
    try:
        # Optional per-project packaging hooks, e.g. "._pydevd_packaging".
        mod = import_module("._{}_packaging".format(project), __name__)
    except ImportError:
        pass
    else:
        prune_dir = getattr(mod, "prune_dir", None)
        exclude_file = getattr(mod, "exclude_file", None)
    for _, _, filename in iter_project_files(
        project, relative=True, prune_dir=prune_dir, exclude_file=exclude_file
    ):
        yield filename
|
||||
|
||||
|
||||
def prefix_matcher(*prefixes):
    """Return a module match func that matches any of the given prefixes."""
    assert prefixes
    # str.startswith accepts a tuple, testing all prefixes in one call.
    wanted = tuple(prefixes)

    def match(name, module):
        return name.startswith(wanted)

    return match
|
||||
|
||||
|
||||
def check_modules(project, match, root=None):
    """Verify that only vendored modules have been imported.

    Returns (unvendored, extensions): a mapping of module name -> filename
    for matching modules loaded from outside *root*, and a list of matching
    modules that have no filename (extension/builtin modules).
    """
    if root is None:
        root = project_root(project)
    ext_mods = []
    outside = {}
    # Snapshot sys.modules since it may mutate while we iterate.
    for name, module in list(sys.modules.items()):
        if not match(name, module):
            continue
        try:
            location = getattr(module, "__file__", None)
        except BaseException:
            # In theory any error may be raised when accessing __file__.
            location = None
        if not location:
            # No filename => extension/builtin module.
            ext_mods.append(name)
        elif not location.startswith(root):
            outside[name] = location
    return outside, ext_mods
|
||||
|
||||
|
||||
@contextlib.contextmanager
def vendored(project, root=None):
    """A context manager under which the vendored project will be imported."""
    location = project_root(project) if root is None else root
    # Prepend so that the vendored copy shadows any installed one.
    sys.path.insert(0, location)
    try:
        yield location
    finally:
        # Remove the first occurrence we added; later entries are untouched.
        sys.path.remove(location)
|
||||
|
||||
|
||||
def preimport(project, modules, **kwargs):
    """Import each of the named modules out of the vendored project.

    Extra keyword arguments (e.g. "root") are forwarded to vendored().
    """
    with vendored(project, **kwargs):
        for modname in modules:
            import_module(modname)
|
||||
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from . import VENDORED_ROOT
|
||||
from ._util import cwd, iter_all_files
|
||||
|
||||
|
||||
INCLUDES = [
|
||||
'setup_pydevd_cython.py',
|
||||
]
|
||||
|
||||
|
||||
def iter_files():
    """Yield (dirname, basename, filename) for all packaged pydevd files.

    From the root of the pydevd repo, we want only scripts and
    subdirectories that constitute the package itself (not helper
    scripts, tests etc). But when walking down into those
    subdirectories, we want everything below.
    """
    # BUG FIX: iter_all_files() is a generator, so the previous form
    # ("return iter_all_files(...)" inside the "with" block) restored the
    # original working directory as soon as iter_files() returned -- before
    # the walk actually ran. Yielding from inside the context keeps the cwd
    # at VENDORED_ROOT for the whole traversal, which the relative 'pydevd'
    # root requires.
    with cwd(VENDORED_ROOT):
        yield from iter_all_files('pydevd', prune_dir, exclude_file)
|
||||
|
||||
|
||||
def prune_dir(dirname, basename):
    """Return True if the walk should skip directory *basename* under *dirname*."""
    # Bytecode caches are never packaged.
    if basename == '__pycache__':
        return True
    # Below the top level, keep every directory.
    if dirname != 'pydevd':
        return False
    # At the top level, keep only the pydevd package directories.
    keep = basename.startswith(('pydev', '_pydev'))
    return not keep
|
||||
|
||||
|
||||
def exclude_file(dirname, basename):
    """Return True if file *basename* under *dirname* should be left out."""
    if dirname == 'pydevd':
        # Top level: explicitly listed files always win ...
        if basename in INCLUDES:
            return False
        # ... otherwise keep only pydevd's own .py scripts.
        return not (basename.endswith('.py') and 'pydev' in basename)

    # Below the top level, drop only compiled bytecode.
    return basename.endswith('.pyc')
|
||||
@@ -0,0 +1,59 @@
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
|
||||
|
||||
@contextlib.contextmanager
def cwd(dirname):
    """A context manager for operating in a different directory.

    Yields the directory that was current before switching, and always
    switches back on exit, even if the body raises.
    """
    previous = os.getcwd()
    os.chdir(dirname)
    try:
        yield previous
    finally:
        os.chdir(previous)
|
||||
|
||||
|
||||
def iter_all_files(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, basename, filename) for each file in the tree.

    This is an alternative to os.walk() that flattens out the tree and
    with filtering.
    """
    # Breadth-first: _iter_files() appends surviving subdirectories.
    queue = [root]
    while queue:
        current = queue.pop(0)
        yield from _iter_files(current, queue, prune_dir, exclude_file)
|
||||
|
||||
|
||||
def iter_tree(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, files) for each directory in the tree.

    The list of files is actually a list of (basename, filename).

    This is an alternative to os.walk() with filtering."""
    queue = [root]
    while queue:
        current = queue.pop(0)
        entries = [
            (basename, filename)
            for _, basename, filename in _iter_files(
                current, queue, prune_dir, exclude_file
            )
        ]
        yield current, entries
|
||||
|
||||
|
||||
def _iter_files(dirname, subdirs, prune_dir, exclude_file):
|
||||
for basename in os.listdir(dirname):
|
||||
filename = os.path.join(dirname, basename)
|
||||
if os.path.isdir(filename):
|
||||
if prune_dir is not None and prune_dir(dirname, basename):
|
||||
continue
|
||||
subdirs.append(filename)
|
||||
else:
|
||||
# TODO: Use os.path.isfile() to narrow it down?
|
||||
if exclude_file is not None and exclude_file(dirname, basename):
|
||||
continue
|
||||
yield dirname, basename, filename
|
||||
@@ -0,0 +1,81 @@
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
from importlib import import_module
|
||||
import os
|
||||
import warnings
|
||||
|
||||
from . import check_modules, prefix_matcher, preimport, vendored
|
||||
|
||||
# Ensure that pydevd is our vendored copy.
_unvendored, _ = check_modules('pydevd',
                               prefix_matcher('pydev', '_pydev'))
if _unvendored:
    _unvendored = sorted(_unvendored.values())
    msg = 'incompatible copy of pydevd already imported'
    # raise ImportError(msg)
    warnings.warn(msg + ':\n {}'.format('\n '.join(_unvendored)))

# If debugpy logging is enabled, enable it for pydevd as well
if "DEBUGPY_LOG_DIR" in os.environ:
    os.environ[str("PYDEVD_DEBUG")] = str("True")
    os.environ[str("PYDEVD_DEBUG_FILE")] = os.environ["DEBUGPY_LOG_DIR"] + str("/debugpy.pydevd.log")

# Disable pydevd frame-eval optimizations only if unset, to allow opt-in.
if "PYDEVD_USE_FRAME_EVAL" not in os.environ:
    os.environ[str("PYDEVD_USE_FRAME_EVAL")] = str("NO")

# Constants must be set before importing any other pydevd module
# # due to heavy use of "from" in them.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    # The vendored() context prepends the vendored pydevd to sys.path so
    # this import resolves to our copy, not any installed one.
    with vendored('pydevd'):
        pydevd_constants = import_module('_pydevd_bundle.pydevd_constants')
# We limit representation size in our representation provider when needed.
pydevd_constants.MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 2 ** 32

# Now make sure all the top-level modules and packages in pydevd are
# loaded. Any pydevd modules that aren't loaded at this point, will
# be loaded using their parent package's __path__ (i.e. one of the
# following).
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    preimport('pydevd', [
        '_pydev_bundle',
        '_pydev_runfiles',
        '_pydevd_bundle',
        '_pydevd_frame_eval',
        'pydev_ipython',
        'pydevd_plugins',
        'pydevd',
    ])

# When pydevd is imported it sets the breakpoint behavior, but it needs to be
# overridden because by default pydevd will connect to the remote debugger using
# its own custom protocol rather than DAP.
import pydevd  # noqa
import debugpy  # noqa


def debugpy_breakpointhook():
    """Route breakpoint() (sys.breakpointhook) through debugpy."""
    debugpy.breakpoint()


pydevd.install_breakpointhook(debugpy_breakpointhook)

# Ensure that pydevd uses JSON protocol
from _pydevd_bundle import pydevd_constants
from _pydevd_bundle import pydevd_defaults
pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = pydevd_constants.HTTP_JSON_PROTOCOL

# Enable some defaults related to debugpy such as sending a single notification when
# threads pause and stopping on any exception.
pydevd_defaults.PydevdCustomization.DEBUG_MODE = 'debugpy-dap'

# This is important when pydevd attaches automatically to a subprocess. In this case, we have to
# make sure that debugpy is properly put back in the game for users to be able to use it.
pydevd_defaults.PydevdCustomization.PREIMPORT = '%s;%s' % (
    os.path.dirname(os.path.dirname(debugpy.__file__)),
    'debugpy._vendored.force_pydevd'
)
|
||||
@@ -0,0 +1,153 @@
|
||||
"""
|
||||
License: Apache 2.0
|
||||
Author: Yuli Fitterman
|
||||
"""
|
||||
import types
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import IS_JYTHON
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc() # Ok, no inspect available (search will not work)
|
||||
|
||||
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
|
||||
|
||||
|
||||
def is_bound_method(obj):
    """Return True if *obj* is a method bound to an instance."""
    if not isinstance(obj, types.MethodType):
        return False
    # "im_self" is the legacy (Py2/Jython) spelling of "__self__".
    receiver = getattr(obj, "__self__", getattr(obj, "im_self", None))
    return receiver is not None
|
||||
|
||||
|
||||
def get_class_name(instance):
    """Return the name of *instance*'s class, or None if unavailable."""
    cls = getattr(instance, "__class__", None)
    return getattr(cls, "__name__", None)
|
||||
|
||||
|
||||
def get_bound_class_name(obj):
    """Return the class name of the instance *obj* is bound to, if any."""
    # "im_self" is the legacy (Py2/Jython) spelling of "__self__".
    receiver = getattr(obj, "__self__", getattr(obj, "im_self", None))
    return None if receiver is None else get_class_name(receiver)
|
||||
|
||||
|
||||
def get_description(obj):
    """Return a source-like stub (signature + docstring) describing callable *obj*."""
    try:
        ob_call = obj.__call__
    except:
        ob_call = None

    # Pick the function object whose signature best describes the callable:
    # a class's __init__, a bound __call__, or the object itself.
    if isinstance(obj, type) or type(obj).__name__ == "classobj":
        fob = getattr(obj, "__init__", lambda: None)
        if not isinstance(fob, (types.FunctionType, types.MethodType)):
            fob = obj
    elif is_bound_method(ob_call):
        fob = ob_call
    else:
        fob = obj

    argspec = ""
    fn_name = None
    fn_class = None
    if isinstance(fob, (types.FunctionType, types.MethodType)):
        # BUG FIX: this used inspect.formatargspec(), which was removed in
        # Python 3.11 and made this raise AttributeError. Render the
        # signature with inspect.signature() instead; fall back to a
        # generic spec for callables without a retrievable signature.
        try:
            argspec = str(inspect.signature(fob))
        except (TypeError, ValueError):
            argspec = "(*args, **kwargs)"
        fn_name = getattr(fob, "__name__", None)
        if isinstance(obj, type) or type(obj).__name__ == "classobj":
            fn_name = "__init__"
            fn_class = getattr(obj, "__name__", "UnknownClass")
        elif is_bound_method(obj) or is_bound_method(ob_call):
            fn_class = get_bound_class_name(obj) or "UnknownClass"

    else:
        fn_name = getattr(fob, "__name__", None)
        fn_self = getattr(fob, "__self__", None)
        if fn_self is not None and not isinstance(fn_self, types.ModuleType):
            fn_class = get_class_name(fn_self)

    doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj)
    return create_method_stub(fn_name, fn_class, argspec, doc_string)
|
||||
|
||||
|
||||
def create_method_stub(fn_name, fn_class, argspec, doc_string):
    """Build a source-text stub for the callable described by the arguments."""
    if fn_name and argspec:
        doc = "" if doc_string is None else doc_string
        stub = create_function_stub(fn_name, argspec, doc, indent=1 if fn_class else 0)
        if not fn_class:
            return stub + "\n" + fn_name
        expr = fn_class if fn_name == "__init__" else fn_class + "()." + fn_name
        return create_class_stub(fn_class, stub) + "\n" + expr
    if doc_string:
        if fn_name:
            # Try to recover a signature embedded in the docstring itself.
            recovered, _ = signature_from_docstring(doc_string, fn_name)
            if recovered:
                return create_method_stub(fn_name, fn_class, recovered, doc_string)
        return create_function_stub("unknown", "(*args, **kwargs)", doc_string) + "\nunknown"
    return ""
|
||||
|
||||
|
||||
def get_docstring(obj):
    """Return the best-available documentation text for *obj*.

    Falls back from the real docstring to repr(obj), then to the class
    name, and finally to "" -- this function never raises.
    """
    if obj is not None:
        try:
            if IS_JYTHON:
                # Jython
                doc = obj.__doc__
                if doc is not None:
                    return doc

                from _pydev_bundle import _pydev_jy_imports_tipper

                is_method, infos = _pydev_jy_imports_tipper.ismethod(obj)
                ret = ""
                if is_method:
                    for info in infos:
                        ret += info.get_as_doc()
                    return ret

            else:
                doc = inspect.getdoc(obj)
                if doc is not None:
                    return doc
        except:
            # Any failure above is treated as "no docstring available".
            pass
    else:
        return ""
    try:
        # if no attempt succeeded, try to return repr()...
        return repr(obj)
    except:
        try:
            # otherwise the class
            return str(obj.__class__)
        except:
            # if all fails, go to an empty string
            return ""
|
||||
|
||||
|
||||
def create_class_stub(class_name, contents):
    """Wrap *contents* (already indented) under a class definition header."""
    header = "class %s(object):" % class_name
    return header + "\n" + contents
|
||||
|
||||
|
||||
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
    """Render a "def" stub with the given name, argspec and docstring.

    *indent* is the number of indentation levels to shift the whole stub
    (used when the stub is nested inside a class stub).
    """

    def shift_right(string, prefix):
        # Prefix every line, preserving the original line endings.
        return "".join(prefix + line for line in string.splitlines(True))

    fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1))
    ret = '''
def %s%s:
    """%s"""
    pass
''' % (fn_name, fn_argspec, fn_docstring)
    ret = ret[1:]  # remove first /n
    ret = ret.replace("\t", " ")
    if indent:
        prefix = " " * indent
        ret = shift_right(ret, prefix)
    return ret
|
||||
@@ -0,0 +1,267 @@
|
||||
from collections import namedtuple
|
||||
from string import ascii_letters, digits
|
||||
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
import pydevconsole
|
||||
|
||||
import builtins as __builtin__ # Py3
|
||||
|
||||
try:
|
||||
import java.lang # @UnusedImport
|
||||
from _pydev_bundle import _pydev_jy_imports_tipper
|
||||
|
||||
_pydev_imports_tipper = _pydev_jy_imports_tipper
|
||||
except ImportError:
|
||||
IS_JYTHON = False
|
||||
from _pydev_bundle import _pydev_imports_tipper
|
||||
|
||||
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# _StartsWithFilter
|
||||
# =======================================================================================================================
|
||||
class _StartsWithFilter:
|
||||
"""
|
||||
Used because we can't create a lambda that'll use an outer scope in jython 2.1
|
||||
"""
|
||||
|
||||
def __init__(self, start_with):
|
||||
self.start_with = start_with.lower()
|
||||
|
||||
def __call__(self, name):
|
||||
return name.lower().startswith(self.start_with)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Completer
|
||||
#
|
||||
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
|
||||
# =======================================================================================================================
|
||||
class Completer:
    """Completion engine for pydev (adapted from IPython.completer)."""

    def __init__(self, namespace=None, global_namespace=None):
        """Create a new completer for the command line.

        Completer([namespace,global_namespace]) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given. This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

    def complete(self, text):
        """Return the possible completions for 'text'.

        Dispatches to attr_matches() when 'text' is a dotted expression and
        to global_matches() for a plain name. (The previous docstring
        described readline's stateful protocol, which this method does not
        implement -- it returns all matches at once.)
        """
        if self.use_main_ns:
            # In pydev this option should never be used
            raise RuntimeError("Namespace must be provided!")
            # NOTE: an unreachable "self.namespace = __main__.__dict__"
            # statement used to follow the raise above; it referenced an
            # unimported __main__ module and could never execute, so it was
            # removed.

        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.

        """

        def get_item(obj, attr):
            return obj[attr]

        a = {}

        for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]:  # @UndefinedVariable
            a.update(dict_with_comps)

        filter = _StartsWithFilter(text)

        return dir2(a, a.keys(), get_item, filter)

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions. (For class instances, class members are are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.

        """
        import re

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)  # @UndefinedVariable

        if not m:
            return []

        expr, attr = m.group(1, 3)
        try:
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        filter = _StartsWithFilter(attr)

        words = dir2(obj, filter=filter)

        return words
|
||||
|
||||
|
||||
def generate_completions(frame, act_tok):
    """
    :return list(tuple(method_name, docstring, parameters, completion_type))

    method_name: str
    docstring: str
    parameters: str -- i.e.: "(a, b)"
    completion_type is an int
    See: _pydev_bundle._pydev_imports_tipper for TYPE_ constants
    """
    if frame is None:
        return []

    # Not using frame.f_globals directly because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # (Names not resolved in generator expression in method)
    # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
    combined = dict(frame.f_globals)
    combined.update(frame.f_locals)  # locals later because it has precedence over the actual globals

    if pydevconsole.IPYTHON:
        return pydevconsole.get_completions(act_tok, act_tok, combined, frame.f_locals)

    # list(tuple(name, descr, parameters, type))
    return Completer(combined, None).complete(act_tok)
|
||||
|
||||
|
||||
def generate_completions_as_xml(frame, act_tok):
    """Return the completions for *act_tok* in *frame*, serialized as XML."""
    return completions_to_xml(generate_completions(frame, act_tok))
|
||||
|
||||
|
||||
def completions_to_xml(completions):
    """Serialize completion 4-tuples into pydevd's <xml><comp .../></xml> form."""
    valid_xml = pydevd_xml.make_valid_xml_value
    quote = pydevd_xml.quote
    parts = ["<xml>"]

    for comp in completions:
        p0, p1, p2, p3 = (valid_xml(quote(comp[i], "/>_= \t")) for i in range(4))
        parts.append('<comp p0="')
        parts.append(p0)
        parts.append('" p1="')
        parts.append(p1)
        parts.append('" p2="')
        parts.append(p2)
        parts.append('" p3="')
        parts.append(p3)
        parts.append('"/>')
    parts.append("</xml>")

    return "".join(parts)
|
||||
|
||||
|
||||
# Characters that may start / continue an ASCII identifier. Kept as sets
# for O(1) membership tests during token extraction.
identifier_start = set(ascii_letters) | {"_"}
identifier_part = set(ascii_letters) | {"_"} | set(digits)


def isidentifier(s):
    """Return True if *s* is a valid Python identifier."""
    return s.isidentifier()


# (token, qualifier) pair returned by extract_token_and_qualifier().
TokenAndQualifier = namedtuple("TokenAndQualifier", "token, qualifier")
|
||||
|
||||
|
||||
def extract_token_and_qualifier(text, line=0, column=0):
    """
    Extracts the token a qualifier from the text given the line/colum
    (see test_extract_token_and_qualifier for examples).

    :param unicode text:
    :param int line: 0-based
    :param int column: 0-based
    """
    # Note: not using the tokenize module because text should be unicode and
    # line/column refer to the unicode text (otherwise we'd have to know
    # those ranges after converted to bytes).
    line = max(line, 0)
    column = max(column, 0)

    if isinstance(text, bytes):
        text = text.decode("utf-8")

    all_lines = text.splitlines()
    try:
        current = all_lines[line]
    except IndexError:
        return TokenAndQualifier("", "")

    column = min(column, len(current))
    current = current[:column]

    # Scan backwards over identifier characters and dots.
    pieces = []
    index = column - 1
    while index >= 0:
        ch = current[index]
        if not (ch in identifier_part or isidentifier(ch) or ch == "."):
            break
        pieces.append(ch)
        index -= 1

    dotted = "".join(reversed(pieces))
    # Everything before the last dot is the token; the tail is the qualifier.
    token, _sep, qualifier = dotted.rpartition(".")
    return TokenAndQualifier(token, qualifier)
|
||||
@@ -0,0 +1,16 @@
|
||||
# We must redefine it in Py3k if it's not already there
|
||||
def execfile(file, glob=None, loc=None):
    """Python 3 replacement for the removed builtin execfile().

    Reads *file* honoring its PEP 263 encoding declaration and executes it
    with *glob*/*loc* (defaulting to the caller's globals).
    """
    if glob is None:
        import sys

        # Default to the caller's globals, mirroring the Py2 builtin.
        glob = sys._getframe().f_back.f_globals
    if loc is None:
        loc = glob

    import tokenize

    # tokenize.open() detects the source encoding (PEP 263 / BOM).
    with tokenize.open(file) as fileobj:
        source = fileobj.read()

    # execute the script (note: it's important to compile first to have the
    # filename set in debug mode)
    code = compile(source + "\n", file, "exec")
    exec(code, glob, loc)
|
||||
@@ -0,0 +1,43 @@
|
||||
import sys
|
||||
|
||||
|
||||
def __getfilesystemencoding():
    """
    Note: there's a copy of this method in interpreterInfo.py
    """
    try:
        ret = sys.getfilesystemencoding()
        if not ret:
            # Some implementations may return None/"" here; force the
            # fallback path below.
            raise RuntimeError("Unable to get encoding.")
        return ret
    except:
        try:
            # Handle Jython
            from java.lang import System  # @UnresolvedImport

            env = System.getProperty("os.name").lower()
            if env.find("win") != -1:
                return "ISO-8859-1"  # mbcs does not work on Jython, so, use a (hopefully) suitable replacement
            return "utf-8"
        except:
            pass

        # Only available from 2.3 onwards.
        if sys.platform == "win32":
            return "mbcs"
        return "utf-8"
|
||||
|
||||
|
||||
def getfilesystemencoding():
    """Return a filesystem encoding verified to be usable, or "utf-8".

    Any failure -- including detection itself raising, or the detected codec
    being unable to round-trip an empty string -- falls back to "utf-8".
    """
    try:
        codec = __getfilesystemencoding()

        # Check if the encoding is actually there to be used!
        # (str has no decode() on Python 3, so the second check is a no-op
        # there and only matters on legacy interpreters.)
        if hasattr("", "encode"):
            "".encode(codec)
        if hasattr("", "decode"):
            "".decode(codec)

        return codec
    except:
        return "utf-8"
|
||||
@@ -0,0 +1,133 @@
|
||||
# =======================================================================================================================
|
||||
# getopt code copied since gnu_getopt is not available on jython 2.1
|
||||
# =======================================================================================================================
|
||||
class GetoptError(Exception):
    """Raised when an option or its argument cannot be parsed."""

    # Class-level defaults, mirroring the stdlib getopt module.
    opt = ""
    msg = ""

    def __init__(self, msg, opt=""):
        self.msg = msg
        self.opt = opt
        super().__init__(msg, opt)

    def __str__(self):
        return self.msg
|
||||
|
||||
|
||||
def gnu_getopt(args, shortopts, longopts=()):
    """getopt(args, options[, long_options]) -> opts, args

    This function works like getopt(), except that GNU style scanning
    mode is used by default. This means that option and non-option
    arguments may be intermixed. The getopt() function stops
    processing options as soon as a non-option argument is
    encountered.

    If the first character of the option string is `+', or if the
    environment variable POSIXLY_CORRECT is set, then option
    processing stops as soon as a non-option argument is encountered.
    """
    # NOTE: unlike the stdlib, POSIXLY_CORRECT is not actually consulted
    # here; only the leading "+" in shortopts triggers that behavior.

    opts = []
    prog_args = []
    # A bare string means a single long option; otherwise copy the sequence
    # so neither the caller's list nor the default is ever mutated.
    # (The default used to be a mutable list literal -- harmless since it
    # was always copied, but a tuple default removes the hazard entirely.)
    if isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)

    # Allow options after non-option arguments?
    all_options_first = False
    if shortopts.startswith("+"):
        shortopts = shortopts[1:]
        all_options_first = True

    while args:
        if args[0] == "--":
            # "--" terminates option processing.
            prog_args += args[1:]
            break

        if args[0][:2] == "--":
            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
        elif args[0][:1] == "-":
            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
        else:
            if all_options_first:
                prog_args += args
                break
            else:
                prog_args.append(args[0])
                args = args[1:]

    return opts, prog_args
|
||||
|
||||
|
||||
def do_longs(opts, opt, longopts, args):
    """Consume one "--long[=value]" option, appending it to *opts*."""
    # Split "name=value"; optarg stays None when no "=" is present.
    name, sep, value = opt.partition("=")
    optarg = value if sep else None

    has_arg, name = long_has_args(name, longopts)
    if has_arg:
        if optarg is None:
            # Argument is the next word on the command line.
            if not args:
                raise GetoptError("option --%s requires argument" % name, name)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError("option --%s must not have an argument" % name, name)
    opts.append(("--" + name, optarg or ""))
    return opts, args
|
||||
|
||||
|
||||
# Return:
|
||||
# has_arg?
|
||||
# full option name
|
||||
def long_has_args(opt, longopts):
    """Resolve *opt* against *longopts*, which may contain it as a prefix.

    Returns (has_arg, full_option_name); entries ending in "=" take an
    argument. Raises GetoptError for unknown or ambiguous options.
    """
    candidates = [o for o in longopts if o.startswith(opt)]
    if not candidates:
        raise GetoptError("option --%s not recognized" % opt, opt)
    # Is there an exact match?
    if opt in candidates:
        return False, opt
    if opt + "=" in candidates:
        return True, opt
    # No exact match, so better be unique.
    if len(candidates) > 1:
        # XXX since candidates contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError("option --%s not a unique prefix" % opt, opt)
    assert len(candidates) == 1
    resolved = candidates[0]
    if resolved.endswith("="):
        return True, resolved[:-1]
    return False, resolved
|
||||
|
||||
|
||||
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options ("-abc"), appending them to *opts*."""
    while optstring:
        ch, optstring = optstring[0], optstring[1:]
        if not short_has_arg(ch, shortopts):
            optarg = ""
        else:
            if not optstring:
                # Argument is the next word on the command line.
                if not args:
                    raise GetoptError("option -%s requires argument" % ch, ch)
                optstring, args = args[0], args[1:]
            # The rest of the cluster is the argument.
            optarg, optstring = optstring, ""
        opts.append(("-" + ch, optarg))
    return opts, args
|
||||
|
||||
|
||||
def short_has_arg(opt, shortopts):
    """Return True if short option *opt* takes an argument per *shortopts*."""
    for index, spec in enumerate(shortopts):
        # ":" is a marker character, never an option letter itself.
        if opt == spec and spec != ":":
            # A ":" right after the option letter means "takes an argument".
            return shortopts.startswith(":", index + 1)
    raise GetoptError("option -%s not recognized" % opt, opt)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# End getopt code
|
||||
# =======================================================================================================================
|
||||
@@ -0,0 +1,372 @@
|
||||
import inspect
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked
|
||||
|
||||
from inspect import getfullargspec
|
||||
|
||||
|
||||
def getargspec(*args, **kwargs):
    """Legacy-style argspec built on inspect.getfullargspec.

    Returns (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults),
    normalizing the last two to [] / {} instead of None.
    """
    spec = getfullargspec(*args, **kwargs)
    return (
        spec.args,
        spec.varargs,
        spec.varkw,
        spec.defaults,
        spec.kwonlyargs or [],
        spec.kwonlydefaults or {},
    )
|
||||
|
||||
|
||||
# Completion-type codes: each completion entry is tagged with one of these
# single-character strings (they travel over the wire to the IDE as text).
TYPE_IMPORT = "0"  # module / import
TYPE_CLASS = "1"  # class
TYPE_FUNCTION = "2"  # function or method
TYPE_ATTR = "3"  # plain attribute
TYPE_BUILTIN = "4"  # builtin / unknown (also the fallback on errors)
TYPE_PARAM = "5"  # parameter
|
||||
|
||||
|
||||
def _imp(name, log=None):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if "." in name:
|
||||
sub = name[0 : name.rfind(".")]
|
||||
|
||||
if log is not None:
|
||||
log.add_content("Unable to import", name, "trying with", sub)
|
||||
log.add_exception()
|
||||
|
||||
return _imp(sub, log)
|
||||
else:
|
||||
s = "Unable to import module: %s - sys.path: %s" % (str(name), sys.path)
|
||||
if log is not None:
|
||||
log.add_content(s)
|
||||
log.add_exception()
|
||||
|
||||
raise ImportError(s)
|
||||
|
||||
|
||||
# True when running under IronPython (sys.platform == "cli"); elsewhere the
# generic _imp defined above is used unchanged.
IS_IPY = False
if sys.platform == "cli":
    IS_IPY = True
    _old_imp = _imp  # keep the generic importer; the wrapper delegates to it

    def _imp(name, log=None):
        """IronPython variant: register the .NET assembly with clr first,
        then delegate the actual import to the generic importer."""
        # We must add a reference in clr for .Net
        import clr  # @UnresolvedImport

        initial_name = name
        while "." in name:
            try:
                clr.AddReference(name)
                break  # If it worked, that's OK.
            except:
                # Trim the last component and retry with the parent name.
                name = name[0 : name.rfind(".")]
        else:
            # Loop ended without break (no dots left): try the bare name once.
            try:
                clr.AddReference(name)
            except:
                pass  # That's OK (not dot net module).

        return _old_imp(initial_name, log)
|
||||
|
||||
|
||||
def get_file(mod):
    """Best-effort path to the source file of module *mod*.

    Tries inspect first, then falls back to ``mod.__file__``; returns None
    when nothing can be determined. When the located file is a compiled
    ``.pyc``/``.pyo`` (case-insensitive), the matching ``.py`` is returned
    instead if it exists on disk.
    """
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        try:
            f = getattr(mod, "__file__", None)
        except:
            f = None
    # Bug fix: the original wrote ``f.lower(f[-4:])`` which raises TypeError
    # (str.lower takes no arguments); the intent is a case-insensitive check
    # of the extension (last four characters).
    if f and f[-4:].lower() in [".pyc", ".pyo"]:
        filename = f[:-4] + ".py"
        if os.path.exists(filename):
            f = filename

    return f
|
||||
|
||||
|
||||
def Find(name, log=None):
    """Resolve dotted *name* to (file, object, top_module, found_as).

    *file* is the source file of the deepest module reached, *object* is the
    final attribute, *top_module* is the first imported module, and
    *found_as* is the dotted attribute path below the deepest module.
    """
    top_module = _imp(name, log)
    current = top_module
    found_file = get_file(current) if inspect.ismodule(current) else None
    found_as = ""

    previous = None
    for part in name.split(".")[1:]:
        try:
            # After importing e.g. mx.DateTime.mxDateTime, the package
            # attribute can shadow a binary module of the same name
            # (mxDateTime.pyd); a single repeated component is tolerated.
            current = getattr(current, part)
        except AttributeError:
            if previous != part:
                raise
        if inspect.ismodule(current):
            found_file = get_file(current)
        else:
            if found_as:
                found_as += "."
            found_as += part
        previous = part

    return found_file, current, top_module, found_as
|
||||
|
||||
|
||||
def search_definition(data):
    """@return file, line, col"""
    # Strip newlines and any trailing dots from the requested token
    # (rstrip is a no-op when there is no trailing ".").
    token = data.replace("\n", "").rstrip(".")
    f, mod, parent, found_as = Find(token)
    try:
        return do_find(f, mod), found_as
    except:
        # Fall back to the containing module when the attribute itself
        # cannot be located.
        return do_find(f, parent), found_as
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    """Return (source_file, completion_tuples) for the dotted name in *data*."""
    token = data.replace("\n", "").rstrip(".")
    source_file, mod, _parent, _found_as = Find(token, log)
    return source_file, generate_imports_tip_for_module(mod)
|
||||
|
||||
|
||||
def check_char(c):
    """Map characters illegal in identifiers ('-' and '.') to '_'."""
    return "_" if c in ("-", ".") else c
|
||||
|
||||
|
||||
# Unique marker used to tell "no default provided" apart from a default of None.
_SENTINEL = object()
|
||||
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name: True):
    """
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    """
    ret = []

    if dir_comps is None:
        dir_comps = dir_checked(obj_to_complete)
        if hasattr_checked(obj_to_complete, "__dict__"):
            dir_comps.append("__dict__")
        if hasattr_checked(obj_to_complete, "__class__"):
            dir_comps.append("__class__")

    get_complete_info = True

    if len(dir_comps) > 1000:
        # ok, we don't want to let our users wait forever...
        # no complete info for you...

        get_complete_info = False

    # Types whose instances have huge/noisy docs we deliberately skip.
    dontGetDocsOn = (float, int, str, tuple, list, dict)
    # Subscriptable builtins: getattr on the class is skipped for these (see
    # the raised Exception below for why).
    dontGetattrOn = (dict, list, set, tuple)
    for d in dir_comps:
        if d is None:
            continue

        if not filter(d):
            continue

        args = ""

        try:
            try:
                if isinstance(obj_to_complete, dontGetattrOn):
                    raise Exception(
                        'Since python 3.9, e.g. "dict[str]" will return'
                        " a dict that's only supposed to take strings. "
                        'Interestingly, e.g. dict["val"] is also valid '
                        "and presumably represents a dict that only takes "
                        'keys that are "val". This breaks our check for '
                        "class attributes."
                    )
                # Prefer the class attribute (unbound descriptor) when possible.
                obj = getattr(obj_to_complete.__class__, d)
            except:
                obj = getattr(obj_to_complete, d)
        except:  # just ignore and get it without additional info
            ret.append((d, "", args, TYPE_BUILTIN))
        else:
            if get_complete_info:
                try:
                    retType = TYPE_BUILTIN

                    # check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:
                        if isinstance(obj, class_):
                            getDoc = False
                            break

                    doc = ""
                    if getDoc:
                        # no need to get this info... too many constants are defined and
                        # makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ""
                        except:  # may happen on jython when checking java classes (so, just ignore it)
                            doc = ""

                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        try:
                            args, vargs, kwargs, defaults, kwonly_args, kwonly_defaults = getargspec(obj)

                            # Copy before appending keyword-only parameters.
                            args = args[:]

                            for kwonly_arg in kwonly_args:
                                default = kwonly_defaults.get(kwonly_arg, _SENTINEL)
                                if default is not _SENTINEL:
                                    args.append("%s=%s" % (kwonly_arg, default))
                                else:
                                    args.append(str(kwonly_arg))

                            args = "(%s)" % (", ".join(args))
                        except TypeError:
                            # ok, let's see if we can get the arguments from the doc
                            args, doc = signature_from_docstring(doc, getattr(obj, "__name__", None))

                        retType = TYPE_FUNCTION

                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS

                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT

                    else:
                        retType = TYPE_ATTR

                    # add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))

                except:  # just ignore and get it without additional info
                    ret.append((d, "", args, TYPE_BUILTIN))

            else:  # get_complete_info == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION

                elif inspect.isclass(obj):
                    retType = TYPE_CLASS

                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT

                else:
                    retType = TYPE_ATTR
                # ok, no complete info, let's try to do this as fast and clean as possible
                # so, no docs for this kind of information, only the signatures
                ret.append((d, "", str(args), retType))

    return ret
|
||||
|
||||
|
||||
def signature_from_docstring(doc, obj_name):
    """Heuristic fallback used when inspect cannot provide a signature:
    scrape an "(args)"-style string from the first line(s) of *doc*.

    @return: tuple (args, doc) where args is a "(...)"-formatted string
        (defaulting to "()") and doc is the possibly-rewritten docstring.
    NOTE: any failure — including doc=None, for which len(doc) raises — is
    swallowed by the broad except and the defaults are returned.
    """
    args = "()"
    try:
        found = False
        if len(doc) > 0:
            if IS_IPY:
                # Handle case where we have the situation below
                # sort(self, object cmp, object key)
                # sort(self, object cmp, object key, bool reverse)
                # sort(self)
                # sort(self, object cmp)

                # Or: sort(self: list, cmp: object, key: object)
                # sort(self: list, cmp: object, key: object, reverse: bool)
                # sort(self: list)
                # sort(self: list, cmp: object)
                if obj_name:
                    name = obj_name + "("

                    # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                    lines = doc.splitlines()
                    if len(lines) == 1:
                        c = doc.count(name)
                        if c > 1:
                            doc = ("\n" + name).join(doc.split(name))

                    # Pick the longest overload line "name(...)" as the signature.
                    major = ""
                    for line in doc.splitlines():
                        if line.startswith(name) and line.endswith(")"):
                            if len(line) > len(major):
                                major = line
                    if major:
                        args = major[major.index("(") :]
                        found = True

            if not found:
                # Generic path: look at the text before "->", "--" or the
                # first line break and try to extract "(...)" from it.
                i = doc.find("->")
                if i < 0:
                    i = doc.find("--")
                    if i < 0:
                        i = doc.find("\n")
                        if i < 0:
                            i = doc.find("\r")

                if i > 0:
                    s = doc[0:i]
                    s = s.strip()

                    # let's see if we have a docstring in the first line
                    if s[-1] == ")":
                        start = s.find("(")
                        if start >= 0:
                            end = s.find("[")
                            if end <= 0:
                                end = s.find(")")
                                if end <= 0:
                                    end = len(s)

                            args = s[start:end]
                            if not args[-1] == ")":
                                args = args + ")"

                            # now, get rid of unwanted chars (keep the outer
                            # parentheses, sanitize everything in between)
                            l = len(args) - 1
                            r = []
                            for i in range(len(args)):
                                if i == 0 or i == l:
                                    r.append(args[i])
                                else:
                                    r.append(check_char(args[i]))

                            args = "".join(r)

            if IS_IPY:
                # IronPython signatures start with "(self: Type, ..."; reduce
                # that to "(self, ..." and drop anything after the first ")".
                if args.startswith("(self:"):
                    i = args.find(",")
                    if i >= 0:
                        args = "(self" + args[i:]
                    else:
                        args = "(self)"
                    i = args.find(")")
                    if i > 0:
                        args = args[: i + 1]

    except:
        pass
    return args, doc
|
||||
@@ -0,0 +1,485 @@
|
||||
import traceback
|
||||
from io import StringIO
|
||||
from java.lang import StringBuffer # @UnresolvedImport
|
||||
from java.lang import String # @UnresolvedImport
|
||||
import java.lang # @UnresolvedImport
|
||||
import sys
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
|
||||
from org.python.core import PyReflectedFunction # @UnresolvedImport
|
||||
|
||||
from org.python import core # @UnresolvedImport
|
||||
from org.python.core import PyClass # @UnresolvedImport
|
||||
|
||||
# Completion-type codes: each completion entry is tagged with one of these
# single-character strings (kept in sync with _pydev_imports_tipper).
TYPE_IMPORT = "0"  # module / import
TYPE_CLASS = "1"  # class
TYPE_FUNCTION = "2"  # function or method
TYPE_ATTR = "3"  # plain attribute
TYPE_BUILTIN = "4"  # builtin / unknown (also the fallback on errors)
TYPE_PARAM = "5"  # parameter
|
||||
|
||||
|
||||
def _imp(name):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if "." in name:
|
||||
sub = name[0 : name.rfind(".")]
|
||||
return _imp(sub)
|
||||
else:
|
||||
s = "Unable to import module: %s - sys.path: %s" % (str(name), sys.path)
|
||||
raise RuntimeError(s)
|
||||
|
||||
|
||||
import java.util  # @UnresolvedImport

# On Jython 2.7 java.lang.__file__ is None, so remember java.util's file
# (the runtime jar location) for Find() to fall back to on java.lang names.
_java_rt_file = getattr(java.util, "__file__", None)
|
||||
|
||||
|
||||
def Find(name):
    """Jython resolver for dotted *name*: returns (file, object, top_module,
    found_as), mapping a few __builtin__ names to their Jython classes and
    normalizing compiled-file extensions back to source paths."""
    if name.startswith("__builtin__"):
        aliases = {
            "__builtin__.str": "org.python.core.PyString",
            "__builtin__.dict": "org.python.core.PyDictionary",
        }
        name = aliases.get(name, name)

    top = _imp(name)
    current = top
    found_as = ""

    try:
        source = getattr(current, "__file__", None)
    except:
        source = None

    previous = None
    for part in name.split(".")[1:]:
        try:
            # A package attribute can shadow a same-named binary module
            # (e.g. mx.DateTime.mxDateTime.pyd); tolerate one repeat.
            current = getattr(current, part)
        except AttributeError:
            if previous != part:
                raise

        if hasattr(current, "__file__"):
            source = current.__file__
        else:
            if found_as:
                found_as += "."
            found_as += part

        previous = part

    if source is None and name.startswith("java.lang"):
        # Hack: java.lang.__file__ is None on Jython 2.7 (it pointed to rt.jar on Jython 2.5).
        source = _java_rt_file

    if source is not None:
        if source.endswith(".pyc"):
            source = source[:-1]
        elif source.endswith("$py.class"):
            source = source[: -len("$py.class")] + ".py"
    return source, current, top, found_as
|
||||
|
||||
|
||||
def format_param_class_name(paramClassName):
    """Turn a JVM/reflection type name into a friendlier display form:
    strips "<type '...'>" wrappers and expands JVM array descriptors
    ("[C" -> "char[]", "[Ljava.lang.String;" -> "java.lang.String[]")."""
    prefix, suffix = "<type '", "'>"
    if paramClassName.startswith(prefix) and paramClassName.endswith(suffix):
        paramClassName = paramClassName[len(prefix) : -len(suffix)]
    if paramClassName.startswith("["):
        primitive_arrays = {"[C": "char[]", "[B": "byte[]", "[I": "int[]"}
        if paramClassName in primitive_arrays:
            paramClassName = primitive_arrays[paramClassName]
        elif paramClassName.startswith("[L") and paramClassName.endswith(";"):
            paramClassName = paramClassName[2:-1] + "[]"
    return paramClassName
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    """Return (source_file, completion_tuples) for the dotted name in *data*."""
    token = data.replace("\n", "").rstrip(".")
    source_file, mod, _parent, _found_as = Find(token)
    return source_file, generate_imports_tip_for_module(mod)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Info
|
||||
# =======================================================================================================================
|
||||
class Info:
    """Signature information gathered for one function/method overload."""

    def __init__(self, name, **kwargs):
        self.name = name
        self.doc = kwargs.get("doc")  # docstring, or None
        self.args = kwargs.get("args", ())  # tuple of strings
        self.varargs = kwargs.get("varargs")  # string or None
        self.kwargs = kwargs.get("kwargs")  # string or None
        self.ret = kwargs.get("ret")  # string or None

    def basic_as_str(self):
        """@returns this class information as a string (just basic format)"""
        return "function:%s args=%s, varargs=%s, kwargs=%s, docs:%s" % (
            self.name,
            self.args,
            self.varargs,
            self.kwargs,
            self.doc,
        )

    def get_as_doc(self):
        """Render the info in the '@doc/@params/@varargs/...' text format
        that is sent to the IDE."""
        parts = [str(self.name)]
        if self.doc:
            parts.append("\n@doc %s\n" % str(self.doc))

        if self.args:
            parts.append("\n@params ")
            for arg in self.args:
                parts.append(str(format_param_class_name(arg)))
                parts.append(" ")

        if self.varargs:
            parts.append("\n@varargs ")
            parts.append(str(self.varargs))

        if self.kwargs:
            parts.append("\n@kwargs ")
            parts.append(str(self.kwargs))

        if self.ret:
            parts.append("\n@return ")
            parts.append(str(format_param_class_name(str(self.ret))))

        return "".join(parts)
|
||||
|
||||
|
||||
def isclass(cls):
    """True for Jython PyClass instances and for java.lang.Class objects."""
    if isinstance(cls, core.PyClass):
        return True
    return type(cls) == java.lang.Class
|
||||
|
||||
|
||||
def ismethod(func):
    """this function should return the information gathered on a function

    @param func: this is the function we want to get info on
    @return a tuple where:
        0 = indicates whether the parameter passed is a method or not
        1 = a list of classes 'Info', with the info gathered from the function
            this is a list because when we have methods from java with the same name and different signatures,
            we actually have many methods, each with its own set of arguments
    """

    try:
        if isinstance(func, core.PyFunction):
            # ok, this is from python, created by jython
            # print_ '    PyFunction'

            def getargs(func_code):
                """Get information about the arguments accepted by a code object.

                Three things are returned: (args, varargs, varkw), where 'args' is
                a list of argument names (possibly containing nested lists), and
                'varargs' and 'varkw' are the names of the * and ** arguments or None."""

                nargs = func_code.co_argcount
                names = func_code.co_varnames
                args = list(names[:nargs])
                step = 0  # NOTE(review): unused; kept as-is.

                # Newer Jython code objects don't expose CO_VARARGS directly;
                # fetch the flag values from org.python.core.CodeFlag instead.
                if not hasattr(func_code, "CO_VARARGS"):
                    from org.python.core import CodeFlag  # @UnresolvedImport

                    co_varargs_flag = CodeFlag.CO_VARARGS.flag
                    co_varkeywords_flag = CodeFlag.CO_VARKEYWORDS.flag
                else:
                    co_varargs_flag = func_code.CO_VARARGS
                    co_varkeywords_flag = func_code.CO_VARKEYWORDS

                varargs = None
                if func_code.co_flags & co_varargs_flag:
                    varargs = func_code.co_varnames[nargs]
                    nargs = nargs + 1
                varkw = None
                if func_code.co_flags & co_varkeywords_flag:
                    varkw = func_code.co_varnames[nargs]
                return args, varargs, varkw

            args = getargs(func.func_code)
            return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]

        if isinstance(func, core.PyMethod):
            # this is something from java itself, and jython just wrapped it...

            # things to play in func:
            # ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
            # 'im_func', 'im_self', 'toString']
            # print_ '    PyMethod'
            # that's the PyReflectedFunction... keep going to get it
            func = func.im_func

        if isinstance(func, PyReflectedFunction):
            # this is something from java itself, and jython just wrapped it...

            # print_ '    PyReflectedFunction'

            # One Info per Java overload in argslist.
            infos = []
            for i in range(len(func.argslist)):
                # things to play in func.argslist[i]:

                # 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
                # 'flags', 'isStatic', 'matches', 'precedence']

                # print_ '        ', func.argslist[i].data.__class__
                # func.argslist[i].data.__class__ == java.lang.reflect.Method

                if func.argslist[i]:
                    met = func.argslist[i].data
                    name = met.getName()
                    try:
                        ret = met.getReturnType()
                    except AttributeError:
                        ret = ""
                    parameterTypes = met.getParameterTypes()

                    args = []
                    for j in range(len(parameterTypes)):
                        paramTypesClass = parameterTypes[j]
                        try:
                            try:
                                paramClassName = paramTypesClass.getName()
                            except:
                                paramClassName = paramTypesClass.getName(paramTypesClass)
                        except AttributeError:
                            try:
                                paramClassName = repr(paramTypesClass)  # should be something like <type 'object'>
                                paramClassName = paramClassName.split("'")[1]
                            except:
                                paramClassName = repr(paramTypesClass)  # just in case something else happens... it will at least be visible
                        # if the parameter equals [C, it means it is a char array, so, let's change it

                        a = format_param_class_name(paramClassName)
                        # a = a.replace('[]','Array')
                        # a = a.replace('Object', 'obj')
                        # a = a.replace('String', 's')
                        # a = a.replace('Integer', 'i')
                        # a = a.replace('Char', 'c')
                        # a = a.replace('Double', 'd')
                        args.append(a)  # so we don't leave invalid code

                    info = Info(name, args=args, ret=ret)
                    # print_ info.basic_as_str()
                    infos.append(info)

            return 1, infos
    except Exception:
        # Never propagate reflection failures: report them as a pseudo-Info
        # whose doc carries the traceback.
        s = StringIO()
        traceback.print_exc(file=s)
        return 1, [Info(str("ERROR"), doc=s.getvalue())]

    return 0, None
|
||||
|
||||
|
||||
def ismodule(mod):
    """Module check that also recognizes plain java package objects."""
    # java modules... do we have other way to know that?
    # Java packages carry __name__ but neither getClass nor __class__.
    looks_like_java_package = (
        not hasattr(mod, "getClass") and not hasattr(mod, "__class__") and hasattr(mod, "__name__")
    )
    if looks_like_java_package:
        return 1

    return isinstance(mod, core.PyModule)
|
||||
|
||||
|
||||
def dir_obj(obj):
    """dir()-like listing for Jython objects.

    For java.lang.Class instances, walks superclasses and interfaces via
    reflection because a plain dir() misses methods inherited from
    interfaces (e.g. String.charAt); results are de-duplicated through a
    java HashMap. NOTE: the loop variable deliberately reuses/shadows *obj*
    while walking; *original* keeps the initial object for the final dir().
    """
    ret = []
    found = java.util.HashMap()
    original = obj
    if hasattr(obj, "__class__"):
        if obj.__class__ == java.lang.Class:
            # get info about superclasses
            classes = []
            classes.append(obj)
            try:
                c = obj.getSuperclass()
            except TypeError:
                # may happen on jython when getting the java.lang.Class class
                c = obj.getSuperclass(obj)

            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                try:
                    interfs.extend(obj.getInterfaces())
                except TypeError:
                    interfs.extend(obj.getInterfaces(obj))
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                try:
                    declaredMethods = obj.getDeclaredMethods()
                except TypeError:
                    declaredMethods = obj.getDeclaredMethods(obj)

                try:
                    declaredFields = obj.getDeclaredFields()
                except TypeError:
                    declaredFields = obj.getDeclaredFields(obj)

                for i in range(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret.append(name)
                    found.put(name, 1)

                for i in range(len(declaredFields)):
                    name = declaredFields[i].getName()
                    ret.append(name)
                    found.put(name, 1)

        elif isclass(obj.__class__):
            d = dir(obj.__class__)
            for name in d:
                ret.append(name)
                found.put(name, 1)

    # this simple dir does not always get all the info, that's why we have the part before
    # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
    # charAt don't appear)
    d = dir(original)
    for name in d:
        if found.get(name) != 1:
            ret.append(name)

    return ret
|
||||
|
||||
|
||||
def format_arg(arg):
    """formats an argument to be shown"""
    text = str(arg)
    # Keep only what follows the last '.', i.e. the simple class name.
    if "." in text:
        text = text.rsplit(".", 1)[1]

    text = text.replace(";", "").replace("[]", "Array")
    if text:
        # Lower-case the first character so it reads like a parameter name.
        text = text[0].lower() + text[1:]

    return text
|
||||
|
||||
|
||||
def search_definition(data):
    """@return file, line, col"""
    # Strip newlines and trailing dots (rstrip is a no-op otherwise).
    token = data.replace("\n", "").rstrip(".")
    f, mod, parent, found_as = Find(token)
    try:
        return do_find(f, mod), found_as
    except:
        # Fall back to the containing module on failure.
        return do_find(f, parent), found_as
|
||||
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name: True):
    """
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    """
    ret = []

    if dir_comps is None:
        dir_comps = dir_obj(obj_to_complete)

    for d in dir_comps:
        if d is None:
            continue

        if not filter(d):
            continue

        args = ""
        doc = ""
        retType = TYPE_BUILTIN

        try:
            obj = getattr(obj_to_complete, d)
        except (AttributeError, java.lang.NoClassDefFoundError):
            # jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
            # we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
            # for that we need a custom classloader... we have references from it in the below places:
            #
            # http://mindprod.com/jgloss/classloader.html
            # http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
            # http://freshmeat.net/articles/view/1643/
            #
            # note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
            # before the run, everything goes fine.
            #
            # The code below illustrates what I mean...
            #
            # import sys
            # sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
            #
            # import junit.framework
            # print_ dir(junit.framework) #shows the TestCase class here
            #
            # import junit.framework.TestCase
            #
            # raises the error:
            # Traceback (innermost last):
            #   File "<console>", line 1, in ?
            # ImportError: No module named TestCase
            #
            # whereas if we had added the jar to the classpath before, everything would be fine by now...

            ret.append((d, "", "", retType))
            # that's ok, private things cannot be gotten...
            continue
        else:
            # ismethod() returns (is_method_flag, [Info, ...]); use the first
            # overload's Info to build the displayed signature.
            isMet = ismethod(obj)
            if isMet[0] and isMet[1]:
                info = isMet[1][0]
                try:
                    args, vargs, kwargs = info.args, info.varargs, info.kwargs
                    doc = info.get_as_doc()
                    r = ""
                    for a in args:
                        if len(r) > 0:
                            r += ", "
                        r += format_arg(a)
                    args = "(%s)" % (r)
                except TypeError:
                    traceback.print_exc()
                    args = "()"

                retType = TYPE_FUNCTION

            elif isclass(obj):
                retType = TYPE_CLASS

            elif ismodule(obj):
                retType = TYPE_IMPORT

            # add token and doc to return - assure only strings.
            ret.append((d, doc, args, retType))

    return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc manual test: put a jar on sys.path and resolve a class from it.
    # The path is a developer-machine path; only meaningful under Jython.
    sys.path.append(r"D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar")
    sys.stdout.write("%s\n" % Find("junit.framework.TestCase"))
|
||||
@@ -0,0 +1,23 @@
|
||||
import traceback
|
||||
import sys
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Log:
    """Simple in-memory log that accumulates lines and exception tracebacks."""

    def __init__(self):
        self._contents = []

    def add_content(self, *content):
        """Append one line built by joining *content* with spaces.

        Items are coerced with str() so callers may pass non-string values
        (the original ``" ".join(content)`` raised TypeError for those).
        """
        self._contents.append(" ".join(str(item) for item in content))

    def add_exception(self):
        """Append the full traceback of the exception currently being handled."""
        s = StringIO()
        exc_info = sys.exc_info()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s)
        self._contents.append(s.getvalue())

    def get_contents(self):
        """Return everything logged so far, newline-separated."""
        return "\n".join(self._contents)

    def clear_log(self):
        """Discard all logged content (in place)."""
        del self._contents[:]
|
||||
@@ -0,0 +1,134 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
def find_in_pythonpath(module_name):
    """Return every sys.path location that could satisfy *module_name*
    (package directories first, then matching .py files, per path entry).

    Deliberately simplistic: misses import customizations and .zip imports,
    which is fine for the shadowed-module diagnostics this supports.
    """
    relative = module_name.split(".")  # mod.name -> mod/name
    matches = []
    for entry in sys.path:
        candidate = os.path.join(entry, *relative)
        if os.path.isdir(candidate):
            matches.append(candidate)
        source = candidate + ".py"
        if os.path.exists(source):
            matches.append(source)
    return matches
|
||||
|
||||
|
||||
class DebuggerInitializationError(Exception):
    """Raised when the debugger cannot start, e.g. because a standard-library
    module it depends on is shadowed by a user file of the same name."""

    pass
|
||||
|
||||
|
||||
class VerifyShadowedImport(object):
    """Context manager that converts an import failure into a clear
    DebuggerInitializationError when the target module appears to be
    shadowed by a user file of the same name on sys.path."""

    def __init__(self, import_name):
        self.import_name = import_name

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            return None
        if exc_type == DebuggerInitializationError:
            return False  # It's already an error we generated.

        # The import itself failed; see how many sys.path entries match.
        candidates = find_in_pythonpath(self.import_name)
        if len(candidates) <= 1:
            # Not found anywhere, or a single occurrence: surface the
            # original error unchanged.
            return False

        # More than one occurrence: the stdlib module is being shadowed by a
        # same-named user file/folder — explain that instead.
        raise DebuggerInitializationError(self._generate_shadowed_import_message(candidates))

    def _generate_shadowed_import_message(self, found_at):
        """Build the user-facing message naming the first shadowing path."""
        return """It was not possible to initialize the debugger due to a module name conflict.

i.e.: the module "%(import_name)s" could not be imported because it is shadowed by:
%(found_at)s
Please rename this file/folder so that the original module from the standard library can be imported.""" % {
            "import_name": self.import_name,
            "found_at": found_at[0],
        }

    def check(self, module, expected_attributes):
        """Raise DebuggerInitializationError if *module* is missing any of
        *expected_attributes* (a symptom of a partially-shadowed module)."""
        missing_msg = ""
        for expected in expected_attributes:
            try:
                getattr(module, expected)
            except:
                missing_msg = self._generate_shadowed_import_message([module.__file__])
                break

        if missing_msg:
            raise DebuggerInitializationError(missing_msg)
|
||||
|
||||
|
||||
# Import every module the debugger itself depends on through
# VerifyShadowedImport so that a user file named e.g. threading.py yields a
# clear DebuggerInitializationError instead of a confusing crash. The
# module-level aliases (ThreadingEvent, ...) capture the original attributes
# before any later monkey-patching.
with VerifyShadowedImport("threading") as verify_shadowed:
    import threading

    verify_shadowed.check(threading, ["Thread", "settrace", "setprofile", "Lock", "RLock", "current_thread"])
ThreadingEvent = threading.Event
ThreadingLock = threading.Lock
threading_current_thread = threading.current_thread

with VerifyShadowedImport("time") as verify_shadowed:
    import time

    verify_shadowed.check(time, ["sleep", "time", "mktime"])

with VerifyShadowedImport("socket") as verify_shadowed:
    import socket

    verify_shadowed.check(socket, ["socket", "gethostname", "getaddrinfo"])

with VerifyShadowedImport("select") as verify_shadowed:
    import select

    verify_shadowed.check(select, ["select"])

with VerifyShadowedImport("code") as verify_shadowed:
    import code as _code

    verify_shadowed.check(_code, ["compile_command", "InteractiveInterpreter"])

with VerifyShadowedImport("_thread") as verify_shadowed:
    import _thread as thread

    verify_shadowed.check(thread, ["start_new_thread", "start_new", "allocate_lock"])

with VerifyShadowedImport("queue") as verify_shadowed:
    import queue as _queue

    verify_shadowed.check(_queue, ["Queue", "LifoQueue", "Empty", "Full", "deque"])

with VerifyShadowedImport("xmlrpclib") as verify_shadowed:
    import xmlrpc.client as xmlrpclib

    verify_shadowed.check(xmlrpclib, ["ServerProxy", "Marshaller", "Server"])

with VerifyShadowedImport("xmlrpc.server") as verify_shadowed:
    import xmlrpc.server as xmlrpcserver

    verify_shadowed.check(xmlrpcserver, ["SimpleXMLRPCServer"])

with VerifyShadowedImport("http.server") as verify_shadowed:
    import http.server as BaseHTTPServer

    verify_shadowed.check(BaseHTTPServer, ["BaseHTTPRequestHandler"])

# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads.
# Note: as it can't be set during execution, don't import the name (import the module and access it through its name).
pydevd_saved_threading_enumerate = None
|
||||
@@ -0,0 +1,77 @@
|
||||
import sys
|
||||
|
||||
|
||||
def patch_sys_module():
    """Replace ``sys.exc_info`` with a debugger-aware wrapper.

    For ImportError the wrapper drops the top traceback frame, which is the
    frame the debugger's plugin_import call adds.  The original function is
    kept in ``sys.system_exc_info`` (set only the first time this runs).
    """

    def patched_exc_info(fun):
        def pydev_debugger_exc_info():
            exc_type, exc_value, exc_tb = fun()
            # Hide the frame added by the plugin_import call.
            if exc_type == ImportError and exc_tb and hasattr(exc_tb, "tb_next"):
                return exc_type, exc_value, exc_tb.tb_next
            return exc_type, exc_value, exc_tb

        return pydev_debugger_exc_info

    system_exc_info = sys.exc_info
    sys.exc_info = patched_exc_info(system_exc_info)
    if not hasattr(sys, "system_exc_info"):
        sys.system_exc_info = system_exc_info
|
||||
|
||||
|
||||
def patched_reload(orig_reload):
    """Wrap *orig_reload* so that reloading ``sys`` re-applies our patches."""

    def pydev_debugger_reload(mod):
        orig_reload(mod)
        # Reloading sys replaces the patched sys.exc_info; patch it again.
        if mod.__name__ == "sys":
            patch_sys_module()

    return pydev_debugger_reload
|
||||
|
||||
|
||||
def patch_reload():
    """Wrap the available reload entry points with pydev_debugger_reload.

    The originals are stashed on ``sys`` (builtin_orig_reload /
    imp_orig_reload / importlib_orig_reload) so cancel_patches_in_sys_module
    can restore them later.
    """
    import builtins  # Py3

    if hasattr(builtins, "reload"):
        # Some environments re-expose reload() as a builtin; patch it and the
        # legacy imp module alongside it.
        sys.builtin_orig_reload = builtins.reload
        builtins.reload = patched_reload(sys.builtin_orig_reload)  # @UndefinedVariable
        try:
            import imp

            sys.imp_orig_reload = imp.reload
            imp.reload = patched_reload(sys.imp_orig_reload)  # @UndefinedVariable
        except ImportError:
            pass  # Ok, imp not available on Python 3.12.
    else:
        # Standard Python 3: reload lives in importlib.
        try:
            import importlib

            sys.importlib_orig_reload = importlib.reload  # @UndefinedVariable
            importlib.reload = patched_reload(sys.importlib_orig_reload)  # @UndefinedVariable
        except:
            pass

    del builtins
|
||||
|
||||
|
||||
def cancel_patches_in_sys_module():
    """Undo patch_sys_module()/patch_reload(), restoring the saved originals.

    Assumes patch_sys_module() ran earlier (relies on sys.system_exc_info);
    the reload restores are guarded by hasattr checks.
    """
    sys.exc_info = sys.system_exc_info  # @UndefinedVariable
    import builtins  # Py3

    if hasattr(sys, "builtin_orig_reload"):
        builtins.reload = sys.builtin_orig_reload

    if hasattr(sys, "imp_orig_reload"):
        try:
            import imp

            imp.reload = sys.imp_orig_reload
        except ImportError:
            pass  # Ok, imp not available in Python 3.12.

    if hasattr(sys, "importlib_orig_reload"):
        import importlib

        importlib.reload = sys.importlib_orig_reload

    del builtins
|
||||
@@ -0,0 +1,53 @@
|
||||
import inspect
|
||||
import re
|
||||
|
||||
|
||||
def do_find(f, mod):
    """Locate *mod* (module/class/function/method/frame/...) inside file *f*.

    :param f: filename used for linecache lookup and as the returned path.
    :param mod: the object to locate; modules map to the start of the file,
        classes are found by a ``class <name>`` regex, anything carrying a
        code object is found via ``co_firstlineno``.
    :return: tuple ``(filename, line_index, 0)`` where line_index is 0-based.
    :raises RuntimeError: when *mod* is not a recognized kind of object.
    """
    import linecache

    if inspect.ismodule(mod):
        return f, 0, 0

    lines = linecache.getlines(f)

    if inspect.isclass(mod):
        name = mod.__name__
        pat = re.compile(r"^\s*class\s*" + name + r"\b")
        for i in range(len(lines)):
            if pat.match(lines[i]):
                return f, i, 0

        # Class definition not found in the file: fall back to the top.
        return f, 0, 0

    if inspect.ismethod(mod):
        # Python 3: bound methods expose the function as __func__ (the
        # original code used the Python-2-only im_func, which raises
        # AttributeError on any modern interpreter).
        mod = mod.__func__

    if inspect.isfunction(mod):
        # Python 3 functions always carry __code__ (func_code was Python 2).
        mod = mod.__code__

    if inspect.istraceback(mod):
        mod = mod.tb_frame

    if inspect.isframe(mod):
        mod = mod.f_code

    if inspect.iscode(mod):
        if not hasattr(mod, "co_filename"):
            return None, 0, 0

        if not hasattr(mod, "co_firstlineno"):
            return mod.co_filename, 0, 0

        # Walk backwards from the code object's first line until the def /
        # lambda / decorator line is found.
        lnum = mod.co_firstlineno
        pat = re.compile(r"^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)")
        while lnum > 0:
            # Guard the index: linecache returns [] for unknown files and
            # co_firstlineno is 1-based, so it can point one past the end.
            if lnum < len(lines) and pat.match(lines[lnum]):
                break
            lnum -= 1

        return f, lnum, 0

    raise RuntimeError("Do not know about: " + f + " " + str(mod))
|
||||
@@ -0,0 +1,349 @@
|
||||
"""
|
||||
Sample usage to track changes in a thread.
|
||||
|
||||
import threading
|
||||
import time
|
||||
watcher = fsnotify.Watcher()
|
||||
watcher.accepted_file_extensions = {'.py', '.pyw'}
|
||||
|
||||
# Configure target values to compute throttling.
|
||||
# Note: internal sleep times will be updated based on
|
||||
# profiling the actual application runtime to match
|
||||
# those values.
|
||||
|
||||
watcher.target_time_for_single_scan = 2.
|
||||
watcher.target_time_for_notification = 4.
|
||||
|
||||
watcher.set_tracked_paths([target_dir])
|
||||
|
||||
def start_watching(): # Called from thread
|
||||
for change_enum, change_path in watcher.iter_changes():
|
||||
if change_enum == fsnotify.Change.added:
|
||||
print('Added: ', change_path)
|
||||
elif change_enum == fsnotify.Change.modified:
|
||||
print('Modified: ', change_path)
|
||||
elif change_enum == fsnotify.Change.deleted:
|
||||
print('Deleted: ', change_path)
|
||||
|
||||
t = threading.Thread(target=start_watching)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
try:
|
||||
...
|
||||
finally:
|
||||
watcher.dispose()
|
||||
|
||||
|
||||
Note: changes are only reported for files (added/modified/deleted), not directories.
|
||||
"""
|
||||
import sys
|
||||
from os.path import basename
|
||||
from _pydev_bundle import pydev_log, _pydev_saved_modules
|
||||
from os import scandir
|
||||
|
||||
# enum is available on every supported Python 3; the fallback dummy only keeps
# this module importable in stripped-down runtimes (Change below then behaves
# as a plain class — presumably acceptable to its consumers; TODO confirm).
try:
    from enum import IntEnum
except:

    class IntEnum(object):
        pass
|
||||
|
||||
|
||||
import time
|
||||
|
||||
__author__ = "Fabio Zadrozny"
|
||||
__email__ = "fabiofz@gmail.com"
|
||||
__version__ = "0.1.5" # Version here and in setup.py
|
||||
|
||||
|
||||
class Change(IntEnum):
    """Kind of filesystem change reported by Watcher.iter_changes()."""

    added = 1
    modified = 2
    deleted = 3
|
||||
|
||||
|
||||
class _SingleVisitInfo(object):
    """Mutable state shared by all _PathWatchers during a single scan pass."""

    def __init__(self):
        # Number of directory entries seen so far (drives throttling sleeps).
        self.count = 0
        # Directories already visited in this pass (avoids revisiting nested
        # trackers' paths).
        self.visited_dirs = set()
        # path -> (st_mtime_ns, st_size) snapshot gathered during this pass.
        self.file_to_mtime = {}
        # Timestamp of the last throttling sleep.
        self.last_sleep_time = time.time()
|
||||
|
||||
|
||||
class _PathWatcher(object):
    """
    Helper to watch a single path.
    """

    def __init__(self, root_path, accept_directory, accept_file, single_visit_info, max_recursion_level, sleep_time=0.0):
        """
        :type root_path: str
        :type accept_directory: Callback[str, bool]
        :type accept_file: Callback[str, bool]
        :type max_recursion_level: int
        :type sleep_time: float
        """
        self.accept_directory = accept_directory
        self.accept_file = accept_file
        self._max_recursion_level = max_recursion_level

        self._root_path = root_path

        # Initial sleep value for throttling, it'll be auto-updated based on the
        # Watcher.target_time_for_single_scan.
        self.sleep_time = sleep_time

        # Minimum elapsed time between throttling sleeps (roughly 30 Hz).
        self.sleep_at_elapsed = 1.0 / 30.0

        # When created, do the initial snapshot right away!
        old_file_to_mtime = {}
        self._check(single_visit_info, lambda _change: None, old_file_to_mtime)

    def __eq__(self, o):
        # Watchers are identified solely by the path they watch.
        if isinstance(o, _PathWatcher):
            return self._root_path == o._root_path

        return False

    def __ne__(self, o):
        return not self == o

    def __hash__(self):
        return hash(self._root_path)

    def _check_dir(self, dir_path, single_visit_info, append_change, old_file_to_mtime, level):
        # This is the actual poll loop
        if dir_path in single_visit_info.visited_dirs or level > self._max_recursion_level:
            return
        single_visit_info.visited_dirs.add(dir_path)
        try:
            if isinstance(dir_path, bytes):
                # Normalize bytes paths to str; skip paths we cannot decode.
                try:
                    dir_path = dir_path.decode(sys.getfilesystemencoding())
                except UnicodeDecodeError:
                    try:
                        dir_path = dir_path.decode("utf-8")
                    except UnicodeDecodeError:
                        return  # Ignore if we can't deal with the path.

            new_files = single_visit_info.file_to_mtime

            for entry in scandir(dir_path):
                single_visit_info.count += 1

                # Throttle if needed inside the loop
                # to avoid consuming too much CPU.
                if single_visit_info.count % 300 == 0:
                    if self.sleep_time > 0:
                        t = time.time()
                        diff = t - single_visit_info.last_sleep_time
                        if diff > self.sleep_at_elapsed:
                            time.sleep(self.sleep_time)
                            single_visit_info.last_sleep_time = time.time()

                if entry.is_dir():
                    if self.accept_directory(entry.path):
                        self._check_dir(entry.path, single_visit_info, append_change, old_file_to_mtime, level + 1)

                elif self.accept_file(entry.path):
                    # mtime is actually a (st_mtime_ns, st_size) pair so a
                    # same-mtime rewrite with a different size still counts.
                    stat = entry.stat()
                    mtime = (stat.st_mtime_ns, stat.st_size)
                    path = entry.path
                    new_files[path] = mtime

                    # Pop from the previous snapshot: whatever is left in
                    # old_file_to_mtime after the scan is what was deleted.
                    old_mtime = old_file_to_mtime.pop(path, None)
                    if not old_mtime:
                        append_change((Change.added, path))
                    elif old_mtime != mtime:
                        append_change((Change.modified, path))

        except OSError:
            pass  # Directory was removed in the meanwhile.

    def _check(self, single_visit_info, append_change, old_file_to_mtime):
        # Scan the whole tree below the root, reporting adds/modifications.
        self._check_dir(self._root_path, single_visit_info, append_change, old_file_to_mtime, 0)
|
||||
|
||||
|
||||
class Watcher(object):
    """Polling filesystem watcher: reports added/modified/deleted files."""

    # By default (if accept_directory is not specified), these will be the
    # ignored directories.
    ignored_dirs = {".git", "__pycache__", ".idea", "node_modules", ".metadata"}

    # By default (if accept_file is not specified), these will be the
    # accepted files.
    accepted_file_extensions = ()

    # Set to the target value for doing full scan of all files (adds a sleep inside the poll loop
    # which processes files to reach the target time).
    # Lower values will consume more CPU
    # Set to 0.0 to have no sleeps (which will result in a higher cpu load).
    target_time_for_single_scan = 2.0

    # Set the target value from the start of one scan to the start of another scan (adds a
    # sleep after a full poll is done to reach the target time).
    # Lower values will consume more CPU.
    # Set to 0.0 to have a new scan start right away without any sleeps.
    target_time_for_notification = 4.0

    # Set to True to print the time for a single poll through all the paths.
    print_poll_time = False

    # This is the maximum recursion level.
    max_recursion_level = 10

    def __init__(self, accept_directory=None, accept_file=None):
        """
        :param Callable[str, bool] accept_directory:
            Callable that returns whether a directory should be watched.
            Note: if passed it'll override the `ignored_dirs`

        :param Callable[str, bool] accept_file:
            Callable that returns whether a file should be watched.
            Note: if passed it'll override the `accepted_file_extensions`.
        """
        self._path_watchers = set()
        self._disposed = _pydev_saved_modules.ThreadingEvent()

        if accept_directory is None:
            accept_directory = lambda dir_path: basename(dir_path) not in self.ignored_dirs
        if accept_file is None:
            accept_file = lambda path_name: not self.accepted_file_extensions or path_name.endswith(self.accepted_file_extensions)
        self.accept_file = accept_file
        self.accept_directory = accept_directory
        self._single_visit_info = _SingleVisitInfo()

    @property
    def accept_directory(self):
        return self._accept_directory

    @accept_directory.setter
    def accept_directory(self, accept_directory):
        # Propagate to existing watchers so a change takes effect immediately.
        self._accept_directory = accept_directory
        for path_watcher in self._path_watchers:
            path_watcher.accept_directory = accept_directory

    @property
    def accept_file(self):
        return self._accept_file

    @accept_file.setter
    def accept_file(self, accept_file):
        # Propagate to existing watchers so a change takes effect immediately.
        self._accept_file = accept_file
        for path_watcher in self._path_watchers:
            path_watcher.accept_file = accept_file

    def dispose(self):
        # Wakes up any pending waits and makes iter_changes() finish.
        self._disposed.set()

    @property
    def path_watchers(self):
        return tuple(self._path_watchers)

    def set_tracked_paths(self, paths):
        """
        Note: always resets all path trackers to track the passed paths.
        """
        if not isinstance(paths, (list, tuple, set)):
            paths = (paths,)

        # Sort by the path len so that the bigger paths come first (so,
        # if there's any nesting we want the nested paths to be visited
        # before the parent paths so that the max_recursion_level is correct).
        paths = sorted(set(paths), key=lambda path: -len(path))
        path_watchers = set()

        self._single_visit_info = _SingleVisitInfo()

        initial_time = time.time()
        for path in paths:
            sleep_time = 0.0  # When collecting the first time, sleep_time should be 0!
            path_watcher = _PathWatcher(
                path,
                self.accept_directory,
                self.accept_file,
                self._single_visit_info,
                max_recursion_level=self.max_recursion_level,
                sleep_time=sleep_time,
            )

            path_watchers.add(path_watcher)

        actual_time = time.time() - initial_time

        pydev_log.debug("Tracking the following paths for changes: %s", paths)
        pydev_log.debug("Time to track: %.2fs", actual_time)
        pydev_log.debug("Folders found: %s", len(self._single_visit_info.visited_dirs))
        pydev_log.debug("Files found: %s", len(self._single_visit_info.file_to_mtime))
        self._path_watchers = path_watchers

    def iter_changes(self):
        """
        Continuously provides changes (until dispose() is called).

        Changes provided are tuples with the Change enum and filesystem path.

        :rtype: Iterable[Tuple[Change, str]]
        """
        while not self._disposed.is_set():
            initial_time = time.time()

            # Swap in a fresh snapshot; the previous one is consumed below to
            # detect deletions.
            old_visit_info = self._single_visit_info
            old_file_to_mtime = old_visit_info.file_to_mtime
            changes = []
            append_change = changes.append

            self._single_visit_info = single_visit_info = _SingleVisitInfo()
            for path_watcher in self._path_watchers:
                path_watcher._check(single_visit_info, append_change, old_file_to_mtime)

            # Note that we pop entries while visiting, so, what remained is what's deleted.
            for entry in old_file_to_mtime:
                append_change((Change.deleted, entry))

            for change in changes:
                yield change

            actual_time = time.time() - initial_time
            if self.print_poll_time:
                print("--- Total poll time: %.3fs" % actual_time)

            if actual_time > 0:
                if self.target_time_for_single_scan <= 0.0:
                    for path_watcher in self._path_watchers:
                        path_watcher.sleep_time = 0.0
                else:
                    # Adjust per-watcher sleep times towards the configured
                    # scan-time target based on how long this scan took.
                    perc = self.target_time_for_single_scan / actual_time

                    # Prevent from changing the values too much (go slowly into the right
                    # direction).
                    # (to prevent from cases where the user puts the machine on sleep and
                    # values become too skewed).
                    if perc > 2.0:
                        perc = 2.0
                    elif perc < 0.5:
                        perc = 0.5

                    for path_watcher in self._path_watchers:
                        if path_watcher.sleep_time <= 0.0:
                            path_watcher.sleep_time = 0.001
                        new_sleep_time = path_watcher.sleep_time * perc

                        # Prevent from changing the values too much (go slowly into the right
                        # direction).
                        # (to prevent from cases where the user puts the machine on sleep and
                        # values become too skewed).
                        diff_sleep_time = new_sleep_time - path_watcher.sleep_time
                        path_watcher.sleep_time += diff_sleep_time / (3.0 * len(self._path_watchers))

                        # NOTE(review): this wait (keyed on actual_time) runs
                        # once per watcher inside the adjustment loop — looks
                        # intentional upstream but worth confirming.
                        if actual_time > 0:
                            self._disposed.wait(actual_time)

                        if path_watcher.sleep_time < 0.001:
                            path_watcher.sleep_time = 0.001

            # print('new sleep time: %s' % path_watcher.sleep_time)

            # Pace scans towards target_time_for_notification.
            diff = self.target_time_for_notification - actual_time
            if diff > 0.0:
                self._disposed.wait(diff)
|
||||
@@ -0,0 +1,641 @@
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
|
||||
from _pydev_bundle._pydev_calltip_util import get_description
|
||||
from _pydevd_bundle import pydevd_vars
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from _pydevd_bundle.pydevd_constants import IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger, silence_warnings_decorator
|
||||
from contextlib import contextmanager
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle.pydevd_utils import interrupt_main_thread
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseStdIn
|
||||
# =======================================================================================================================
|
||||
class BaseStdIn:
    """Stand-in for sys.stdin while console code is being evaluated.

    Reads always yield a bare newline (so input() never blocks or raises
    EOFError); writes are ignored; anything else is delegated to the wrapped
    original stdin.
    """

    def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
        # Mirror the text-stream attributes of the real stdin when present;
        # some embedded interpreters expose streams without them.
        try:
            self.encoding = sys.stdin.encoding
        except:
            pass
        self.original_stdin = original_stdin

        try:
            self.errors = sys.stdin.errors  # Who knew? sys streams have an errors attribute!
        except:
            pass

    def readline(self, *args, **kwargs):
        # Deliberately silent: printing a warning here would pollute the
        # console whenever user code touches stdin during evaluation, and a
        # readline must return something or input() raises EOFError.
        return "\n"

    def read(self, *args, **kwargs):
        # For the interactive interpreter a read behaves like a readline.
        return self.readline()

    def write(self, *args, **kwargs):
        pass  # not available StdIn (but it can be expected to be in the stream interface)

    def flush(self, *args, **kwargs):
        pass  # not available StdIn (but it can be expected to be in the stream interface)

    def close(self, *args, **kwargs):
        pass  # expected in StdIn

    def __iter__(self):
        # Explicit __iter__ keeps this object iterable on Python 3.
        return self.original_stdin.__iter__()

    def __getattr__(self, item):
        # Only reached when regular lookup fails; fall back to the wrapped stream.
        wrapped = self.original_stdin
        if not hasattr(wrapped, item):
            raise AttributeError("%s has no attribute %s" % (wrapped, item))
        return getattr(wrapped, item)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# StdIn
|
||||
# =======================================================================================================================
|
||||
class StdIn(BaseStdIn):
    """
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
    """

    def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
        BaseStdIn.__init__(self, original_stdin)
        self.interpreter = interpreter
        self.client_port = client_port
        self.host = host

    def readline(self, *args, **kwargs):
        # Call back into the client over XML-RPC to obtain the next line.
        try:
            proxy = xmlrpclib.Server("http://%s:%s" % (self.host, self.client_port))
            requested_input = proxy.RequestInput()
            if not requested_input:
                # A readline must return something, otherwise input() can
                # raise EOFError.
                return "\n"
            # readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character).
            return requested_input + "\n"
        except KeyboardInterrupt:
            raise  # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
        except:
            return "\n"

    def close(self, *args, **kwargs):
        pass  # expected in StdIn
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# DebugConsoleStdIn
|
||||
# =======================================================================================================================
|
||||
class DebugConsoleStdIn(BaseStdIn):
    """
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
    """

    def __init__(self, py_db, original_stdin):
        """
        :param py_db:
            If None, get_global_debugger() is used.
        """
        BaseStdIn.__init__(self, original_stdin)
        self._py_db = py_db
        # Nesting counter: the IDE is only notified on the outermost request.
        self._in_notification = 0

    def __send_input_requested_message(self, is_started):
        # Tell the IDE that user input is (or no longer is) being waited on.
        try:
            py_db = self._py_db
            if py_db is None:
                py_db = get_global_debugger()

            if py_db is None:
                return

            cmd = py_db.cmd_factory.make_input_requested_message(is_started)
            py_db.writer.add_command(cmd)
        except Exception:
            pydev_log.exception()

    @contextmanager
    def notify_input_requested(self):
        # Re-entrant guard: nested reads send a single started/finished pair.
        self._in_notification += 1
        if self._in_notification == 1:
            self.__send_input_requested_message(True)
        try:
            yield
        finally:
            self._in_notification -= 1
            if self._in_notification == 0:
                self.__send_input_requested_message(False)

    def readline(self, *args, **kwargs):
        # Unlike BaseStdIn, actually block on the real stdin, notifying the
        # IDE while waiting.
        with self.notify_input_requested():
            return self.original_stdin.readline(*args, **kwargs)

    def read(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.read(*args, **kwargs)
|
||||
|
||||
|
||||
class CodeFragment:
    """Accumulates console input until it forms a complete statement."""

    def __init__(self, text, is_single_line=True):
        self.text = text
        self.is_single_line = is_single_line

    def append(self, code_fragment):
        """Concatenate another fragment; multi-line-ness is sticky."""
        self.text = "\n".join((self.text, code_fragment.text))
        if not code_fragment.is_single_line:
            self.is_single_line = False
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseInterpreterInterface
|
||||
# =======================================================================================================================
|
||||
class BaseInterpreterInterface:
|
||||
    def __init__(self, mainThread, connect_status_queue=None):
        # Thread considered the "main" one; target of interrupt().
        self.mainThread = mainThread
        self.interruptable = False
        # Queue of complete code fragments / callables to be executed.
        self.exec_queue = _queue.Queue(0)
        # Accumulates partial input until it compiles as a full statement.
        self.buffer = None
        self.banner_shown = False
        self.connect_status_queue = connect_status_queue
        self.mpl_modules_for_patching = {}
        self.init_mpl_modules_for_patching()
|
||||
|
||||
def build_banner(self):
|
||||
return "print({0})\n".format(repr(self.get_greeting_msg()))
|
||||
|
||||
    def get_greeting_msg(self):
        """Banner text shown when the console starts (subclasses may override)."""
        return "PyDev console: starting.\n"
|
||||
|
||||
    def init_mpl_modules_for_patching(self):
        """Register lazy activators for matplotlib GUI integration.

        Values are callables to be invoked when the corresponding module is
        imported; they wire the GUI event loop through self.enableGui.
        """
        from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot

        self.mpl_modules_for_patching = {
            "matplotlib": lambda: activate_matplotlib(self.enableGui),
            "matplotlib.pyplot": activate_pyplot,
            "pylab": activate_pylab,
        }
|
||||
|
||||
def need_more_for_code(self, source):
|
||||
# PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
|
||||
|
||||
# Strangely even the IPython console is_complete said it was complete
|
||||
# even with a continuation char at the end.
|
||||
if source.endswith("\\"):
|
||||
return True
|
||||
|
||||
if hasattr(self.interpreter, "is_complete"):
|
||||
return not self.interpreter.is_complete(source)
|
||||
try:
|
||||
# At this point, it should always be single.
|
||||
# If we don't do this, things as:
|
||||
#
|
||||
# for i in range(10): print(i)
|
||||
#
|
||||
# (in a single line) don't work.
|
||||
# Note that it won't give an error and code will be None (so, it'll
|
||||
# use execMultipleLines in the next call in this case).
|
||||
symbol = "single"
|
||||
code = self.interpreter.compile(source, "<input>", symbol)
|
||||
except (OverflowError, SyntaxError, ValueError):
|
||||
# Case 1
|
||||
return False
|
||||
if code is None:
|
||||
# Case 2
|
||||
return True
|
||||
|
||||
# Case 3
|
||||
return False
|
||||
|
||||
    def need_more(self, code_fragment):
        """Merge *code_fragment* into the buffer; report whether the buffered
        source is still an incomplete statement."""
        if self.buffer is None:
            self.buffer = code_fragment
        else:
            self.buffer.append(code_fragment)

        return self.need_more_for_code(self.buffer.text)
|
||||
|
||||
    def create_std_in(self, debugger=None, original_std_in=None):
        """Build the stdin replacement used while executing console code:
        XML-RPC backed when there is no debugger, otherwise a wrapper that
        notifies the IDE on reads."""
        if debugger is None:
            return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
        else:
            return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in)
|
||||
|
||||
    def add_exec(self, code_fragment, debugger=None):
        """Execute *code_fragment*, temporarily redirecting stdin (and pydoc's
        help input) to this console.

        :return: more (True if more input is needed to complete the statement).
        """
        # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
        # (showtraceback does it on python 3.5 onwards)
        sys.excepthook = sys.__excepthook__
        try:
            original_in = sys.stdin
            try:
                help = None
                if "pydoc" in sys.modules:
                    pydoc = sys.modules["pydoc"]  # Don't import it if it still is not there.

                    if hasattr(pydoc, "help"):
                        # You never know how will the API be changed, so, let's code defensively here
                        help = pydoc.help
                        if not hasattr(help, "input"):
                            help = None
            except:
                # Just ignore any error here
                pass

            more = False
            try:
                sys.stdin = self.create_std_in(debugger, original_in)
                try:
                    if help is not None:
                        # This will enable the help() function to work.
                        try:
                            try:
                                help.input = sys.stdin
                            except AttributeError:
                                help._input = sys.stdin
                        except:
                            help = None
                            if not self._input_error_printed:
                                self._input_error_printed = True
                                sys.stderr.write("\nError when trying to update pydoc.help.input\n")
                                sys.stderr.write("(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n")
                                traceback.print_exc()

                    try:
                        self.start_exec()
                        if hasattr(self, "debugger"):
                            self.debugger.enable_tracing()

                        more = self.do_add_exec(code_fragment)

                        if hasattr(self, "debugger"):
                            self.debugger.disable_tracing()

                        self.finish_exec(more)
                    finally:
                        # Restore pydoc's input stream no matter what happened.
                        if help is not None:
                            try:
                                try:
                                    help.input = original_in
                                except AttributeError:
                                    help._input = original_in
                            except:
                                pass

                finally:
                    sys.stdin = original_in
            except SystemExit:
                raise
            except:
                traceback.print_exc()
        finally:
            # NOTE(review): this writes sys.excepthook back over __excepthook__
            # (the reverse of the assignment at the top); it mirrors the
            # original code but looks suspicious — confirm intent upstream.
            sys.__excepthook__ = sys.excepthook

        return more
|
||||
|
||||
    def do_add_exec(self, codeFragment):
        """
        Subclasses should override.

        @return: more (True if more input is needed to complete the statement and False if the statement is complete).
        """
        raise NotImplementedError()
|
||||
|
||||
    def get_namespace(self):
        """
        Subclasses should override.

        @return: dict with namespace.
        """
        raise NotImplementedError()
|
||||
|
||||
    def __resolve_reference__(self, text):
        """Best-effort resolution of *text* to an object.

        For plain names, tries in order: the console namespace, then
        ``__builtins__`` as a dict, then ``__builtins__`` as a module.  For
        dotted names, evaluates everything before the last dot and resolves
        the final attribute with getattr.  Returns None when unresolvable.

        :type text: str
        """
        obj = None
        if "." not in text:
            try:
                obj = self.get_namespace()[text]
            except KeyError:
                pass

            if obj is None:
                try:
                    obj = self.get_namespace()["__builtins__"][text]
                except:
                    pass

            if obj is None:
                try:
                    obj = getattr(self.get_namespace()["__builtins__"], text, None)
                except:
                    pass

        else:
            try:
                last_dot = text.rindex(".")
                parent_context = text[0:last_dot]
                res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
                obj = getattr(res, text[last_dot + 1 :])
            except:
                pass
        return obj
|
||||
|
||||
    def getDescription(self, text):
        """Return a description/docstring for *text*, or '' when it cannot be
        resolved (errors are deliberately swallowed)."""
        try:
            obj = self.__resolve_reference__(text)
            if obj is None:
                return ""
            return get_description(obj)
        except:
            return ""
|
||||
|
||||
    def do_exec_code(self, code, is_single_line):
        """Buffer *code*; once it forms a complete statement, queue it for
        execution.

        :return: True when more input is needed, False when queued (or on error).
        """
        try:
            code_fragment = CodeFragment(code, is_single_line)
            more = self.need_more(code_fragment)
            if not more:
                # Statement complete: hand the accumulated fragment to the
                # exec queue and reset the buffer for the next statement.
                code_fragment = self.buffer
                self.buffer = None
                self.exec_queue.put(code_fragment)

            return more
        except:
            traceback.print_exc()
            return False
|
||||
|
||||
    def execLine(self, line):
        """Process one line of console input (may be an incomplete statement)."""
        return self.do_exec_code(line, True)
|
||||
|
||||
    def execMultipleLines(self, lines):
        """Process a multi-line chunk of console input.

        On Jython the chunk is fed line by line (presumably its compiler does
        not accept multi-line 'single' input — confirm); elsewhere the chunk
        is handled whole.
        """
        if IS_JYTHON:
            more = False
            for line in lines.split("\n"):
                more = self.do_exec_code(line, True)
            return more
        else:
            return self.do_exec_code(lines, False)
|
||||
|
||||
    def interrupt(self):
        """Interrupt the code currently executing in the console.

        :return: True when the interrupt was issued, False on failure.
        """
        self.buffer = None  # Also clear the buffer when it's interrupted.
        try:
            if self.interruptable:
                # Fix for #PyDev-500: Console interrupt can't interrupt on sleep
                interrupt_main_thread(self.mainThread)

            self.finish_exec(False)
            return True
        except:
            traceback.print_exc()
            return False
|
||||
|
||||
    def close(self):
        """Terminate the console process."""
        sys.exit(0)
|
||||
|
||||
    def start_exec(self):
        # Mark that a fragment is running so interrupt() may act on it.
        self.interruptable = True
|
||||
|
||||
    def get_server(self):
        """Return an XML-RPC proxy to the client IDE, or None when no
        host/port has been configured on this instance."""
        if getattr(self, "host", None) is not None:
            return xmlrpclib.Server("http://%s:%s" % (self.host, self.client_port))
        else:
            return None

    server = property(get_server)
|
||||
|
||||
    def ShowConsole(self):
        """Ask the client IDE to show its console view (no-op without a client)."""
        server = self.get_server()
        if server is not None:
            server.ShowConsole()
|
||||
|
||||
    def finish_exec(self, more):
        """Notify the client that execution finished.

        :param more: whether more input is needed to complete the statement.
        :return: the client's NotifyFinished reply, or True with no client.
        """
        self.interruptable = False

        server = self.get_server()

        if server is not None:
            return server.NotifyFinished(more)
        else:
            return True
|
||||
|
||||
    def getFrame(self):
        """Return the console namespace as a pydevd ``<xml>`` variables document,
        excluding IPython's hidden variables."""
        xml = StringIO()
        hidden_ns = self.get_ipython_hidden_vars_dict()
        xml.write("<xml>")
        xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
        xml.write("</xml>")

        return xml.getvalue()
|
||||
|
||||
@silence_warnings_decorator
|
||||
def getVariable(self, attributes):
|
||||
xml = StringIO()
|
||||
xml.write("<xml>")
|
||||
val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
|
||||
if val_dict is None:
|
||||
val_dict = {}
|
||||
|
||||
for k, val in val_dict.items():
|
||||
val = val_dict[k]
|
||||
evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
|
||||
xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))
|
||||
|
||||
xml.write("</xml>")
|
||||
|
||||
return xml.getvalue()
|
||||
|
||||
    def getArray(self, attr, roffset, coffset, rows, cols, format):
        """Return an XML slice of a table-like variable.

        :param attr: tab-separated path; only the last component (the variable
            name/expression) is used here.
        """
        name = attr.split("\t")[-1]
        array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
        return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)
|
||||
|
||||
    def evaluate(self, expression):
        """Evaluate *expression* in the console namespace; return an ``<xml>``
        document with the resulting variable."""
        xml = StringIO()
        xml.write("<xml>")
        result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
        xml.write(pydevd_vars.var_to_xml(result, expression))
        xml.write("</xml>")
        return xml.getvalue()
|
||||
|
||||
@silence_warnings_decorator
def loadFullValue(self, seq, scope_attrs):
    """
    Evaluate full value for async Console variables in a separate thread and send results to IDE side
    :param seq: id of command
    :param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR
    (i.e.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\tattr1\tattr2)
    :return: None (results are delivered asynchronously by the worker thread)
    """
    frame_variables = self.get_namespace()
    var_objects = []
    vars = scope_attrs.split(NEXT_VALUE_SEPARATOR)
    for var_attrs in vars:
        # First tab-separated token is the variable name, the rest (if any)
        # is the attribute path inside that variable.
        if "\t" in var_attrs:
            name, attrs = var_attrs.split("\t", 1)
        else:
            name = var_attrs
            attrs = None
        if name in frame_variables:
            # Known console variable: walk its attribute path directly.
            var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs)
            var_objects.append((var_object, name))
        else:
            # Not a plain variable — evaluate it as an expression in the namespace.
            var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables)
            var_objects.append((var_object, name))

    from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole

    # Prefer the debugger attached to this console; fall back to the global
    # one, and as a last resort create a fresh PyDB instance.
    py_db = getattr(self, "debugger", None)

    if py_db is None:
        py_db = get_global_debugger()

    if py_db is None:
        from pydevd import PyDB

        py_db = PyDB()

    # The worker thread sends the evaluated values back to the IDE under `seq`.
    t = GetValueAsyncThreadConsole(py_db, self.get_server(), seq, var_objects)
    t.start()
|
||||
|
||||
def changeVariable(self, attr, value):
    """Schedule the assignment ``attr = value`` inside the console namespace.

    The exec must happen on the main (interpreter) thread, so the work is
    queued on ``self.exec_queue`` instead of being executed here.
    """

    def _apply_assignment():
        Exec("%s=%s" % (attr, value), self.get_namespace(), self.get_namespace())

    self.exec_queue.put(_apply_assignment)
|
||||
|
||||
def connectToDebugger(self, debuggerPort, debugger_options=None):
    """
    Used to show console with variables connection.
    Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.

    :param debuggerPort: port on localhost where the IDE-side debugger listens.
    :param debugger_options: optional dict of debugger settings; a
        "PYDEVD_EXTRA_ENVS" entry maps env-var names to values to inject.
    :return: the tuple ("connect complete",) — the actual connection happens
        asynchronously on the main thread via exec_queue.
    """

    if debugger_options is None:
        debugger_options = {}
    env_key = "PYDEVD_EXTRA_ENVS"
    if env_key in debugger_options:
        # Inject IDE-requested environment variables, appending to any
        # existing value with the platform path separator.
        for env_name, value in debugger_options[env_key].items():
            existing_value = os.environ.get(env_name, None)
            if existing_value:
                os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)
            else:
                os.environ[env_name] = value
            if env_name == "PYTHONPATH":
                sys.path.append(value)

        del debugger_options[env_key]

    def do_connect_to_debugger():
        # Runs later on the main thread (scheduled through exec_queue below).
        try:
            # Try to import the packages needed to attach the debugger
            import pydevd
            from _pydev_bundle._pydev_saved_modules import threading
        except:
            # This happens on Jython embedded in host eclipse
            # NOTE(review): execution falls through after this failure, so the
            # code below would raise NameError on `pydevd` — confirm intended.
            traceback.print_exc()
            sys.stderr.write("pydevd is not available, cannot connect\n")

        from _pydevd_bundle.pydevd_constants import set_thread_id
        from _pydev_bundle import pydev_localhost

        set_thread_id(threading.current_thread(), "console_main")

        VIRTUAL_FRAME_ID = "1"  # matches PyStackFrameConsole.java
        VIRTUAL_CONSOLE_ID = "console_main"  # matches PyThreadConsole.java
        # Fake frame exposing the console namespace as frame locals to the IDE.
        f = FakeFrame()
        f.f_back = None
        f.f_globals = {}  # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
        f.f_locals = self.get_namespace()

        self.debugger = pydevd.PyDB()
        self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f)
        try:
            pydevd.apply_debugger_options(debugger_options)
            self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
            self.debugger.prepare_to_run()
            self.debugger.disable_tracing()
        except:
            traceback.print_exc()
            sys.stderr.write("Failed to connect to target debugger.\n")

        # Register to process commands when idle
        self.debugrunning = False
        try:
            import pydevconsole

            pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
        except:
            traceback.print_exc()
            sys.stderr.write("Version of Python does not support debuggable Interactive Console.\n")

    # Important: it has to be really enabled in the main thread, so, schedule
    # it to run in the main thread.
    self.exec_queue.put(do_connect_to_debugger)

    return ("connect complete",)
|
||||
|
||||
def handshake(self):
    """First RPC made by the client; reports success and the frontend flavor."""
    status_queue = self.connect_status_queue
    if status_queue is not None:
        status_queue.put(True)
    return "PyCharm"
|
||||
|
||||
def get_connect_status_queue(self):
    """Accessor for the connection-status queue (may be None)."""
    status_queue = self.connect_status_queue
    return status_queue
|
||||
|
||||
def hello(self, input_str):
    """Liveness probe for the RPC channel; the input string is ignored."""
    return ("Hello eclipse",)
|
||||
|
||||
def enableGui(self, guiname):
    """Enable the GUI specified in guiname (see inputhook for list).

    As with IPython, enabling multiple GUIs isn't an error, but
    only the last one's main loop runs and it may not work.

    :param guiname: name of the GUI toolkit event loop to integrate;
        "none"/""/None silently disables integration.
    """

    def do_enable_gui():
        # Runs later on the main thread (scheduled through exec_queue below).
        from _pydev_bundle.pydev_versioncheck import versionok_for_gui

        if versionok_for_gui():
            try:
                from pydev_ipython.inputhook import enable_gui

                enable_gui(guiname)
            except:
                sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
                traceback.print_exc()
        elif guiname not in ["none", "", None]:
            # Only print a warning if the guiname was going to do something
            sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
        # Return value does not matter, so return back what was sent
        return guiname

    # Important: it has to be really enabled in the main thread, so, schedule
    # it to run in the main thread.
    self.exec_queue.put(do_enable_gui)
|
||||
|
||||
def get_ipython_hidden_vars_dict(self):
    """Base implementation: no hidden variables (IPython subclass overrides this)."""
    return None
|
||||
|
||||
|
||||
# =======================================================================================================================
# FakeFrame
# =======================================================================================================================
class FakeFrame:
    """
    Used to show console with variables connection.

    A class to be used as a mock of a frame. Frame attributes (f_back,
    f_globals, f_locals) are assigned by the caller (see connectToDebugger).
    """
|
||||
@@ -0,0 +1,38 @@
|
||||
import sys
|
||||
import traceback
|
||||
from types import ModuleType
|
||||
from _pydevd_bundle.pydevd_constants import DebugInfoHolder
|
||||
|
||||
import builtins
|
||||
|
||||
|
||||
class ImportHookManager(ModuleType):
    """Replacement for builtins.__import__ that runs a one-shot activation
    callback right after a registered module is first imported."""

    def __init__(self, name, system_import):
        ModuleType.__init__(self, name)
        # The original __import__, used to perform the actual import work.
        self._system_import = system_import
        # Maps module name -> callable to run once after that module loads.
        self._modules_to_patch = {}

    def add_module_name(self, module_name, activate_function):
        """Register activate_function to run after module_name is imported."""
        self._modules_to_patch[module_name] = activate_function

    def do_import(self, name, *args, **kwargs):
        """__import__ replacement: import normally, then fire any pending hook
        for this module name (each hook runs at most once)."""
        module = self._system_import(name, *args, **kwargs)
        try:
            activate_func = self._modules_to_patch.pop(name, None)
            if activate_func:
                activate_func()  # call activate function
        except:
            # Hook failures must not break the import itself; only log when verbose.
            if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                traceback.print_exc()

        # Restore normal system importer to reduce performance impact
        # of calling this method every time an import statement is invoked
        if not self._modules_to_patch:
            builtins.__import__ = self._system_import

        return module
|
||||
|
||||
|
||||
# Install the hook: replace builtins.__import__ with the manager's do_import
# and expose the manager itself as an importable module.
import_hook_manager = ImportHookManager(__name__ + ".import_hook", builtins.__import__)
builtins.__import__ = import_hook_manager.do_import
sys.modules[import_hook_manager.__name__] = import_hook_manager
|
||||
@@ -0,0 +1,12 @@
|
||||
from _pydev_bundle._pydev_saved_modules import xmlrpclib
|
||||
from _pydev_bundle._pydev_saved_modules import xmlrpcserver
|
||||
|
||||
SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer
|
||||
|
||||
from _pydev_bundle._pydev_execfile import execfile
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import _queue
|
||||
|
||||
from _pydevd_bundle.pydevd_exec2 import Exec
|
||||
|
||||
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport
|
||||
@@ -0,0 +1,95 @@
|
||||
import sys
|
||||
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
|
||||
|
||||
import traceback
|
||||
|
||||
# Uncomment to force PyDev standard shell.
|
||||
# raise ImportError()
|
||||
|
||||
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
|
||||
|
||||
|
||||
# =======================================================================================================================
# InterpreterInterface
# =======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
    """
    The methods in this class should be registered in the xml-rpc server.

    IPython-backed console interpreter: delegates execution, completion and
    namespace access to the _PyDevFrontEnd wrapper around IPython.
    """

    def __init__(self, host, client_port, main_thread, show_banner=True, connect_status_queue=None):
        BaseInterpreterInterface.__init__(self, main_thread, connect_status_queue)
        self.client_port = client_port
        self.host = host
        # IPython frontend wrapper (singleton; see get_pydev_frontend).
        self.interpreter = get_pydev_frontend(host, client_port)
        self._input_error_printed = False
        # State for the best-effort NotifyAboutMagic handshake below.
        self.notification_succeeded = False
        self.notification_tries = 0
        self.notification_max_tries = 3
        self.show_banner = show_banner

        self.notify_about_magic()

    def get_greeting_msg(self):
        """Optionally print the IPython banner, then return the greeting text."""
        if self.show_banner:
            self.interpreter.show_banner()
        return self.interpreter.get_greeting_msg()

    def do_add_exec(self, code_fragment):
        """Execute a code fragment in IPython; returns True when more input is needed.

        Fragments ending in '??' (IPython introspection) get marker lines
        printed around their output.
        """
        self.notify_about_magic()
        if code_fragment.text.rstrip().endswith("??"):
            print("IPython-->")
        try:
            res = bool(self.interpreter.add_exec(code_fragment.text))
        finally:
            if code_fragment.text.rstrip().endswith("??"):
                print("<--IPython")

        return res

    def get_namespace(self):
        """Return the IPython user namespace dict."""
        return self.interpreter.get_namespace()

    def getCompletions(self, text, act_tok):
        """Return merged PyDev + IPython completions (see _PyDevFrontEnd)."""
        return self.interpreter.getCompletions(text, act_tok)

    def close(self):
        # Terminates the console process.
        sys.exit(0)

    def notify_about_magic(self):
        """Best-effort: tell the IDE which magic commands exist (max 3 tries)."""
        if not self.notification_succeeded:
            self.notification_tries += 1
            if self.notification_tries > self.notification_max_tries:
                return
            completions = self.getCompletions("%", "%")
            magic_commands = [x[0] for x in completions]

            server = self.get_server()

            if server is not None:
                try:
                    server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
                    self.notification_succeeded = True
                except:
                    # Keep retrying on the next call.
                    self.notification_succeeded = False

    def get_ipython_hidden_vars_dict(self):
        """Return IPython's hidden variables as a dict, or None on failure."""
        try:
            if hasattr(self.interpreter, "ipython") and hasattr(self.interpreter.ipython, "user_ns_hidden"):
                user_ns_hidden = self.interpreter.ipython.user_ns_hidden
                if isinstance(user_ns_hidden, dict):
                    # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
                    user_hidden_dict = user_ns_hidden.copy()
                else:
                    # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
                    user_hidden_dict = dict([(key, val) for key, val in self.interpreter.ipython.user_ns.items() if key in user_ns_hidden])

                # while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden`
                user_hidden_dict.setdefault("_", "")
                user_hidden_dict.setdefault("__", "")
                user_hidden_dict.setdefault("___", "")

                return user_hidden_dict
        except:
            # Getting IPython variables shouldn't break loading frame variables
            traceback.print_exc()
|
||||
@@ -0,0 +1,504 @@
|
||||
# TODO that would make IPython integration better
|
||||
# - show output at other times than when enter was pressed
|
||||
# - support proper exit to allow IPython to cleanup (e.g. temp files created with %edit)
|
||||
# - support Ctrl-D (Ctrl-Z on Windows)
|
||||
# - use IPython (numbered) prompts in PyDev
|
||||
# - better integration of IPython and PyDev completions
|
||||
# - some of the semantics on handling the code completion are not correct:
|
||||
# eg: Start a line with % and then type c should give %cd as a completion, but it doesn't
|
||||
# however type %c and request completions and %cd is given as an option
|
||||
# eg: Completing a magic when user typed it without the leading % causes the % to be inserted
|
||||
# to the left of what should be the first colon.
|
||||
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
|
||||
for IPython 0.11 to 1.0+.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import codeop
|
||||
import traceback
|
||||
|
||||
from IPython.core.error import UsageError
|
||||
from IPython.core.completer import IPCompleter
|
||||
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
|
||||
from IPython.core.usage import default_banner_parts
|
||||
from IPython.utils.strdispatch import StrDispatch
|
||||
import IPython.core.release as IPythonRelease
|
||||
from IPython.terminal.interactiveshell import TerminalInteractiveShell
|
||||
|
||||
try:
|
||||
from traitlets import CBool, Unicode
|
||||
except ImportError:
|
||||
from IPython.utils.traitlets import CBool, Unicode
|
||||
from IPython.core import release
|
||||
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib
|
||||
|
||||
default_pydev_banner_parts = default_banner_parts
|
||||
|
||||
default_pydev_banner = "".join(default_pydev_banner_parts)
|
||||
|
||||
|
||||
def show_in_pager(self, strng, *args, **kwargs):
    """PyDev replacement for IPython's pager hook.

    The PyDev console has scroll bars, so instead of paging we simply print
    the text — the same behaviour IPython uses when TERM==dumb (see page.py).
    """
    text = strng
    # Newer IPython may hand us a mime-bundle dict instead of plain text.
    if isinstance(text, dict):
        text = text.get("text/plain", text)
    print(text)
|
||||
|
||||
|
||||
def create_editor_hook(pydev_host, pydev_client_port):
    """Build an IPython 'editor' hook that opens files in the PyDev IDE."""

    def call_editor(filename, line=0, wait=True):
        """Open an editor in PyDev"""
        if line is None:
            line = 0

        # Send an absolute path: unlike most editor hooks we do not launch a
        # separate process (same approach as the zmqshell).
        filename = os.path.abspath(filename)

        # Tell PyDev, over the back-channel XML-RPC server, to open the editor.
        pydev_server = xmlrpclib.Server("http://%s:%s" % (pydev_host, pydev_client_port))
        pydev_server.IPythonEditor(filename, str(line))

        if wait:
            input("Press Enter when done editing:")

    return call_editor
|
||||
|
||||
|
||||
class PyDevIPCompleter(IPCompleter):
    def __init__(self, *args, **kwargs):
        """Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython"""
        IPCompleter.__init__(self, *args, **kwargs)
        # Use PyDev for python matches, see getCompletions below
        if self.python_matches in self.matchers:
            # `self.python_matches` matches attributes or global python names;
            # removing it leaves that job to PyDev's own completer.
            self.matchers.remove(self.python_matches)
|
||||
|
||||
|
||||
class PyDevIPCompleter6(IPCompleter):
    def __init__(self, *args, **kwargs):
        """Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython"""
        IPCompleter.__init__(self, *args, **kwargs)

    @property
    def matchers(self):
        """All active matcher routines for completion"""
        # To remove python_matches we now have to override it as it's now a property in the superclass.
        # (python matches are handled by PyDev's completer instead.)
        return [
            self.file_matches,
            self.magic_matches,
            self.python_func_kw_matches,
            self.dict_key_matches,
        ]

    @matchers.setter
    def matchers(self, value):
        # To stop the init in IPCompleter raising an AttributeError we now have to specify a setter as it's now a property in the superclass.
        # Assignments from the superclass are deliberately ignored.
        return
|
||||
|
||||
|
||||
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
    """IPython TerminalInteractiveShell tuned for embedding in the PyDev console:
    readline/term features are disabled (PyDev handles them on the Java side)
    and completion is routed through PyDev-aware completer classes."""

    banner1 = Unicode(default_pydev_banner, config=True, help="""The part of the banner to be printed before the profile""")

    # TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
    # for now, just disable term_title
    term_title = CBool(False)

    # Note in version 0.11 there is no guard in the IPython code about displaying a
    # warning, so with 0.11 you get:
    #  WARNING: Readline services not available or not loaded.
    #  WARNING: The auto-indent feature requires the readline library
    # Disable readline, readline type code is all handled by PyDev (on Java side)
    readline_use = CBool(False)
    # autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
    # and attempting to enable it will print a warning in the absence of readline.
    autoindent = CBool(False)
    # Force console to not give warning about color scheme choice and default to NoColor.
    # TODO It would be nice to enable colors in PyDev but:
    # - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
    #   effect isn't as nice anyway at the command line
    # - If done, the color scheme should default to LightBG, but actually be dependent on
    #   any settings the user has (such as if a dark theme is in use, then Linux is probably
    #   a better theme).
    colors_force = CBool(True)
    colors = Unicode("NoColor")
    # Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
    # the `simple_prompt` flag is needed
    simple_prompt = CBool(True)

    # In the PyDev Console, GUI control is done via hookable XML-RPC server
    @staticmethod
    def enable_gui(gui=None, app=None):
        """Switch amongst GUI input hooks by name."""
        # Deferred import
        from pydev_ipython.inputhook import enable_gui as real_enable_gui

        try:
            return real_enable_gui(gui, app)
        except ValueError as e:
            raise UsageError("%s" % e)

    # -------------------------------------------------------------------------
    # Things related to hooks
    # -------------------------------------------------------------------------

    def init_history(self):
        # Disable history so that we don't have an additional thread for that
        # (and we don't use the history anyways).
        self.config.HistoryManager.enabled = False
        super(PyDevTerminalInteractiveShell, self).init_history()

    def init_hooks(self):
        """Install the PyDev pager hook on top of the standard IPython hooks."""
        super(PyDevTerminalInteractiveShell, self).init_hooks()
        self.set_hook("show_in_pager", show_in_pager)

    # -------------------------------------------------------------------------
    # Things related to exceptions
    # -------------------------------------------------------------------------

    def showtraceback(self, exc_tuple=None, *args, **kwargs):
        """Print the traceback plainly so PyDev can parse it and add links."""
        # IPython does a lot of clever stuff with Exceptions. However mostly
        # it is related to IPython running in a terminal instead of an IDE.
        # (e.g. it prints out snippets of code around the stack trace)
        # PyDev does a lot of clever stuff too, so leave exception handling
        # with default print_exc that PyDev can parse and do its clever stuff
        # with (e.g. it puts links back to the original source code)
        try:
            if exc_tuple is None:
                etype, value, tb = sys.exc_info()
            else:
                etype, value, tb = exc_tuple
        except ValueError:
            return

        if tb is not None:
            traceback.print_exception(etype, value, tb)

    # -------------------------------------------------------------------------
    # Things related to text completion
    # -------------------------------------------------------------------------

    # The way to construct an IPCompleter changed in most versions,
    # so we have a custom, per version implementation of the construction

    def _new_completer_100(self):
        # Completer construction for IPython 1.x.
        completer = PyDevIPCompleter(
            shell=self,
            namespace=self.user_ns,
            global_namespace=self.user_global_ns,
            alias_table=self.alias_manager.alias_table,
            use_readline=self.has_readline,
            parent=self,
        )
        return completer

    def _new_completer_234(self):
        # correct for IPython versions 2.x, 3.x, 4.x
        completer = PyDevIPCompleter(
            shell=self,
            namespace=self.user_ns,
            global_namespace=self.user_global_ns,
            use_readline=self.has_readline,
            parent=self,
        )
        return completer

    def _new_completer_500(self):
        # Completer construction for IPython 5.x (readline no longer used).
        completer = PyDevIPCompleter(
            shell=self, namespace=self.user_ns, global_namespace=self.user_global_ns, use_readline=False, parent=self
        )
        return completer

    def _new_completer_600(self):
        # Completer construction for IPython 6+ (matchers became a property).
        completer = PyDevIPCompleter6(
            shell=self, namespace=self.user_ns, global_namespace=self.user_global_ns, use_readline=False, parent=self
        )
        return completer

    def add_completer_hooks(self):
        """Wire IPython's specialized completers (import/%run/%cd/...) into our completer."""
        from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer

        try:
            from IPython.core.completerlib import reset_completer
        except ImportError:
            # reset_completer was added for rel-0.13
            reset_completer = None
        self.configurables.append(self.Completer)

        # Add custom completers to the basic ones built into IPCompleter
        sdisp = self.strdispatchers.get("complete_command", StrDispatch())
        self.strdispatchers["complete_command"] = sdisp
        self.Completer.custom_completers = sdisp

        self.set_hook("complete_command", module_completer, str_key="import")
        self.set_hook("complete_command", module_completer, str_key="from")
        self.set_hook("complete_command", magic_run_completer, str_key="%run")
        self.set_hook("complete_command", cd_completer, str_key="%cd")
        if reset_completer:
            self.set_hook("complete_command", reset_completer, str_key="%reset")

    def init_completer(self):
        """Initialize the completion machinery.

        This creates a completer that provides the completions that are
        IPython specific. We use this to supplement PyDev's core code
        completions.
        """
        # PyDev uses its own completer and custom hooks so that it uses
        # most completions from PyDev's core completer which provides
        # extra information.
        # See getCompletions for where the two sets of results are merged

        if IPythonRelease._version_major >= 6:
            self.Completer = self._new_completer_600()
        elif IPythonRelease._version_major >= 5:
            self.Completer = self._new_completer_500()
        elif IPythonRelease._version_major >= 2:
            self.Completer = self._new_completer_234()
        elif IPythonRelease._version_major >= 1:
            self.Completer = self._new_completer_100()

        # Jedi-based completion conflicts with PyDev's completer.
        if hasattr(self.Completer, "use_jedi"):
            self.Completer.use_jedi = False

        self.add_completer_hooks()

        if IPythonRelease._version_major <= 3:
            # Only configure readline if we truly are using readline. IPython can
            # do tab-completion over the network, in GUIs, etc, where readline
            # itself may be absent
            if self.has_readline:
                self.set_readline_completer()

    # -------------------------------------------------------------------------
    # Things related to aliases
    # -------------------------------------------------------------------------

    def init_alias(self):
        # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
        # ones we don't. So don't use super and instead go right to InteractiveShell
        InteractiveShell.init_alias(self)

    # -------------------------------------------------------------------------
    # Things related to exiting
    # -------------------------------------------------------------------------
    def ask_exit(self):
        """Ask the shell to exit. Can be overridden and used as a callback."""
        # TODO PyDev's console does not have support from the Python side to exit
        # the console. If user forces the exit (with sys.exit()) then the console
        # simply reports errors. e.g.:
        # >>> import sys
        # >>> sys.exit()
        # Failed to create input stream: Connection refused
        # >>>
        # Console already exited with value: 0 while waiting for an answer.
        # Error stream:
        # Output stream:
        # >>>
        #
        # Alternatively if you use the non-IPython shell this is what happens
        # >>> exit()
        # <type 'exceptions.SystemExit'>:None
        # >>>
        # <type 'exceptions.SystemExit'>:None
        # >>>
        #
        super(PyDevTerminalInteractiveShell, self).ask_exit()
        print("To exit the PyDev Console, terminate the console within IDE.")

    # -------------------------------------------------------------------------
    # Things related to magics
    # -------------------------------------------------------------------------

    def init_magics(self):
        super(PyDevTerminalInteractiveShell, self).init_magics()
        # TODO Any additional magics for PyDev?
||||
|
||||
|
||||
# Register with the ABC so isinstance(InteractiveShell) checks inside IPython
# accept our shell subclass.
InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# _PyDevFrontEnd
|
||||
# =======================================================================================================================
|
||||
class _PyDevFrontEnd:
    """Thin wrapper around a PyDevTerminalInteractiveShell instance that adapts
    IPython execution/completion to the PyDev console protocol."""

    # IPython version string, reported in the greeting.
    version = release.__version__

    def __init__(self):
        # Create and initialize our IPython instance.
        if hasattr(PyDevTerminalInteractiveShell, "_instance") and PyDevTerminalInteractiveShell._instance is not None:
            self.ipython = PyDevTerminalInteractiveShell._instance
        else:
            self.ipython = PyDevTerminalInteractiveShell.instance()

        # Counter of executed cells and buffer of pending (incomplete) lines.
        self._curr_exec_line = 0
        self._curr_exec_lines = []

    def show_banner(self):
        self.ipython.show_banner()

    def update(self, globals, locals):
        """Rebind IPython's namespaces to the given globals/locals dicts,
        keeping any names IPython already defined that locals lacks."""
        ns = self.ipython.user_ns

        for key, value in list(ns.items()):
            if key not in locals:
                locals[key] = value

        self.ipython.user_global_ns.clear()
        self.ipython.user_global_ns.update(globals)
        self.ipython.user_ns = locals

        if hasattr(self.ipython, "history_manager") and hasattr(self.ipython.history_manager, "save_thread"):
            self.ipython.history_manager.save_thread.pydev_do_not_trace = True  # don't trace ipython history saving thread

    def complete(self, string):
        """Return IPython completions for *string*; None if the completer raises."""
        try:
            if string:
                return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
            else:
                return self.ipython.complete(string, string, 0)
        except:
            # Silence completer exceptions
            pass

    def is_complete(self, string):
        """Return a truthy value when *string* is a complete Python statement."""
        # Based on IPython 0.10.1

        if string in ("", "\n"):
            # Prefiltering, eg through ipython0, may return an empty
            # string although some operations have been accomplished. We
            # thus want to consider an empty string as a complete
            # statement.
            return True
        else:
            try:
                # Add line returns here, to make sure that the statement is
                # complete (except if '\' was used).
                # This should probably be done in a different place (like
                # maybe 'prefilter_input' method? For now, this works.
                clean_string = string.rstrip("\n")
                if not clean_string.endswith("\\"):
                    clean_string += "\n\n"

                is_complete = codeop.compile_command(clean_string, "<string>", "exec")
            except Exception:
                # XXX: Hack: return True so that the
                # code gets executed and the error captured.
                is_complete = True
            return is_complete

    def getCompletions(self, text, act_tok):
        # Get completions from IPython and from PyDev and merge the results
        # IPython only gives context free list of completions, while PyDev
        # gives detailed information about completions.
        try:
            TYPE_IPYTHON = "11"
            TYPE_IPYTHON_MAGIC = "12"
            _line, ipython_completions = self.complete(text)

            from _pydev_bundle._pydev_completer import Completer

            completer = Completer(self.get_namespace(), None)
            ret = completer.complete(act_tok)
            append = ret.append
            ip = self.ipython
            pydev_completions = set([f[0] for f in ret])
            for ipython_completion in ipython_completions:
                # PyCharm was not expecting completions with '%'...
                # Could be fixed in the backend, but it's probably better
                # fixing it at PyCharm.
                # if ipython_completion.startswith('%'):
                #     ipython_completion = ipython_completion[1:]

                if ipython_completion not in pydev_completions:
                    pydev_completions.add(ipython_completion)
                    inf = ip.object_inspect(ipython_completion)
                    if inf["type_name"] == "Magic function":
                        pydev_type = TYPE_IPYTHON_MAGIC
                    else:
                        pydev_type = TYPE_IPYTHON
                    pydev_doc = inf["docstring"]
                    if pydev_doc is None:
                        pydev_doc = ""
                    append((ipython_completion, pydev_doc, "", pydev_type))
            return ret
        except:
            import traceback

            traceback.print_exc()
            return []

    def get_namespace(self):
        return self.ipython.user_ns

    def clear_buffer(self):
        # Drop any pending (incomplete) input lines.
        del self._curr_exec_lines[:]

    def add_exec(self, line):
        """Feed one input line to IPython; returns True when more input is needed."""
        if self._curr_exec_lines:
            # Continuing a multi-line statement: accumulate, run when complete.
            self._curr_exec_lines.append(line)

            buf = "\n".join(self._curr_exec_lines)

            if self.is_complete(buf):
                self._curr_exec_line += 1
                self.ipython.run_cell(buf)
                del self._curr_exec_lines[:]
                return False  # execute complete (no more)

            return True  # needs more
        else:
            if not self.is_complete(line):
                # Did not execute
                self._curr_exec_lines.append(line)
                return True  # needs more
            else:
                self._curr_exec_line += 1
                self.ipython.run_cell(line, store_history=True)
                # hist = self.ipython.history_manager.output_hist_reprs
                # rep = hist.get(self._curr_exec_line, None)
                # if rep is not None:
                #     print(rep)
                return False  # execute complete (no more)

    def is_automagic(self):
        return self.ipython.automagic

    def get_greeting_msg(self):
        return "PyDev console: using IPython %s\n" % self.version
|
||||
|
||||
|
||||
class _PyDevFrontEndContainer:
    # Process-wide singleton holder for the _PyDevFrontEnd instance.
    _instance = None
    # (host, port) pair last used to configure the editor hook; lets
    # get_pydev_frontend() re-wire the hook only when it actually changes.
    _last_host_port = None
|
||||
|
||||
|
||||
def get_pydev_frontend(pydev_host, pydev_client_port):
    """Return the process-wide _PyDevFrontEnd, creating it on first use.

    Re-installs the editor hook whenever the (host, port) pair changes.
    """
    container = _PyDevFrontEndContainer
    if container._instance is None:
        container._instance = _PyDevFrontEnd()

    host_port = (pydev_host, pydev_client_port)
    if container._last_host_port != host_port:
        container._last_host_port = host_port

        # Back channel to PyDev to open editors (in the future other
        # info may go back this way. This is the same channel that is
        # used to get stdin, see StdIn in pydev_console_utils)
        container._instance.ipython.hooks["editor"] = create_editor_hook(pydev_host, pydev_client_port)

        # Note: setting the callback directly because setting it with set_hook
        # would actually create a chain instead of overwriting at each new call.
        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))

    return container._instance
|
||||
@@ -0,0 +1,30 @@
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
|
||||
# Hack for https://www.brainwy.com/tracker/PyDev/363 (i.e.: calling is_alive() can throw AssertionError under some
# circumstances).
# It is required to debug threads started by start_new_thread in Python 3.4
#
# Probe a throwaway Thread instance once at import time to pick the cheapest
# liveness check available on this interpreter/version.
_probe = threading.Thread()

if hasattr(_probe, "_handle") and hasattr(_probe, "_started"):  # Python 3.13 and later has this

    def is_thread_alive(t):
        """Return True while *t* has not finished (CPython >= 3.13)."""
        return not t._handle.is_done()

elif hasattr(_probe, "_is_stopped"):  # Python 3.12 and earlier has this

    def is_thread_alive(t):
        """Return True while *t* has not stopped (CPython <= 3.12)."""
        return not t._is_stopped

elif hasattr(_probe, "_Thread__stopped"):  # Python 2.x has this

    def is_thread_alive(t):
        """Return True while *t* has not stopped (Python 2)."""
        return not t._Thread__stopped

else:
    # Jython wraps a native java thread and thus only obeys the public API.
    def is_thread_alive(t):
        """Fallback: rely on the public Thread API (e.g. Jython)."""
        return t.is_alive()


del _probe
|
||||
@@ -0,0 +1,68 @@
|
||||
from _pydev_bundle._pydev_saved_modules import socket
|
||||
import sys
|
||||
|
||||
IS_JYTHON = sys.platform.find("java") != -1

# Memoized result of get_localhost(); resolved at most once per process.
_cache = None


def get_localhost():
    """Return the loopback address to use ("127.0.0.1" or "localhost").

    The literal IP is preferred over the name "localhost" because on some
    Windows versions (vista/7) name resolution could be unreliable and very
    slow (had this issue on the pyunit server).
    """
    # TODO: Needs better investigation!
    global _cache
    if _cache is not None:
        return _cache
    try:
        address_infos = socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP)
    except:
        # Ok, some versions of Python don't have getaddrinfo or SOL_TCP...
        # Just consider it 127.0.0.1 in this case.
        _cache = "127.0.0.1"
        return _cache
    for info in address_infos:
        if info[4][0] == "127.0.0.1":
            _cache = "127.0.0.1"
            break
    else:
        _cache = "localhost"
    return _cache


def get_socket_names(n_sockets, close=False):
    """Bind *n_sockets* server sockets on the loopback interface and return
    their (host, port) names; close the sockets first when *close* is True."""
    names = []
    bound_sockets = []
    for _ in range(n_sockets):
        if IS_JYTHON:
            # Although the option which would be pure java *should* work for
            # Jython, the socket being returned is still 0 (i.e.: it doesn't
            # give the local port bound, only the original port, which was 0).
            from java.net import ServerSocket

            server_sock = ServerSocket(0)
            name = get_localhost(), server_sock.getLocalPort()
        else:
            server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            server_sock.bind((get_localhost(), 0))
            name = server_sock.getsockname()

        bound_sockets.append(server_sock)
        names.append(name)

    if close:
        for server_sock in bound_sockets:
            server_sock.close()
    return names


def get_socket_name(close=False):
    """Return a single free (host, port) pair on the loopback interface."""
    return get_socket_names(1, close)[0]


if __name__ == "__main__":
    print(get_socket_name())
|
||||
@@ -0,0 +1,284 @@
|
||||
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, SHOW_COMPILE_CYTHON_COMMAND_LINE, NULL, LOG_TIME, ForkSafeLock
|
||||
from contextlib import contextmanager
|
||||
import traceback
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class _LoggingGlobals(object):
    # Messages already emitted by error_once/exception_once (message -> True).
    _warn_once_map = {}
    # Filename backing _debug_stream when logging to a file (None for stderr).
    _debug_stream_filename = None
    # Target stream for log writes; NULL silently discards until initialized.
    _debug_stream = NULL
    # Whether the lazy stream initialization has already run.
    _debug_stream_initialized = False
    # Serializes (re)initialization across threads; fork-safe.
    _initialize_lock = ForkSafeLock()
|
||||
|
||||
|
||||
def initialize_debug_stream(reinitialize=False):
    """
    :param bool reinitialize:
        Reinitialize is used to update the debug stream after a fork (thus, if it wasn't
        initialized, we don't need to do anything, just wait for the first regular log call
        to initialize).
    """
    # Cheap lock-free precheck; re-verified under the lock below.
    if reinitialize:
        if not _LoggingGlobals._debug_stream_initialized:
            return
    else:
        if _LoggingGlobals._debug_stream_initialized:
            return

    with _LoggingGlobals._initialize_lock:
        # Initialization is done lazilly, so, it's possible that multiple threads try to initialize
        # logging.

        # Check initial conditions again after obtaining the lock.
        if reinitialize:
            if not _LoggingGlobals._debug_stream_initialized:
                return
        else:
            if _LoggingGlobals._debug_stream_initialized:
                return

        _LoggingGlobals._debug_stream_initialized = True

        # Note: we cannot initialize with sys.stderr because when forking we may end up logging things in 'os' calls.
        _LoggingGlobals._debug_stream = NULL
        _LoggingGlobals._debug_stream_filename = None

        if not DebugInfoHolder.PYDEVD_DEBUG_FILE:
            # No target file configured: log straight to stderr.
            _LoggingGlobals._debug_stream = sys.stderr
        else:
            # Add pid to the filename.
            try:
                target_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
                debug_file = _compute_filename_with_pid(target_file)
                _LoggingGlobals._debug_stream = open(debug_file, "w")
                _LoggingGlobals._debug_stream_filename = debug_file
            except Exception:
                _LoggingGlobals._debug_stream = sys.stderr
                # Don't fail when trying to setup logging, just show the exception.
                traceback.print_exc()
|
||||
|
||||
|
||||
def _compute_filename_with_pid(target_file, pid=None):
|
||||
# Note: used in tests.
|
||||
dirname = os.path.dirname(target_file)
|
||||
basename = os.path.basename(target_file)
|
||||
try:
|
||||
os.makedirs(dirname)
|
||||
except Exception:
|
||||
pass # Ignore error if it already exists.
|
||||
|
||||
name, ext = os.path.splitext(basename)
|
||||
if pid is None:
|
||||
pid = os.getpid()
|
||||
return os.path.join(dirname, "%s.%s%s" % (name, pid, ext))
|
||||
|
||||
|
||||
def log_to(log_file: str, log_level: int = 3) -> None:
    """Point pydevd logging at *log_file* with verbosity *log_level*.

    The lazily-created stream is reset only when the target file actually
    changes, so repeated calls with the same file are cheap.
    """
    with _LoggingGlobals._initialize_lock:
        # The trace level can always be set directly.
        DebugInfoHolder.DEBUG_TRACE_LEVEL = log_level

        if DebugInfoHolder.PYDEVD_DEBUG_FILE == log_file:
            # Same target: keep the already-initialized stream (would be the
            # case where it's set as an env var in a new process and a
            # subprocess initializes logging to the same value).
            return

        # Target changed: drop the current stream and re-initialize lazily.
        _LoggingGlobals._debug_stream = NULL
        _LoggingGlobals._debug_stream_filename = None

        DebugInfoHolder.PYDEVD_DEBUG_FILE = log_file

        _LoggingGlobals._debug_stream_initialized = False
|
||||
|
||||
|
||||
def list_log_files(pydevd_debug_file):
    """Return paths of all log files derived from *pydevd_debug_file*.

    Matches every file in the same directory whose name starts with the base
    name and shares its extension (i.e. the per-pid variants produced by
    _compute_filename_with_pid).
    """
    directory, base = os.path.split(pydevd_debug_file)
    if not os.path.isdir(directory):
        return []
    stem, extension = os.path.splitext(base)
    return [
        os.path.join(directory, entry)
        for entry in os.listdir(directory)
        if entry.startswith(stem) and entry.endswith(extension)
    ]
|
||||
|
||||
|
||||
@contextmanager
def log_context(trace_level, stream):
    """
    To be used to temporarily change the logging settings.
    """
    # Snapshot all logging globals under the lock so a concurrent
    # initialize_debug_stream()/log_to() cannot interleave with us.
    with _LoggingGlobals._initialize_lock:
        original_trace_level = DebugInfoHolder.DEBUG_TRACE_LEVEL
        original_debug_stream = _LoggingGlobals._debug_stream
        original_pydevd_debug_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
        original_debug_stream_filename = _LoggingGlobals._debug_stream_filename
        original_initialized = _LoggingGlobals._debug_stream_initialized

        DebugInfoHolder.DEBUG_TRACE_LEVEL = trace_level
        _LoggingGlobals._debug_stream = stream
        # Mark as initialized so the lazy init does not replace *stream*.
        _LoggingGlobals._debug_stream_initialized = True
    try:
        yield
    finally:
        # Restore the exact previous state, again under the lock.
        with _LoggingGlobals._initialize_lock:
            DebugInfoHolder.DEBUG_TRACE_LEVEL = original_trace_level
            _LoggingGlobals._debug_stream = original_debug_stream
            DebugInfoHolder.PYDEVD_DEBUG_FILE = original_pydevd_debug_file
            _LoggingGlobals._debug_stream_filename = original_debug_stream_filename
            _LoggingGlobals._debug_stream_initialized = original_initialized
|
||||
|
||||
|
||||
import time

# Timestamp of the previous log write; used to print per-message time deltas
# when LOG_TIME is enabled.
_last_log_time = time.time()

# Set to True to show pid in each logged message (usually the file has it, but sometimes it's handy).
_LOG_PID = False
|
||||
|
||||
|
||||
def _pydevd_log(level, msg, *args):
    """
    Levels are:

    0 most serious warnings/errors (always printed)
    1 warnings/significant events
    2 informational trace
    3 verbose mode
    """
    if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL:
        # yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something)
        try:
            try:
                if args:
                    msg = msg % args
            except:
                # %-formatting failed (mismatched placeholders): degrade to a
                # lossless rendering instead of dropping the message.
                msg = "%s - %s" % (msg, args)

            if LOG_TIME:
                global _last_log_time
                new_log_time = time.time()
                time_diff = new_log_time - _last_log_time
                _last_log_time = new_log_time
                msg = "%.2fs - %s\n" % (
                    time_diff,
                    msg,
                )
            else:
                msg = "%s\n" % (msg,)

            if _LOG_PID:
                msg = "<%s> - %s\n" % (
                    os.getpid(),
                    msg,
                )

            try:
                try:
                    initialize_debug_stream()  # Do it as late as possible
                    _LoggingGlobals._debug_stream.write(msg)
                except TypeError:
                    if isinstance(msg, bytes):
                        # Depending on the StringIO flavor, it may only accept unicode.
                        msg = msg.decode("utf-8", "replace")
                        _LoggingGlobals._debug_stream.write(msg)
            except UnicodeEncodeError:
                # When writing to the stream it's possible that the string can't be represented
                # in the encoding expected (in this case, convert it to the stream encoding
                # or ascii if we can't find one suitable using a suitable replace).
                encoding = getattr(_LoggingGlobals._debug_stream, "encoding", "ascii")
                msg = msg.encode(encoding, "backslashreplace")
                msg = msg.decode(encoding)
                _LoggingGlobals._debug_stream.write(msg)

            _LoggingGlobals._debug_stream.flush()
        except:
            # Never let logging break the debugger (the stream may already be
            # closed at interpreter shutdown).
            pass
        return True
|
||||
|
||||
|
||||
def _pydevd_log_exception(msg="", *args):
    """Log *msg* (at level 0) followed by the current exception's traceback.

    Any error while writing propagates to the caller; the public wrapper
    exception() is responsible for swallowing it.
    """
    if msg or args:
        _pydevd_log(0, msg, *args)
    # Note: this used to be wrapped in a "try: ... except: raise" block, which
    # is a no-op handler (it re-raises everything unchanged); removed for
    # clarity -- behavior is identical.
    initialize_debug_stream()  # Do it as late as possible
    traceback.print_exc(file=_LoggingGlobals._debug_stream)
    _LoggingGlobals._debug_stream.flush()
|
||||
|
||||
|
||||
def verbose(msg, *args):
    """Log *msg* at level 3 (verbose)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL < 3:
        return
    _pydevd_log(3, msg, *args)


def debug(msg, *args):
    """Log *msg* at level 2 (informational trace)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL < 2:
        return
    _pydevd_log(2, msg, *args)


def info(msg, *args):
    """Log *msg* at level 1 (warnings/significant events)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL < 1:
        return
    _pydevd_log(1, msg, *args)


# Warnings share the same level as informational events.
warn = info
|
||||
|
||||
|
||||
def critical(msg, *args):
    """Log *msg* unconditionally (level 0)."""
    _pydevd_log(0, msg, *args)


def exception(msg="", *args):
    """Log *msg* plus the active exception's traceback; never raises.

    Should never fail (even at interpreter shutdown), hence the blanket
    exception guard.
    """
    try:
        _pydevd_log_exception(msg, *args)
    except:
        pass


# Errors are reported together with their traceback.
error = exception
|
||||
|
||||
|
||||
def error_once(msg, *args):
    """Log *msg* at critical level, but only the first time this exact
    formatted message is seen in the process."""
    try:
        message = msg % args if args else str(msg)
    except:
        # Bad format string/arguments: fall back to a lossless rendering.
        message = "%s - %s" % (msg, args)

    seen = _LoggingGlobals._warn_once_map
    if message not in seen:
        seen[message] = True
        critical(message)
|
||||
|
||||
|
||||
def exception_once(msg, *args):
    """Log *msg* with the active traceback, but only the first time this
    exact formatted message is seen in the process."""
    try:
        message = msg % args if args else str(msg)
    except:
        # Bad format string/arguments: fall back to a lossless rendering.
        message = "%s - %s" % (msg, args)

    seen = _LoggingGlobals._warn_once_map
    if message not in seen:
        seen[message] = True
        exception(message)
|
||||
|
||||
|
||||
def debug_once(msg, *args):
    """Emit *msg* once per process, only when verbose tracing is enabled."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL < 3:
        return
    error_once(msg, *args)
|
||||
|
||||
|
||||
def show_compile_cython_command_line():
    """Advise (once) how to build the cython speedups, when enabled."""
    if not SHOW_COMPILE_CYTHON_COMMAND_LINE:
        return
    root = os.path.dirname(os.path.dirname(__file__))
    setup_script = os.path.join(root, "setup_pydevd_cython.py")
    error_once(
        'warning: Debugger speedups using cython not found. Run \'"%s" "%s" build_ext --inplace\' to build.',
        sys.executable,
        setup_script,
    )
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,220 @@
|
||||
from __future__ import nested_scopes
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
import os
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
def set_trace_in_qt():
    """Enable debugger tracing for the Qt-created thread this is called from."""
    from _pydevd_bundle.pydevd_comm import get_global_debugger

    debugger = get_global_debugger()
    if debugger is None:
        return  # No debugger attached; nothing to trace.
    # Register the current thread with Python's threading module
    # (creates the dummy thread for qt).
    threading.current_thread()
    debugger.enable_tracing()
|
||||
|
||||
|
||||
_patched_qt = False


def patch_qt(qt_support_mode):
    """
    This method patches qt (PySide2, PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.

    :param qt_support_mode: falsy to disable, True/"True"/"auto" for
        auto-detection, or one of "pyside2", "pyside", "pyqt5", "pyqt4".
    """
    if not qt_support_mode:
        return

    if qt_support_mode is True or qt_support_mode == "True":
        # do not break backward compatibility
        qt_support_mode = "auto"

    if qt_support_mode == "auto":
        qt_support_mode = os.getenv("PYDEVD_PYQT_MODE", "auto")

    # Avoid patching more than once
    global _patched_qt
    if _patched_qt:
        return

    pydev_log.debug("Qt support mode: %s", qt_support_mode)

    _patched_qt = True

    if qt_support_mode == "auto":
        patch_qt_on_import = None
        try:
            import PySide2  # @UnresolvedImport @UnusedImport

            qt_support_mode = "pyside2"
        except:
            try:
                # Fix: this used to read "import Pyside" (wrong case), so the
                # PySide fallback could never be auto-detected.
                import PySide  # @UnresolvedImport @UnusedImport

                qt_support_mode = "pyside"
            except:
                try:
                    import PyQt5  # @UnresolvedImport @UnusedImport

                    qt_support_mode = "pyqt5"
                except:
                    try:
                        import PyQt4  # @UnresolvedImport @UnusedImport

                        qt_support_mode = "pyqt4"
                    except:
                        return

    if qt_support_mode == "pyside2":
        try:
            import PySide2.QtCore  # @UnresolvedImport

            _internal_patch_qt(PySide2.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == "pyside":
        try:
            import PySide.QtCore  # @UnresolvedImport

            _internal_patch_qt(PySide.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == "pyqt5":
        try:
            import PyQt5.QtCore  # @UnresolvedImport

            _internal_patch_qt(PyQt5.QtCore)
        except:
            return

    elif qt_support_mode == "pyqt4":
        # Ok, we have an issue here:
        # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
        # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
        # Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
        # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
        # as they only implements v2).
        patch_qt_on_import = "PyQt4"

        def get_qt_core_module():
            import PyQt4.QtCore  # @UnresolvedImport

            return PyQt4.QtCore

        _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module)

    else:
        raise ValueError("Unexpected qt support mode: %s" % (qt_support_mode,))
|
||||
|
||||
|
||||
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
    # I don't like this approach very much as we have to patch __import__, but I like even less
    # asking the user to configure something in the client side...
    # So, our approach is to patch PyQt4 right before the user tries to import it (at which
    # point he should've set the sip api version properly already anyways).

    pydev_log.debug("Setting up Qt post-import monkeypatch.")

    dotted = patch_qt_on_import + "."
    original_import = __import__

    from _pydev_bundle._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module

    patch_sys_module()
    patch_reload()

    def patched_import(name, *args, **kwargs):
        # First import of the target package (or one of its submodules):
        # undo all import patches and apply the Qt patch exactly once.
        if patch_qt_on_import == name or name.startswith(dotted):
            builtins.__import__ = original_import
            cancel_patches_in_sys_module()
            _internal_patch_qt(get_qt_core_module())  # Patch it only when the user would import the qt module
        return original_import(name, *args, **kwargs)

    # Note: bound after the def above; the closure resolves it at call time.
    import builtins  # Py3

    builtins.__import__ = patched_import
|
||||
|
||||
|
||||
def _internal_patch_qt(QtCore, qt_support_mode="auto"):
    """Replace QtCore.QThread/QRunnable with wrappers that enable debugger
    tracing inside threads Qt starts."""
    pydev_log.debug("Patching Qt: %s", QtCore)

    # Keep references to the originals before they are replaced below.
    _original_thread_init = QtCore.QThread.__init__
    _original_runnable_init = QtCore.QRunnable.__init__
    _original_QThread = QtCore.QThread

    class FuncWrapper:
        # Wraps a user callback so tracing is enabled before it runs.
        def __init__(self, original):
            self._original = original

        def __call__(self, *args, **kwargs):
            set_trace_in_qt()
            return self._original(*args, **kwargs)

    class StartedSignalWrapper(QtCore.QObject):  # Wrapper for the QThread.started signal
        try:
            _signal = QtCore.Signal()  # @UndefinedVariable
        except:
            # PyQt names the factory pyqtSignal instead of Signal.
            _signal = QtCore.pyqtSignal()  # @UndefinedVariable

        def __init__(self, thread, original_started):
            QtCore.QObject.__init__(self)
            self.thread = thread
            self.original_started = original_started
            if qt_support_mode in ("pyside", "pyside2"):
                # PySide: reuse the original signal and wrap callbacks in connect().
                self._signal = original_started
            else:
                # PyQt: chain our own signal so _on_call runs first.
                self._signal.connect(self._on_call)
                self.original_started.connect(self._signal)

        def connect(self, func, *args, **kwargs):
            if qt_support_mode in ("pyside", "pyside2"):
                return self._signal.connect(FuncWrapper(func), *args, **kwargs)
            else:
                return self._signal.connect(func, *args, **kwargs)

        def disconnect(self, *args, **kwargs):
            return self._signal.disconnect(*args, **kwargs)

        def emit(self, *args, **kwargs):
            return self._signal.emit(*args, **kwargs)

        def _on_call(self, *args, **kwargs):
            set_trace_in_qt()

    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread
        def __init__(self, *args, **kwargs):
            _original_thread_init(self, *args, **kwargs)

            # In PyQt5 the program hangs when we try to call original run method of QThread class.
            # So we need to distinguish instances of QThread class and instances of QThread inheritors.
            if self.__class__.run == _original_QThread.run:
                self.run = self._exec_run
            else:
                self._original_run = self.run
                self.run = self._new_run
            self._original_started = self.started
            self.started = StartedSignalWrapper(self, self.started)

        def _exec_run(self):
            # Plain QThread: default behavior is to run the event loop.
            set_trace_in_qt()
            self.exec_()
            return None

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable
        def __init__(self, *args, **kwargs):
            _original_runnable_init(self, *args, **kwargs)

            self._original_run = self.run
            self.run = self._new_run

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    QtCore.QThread = ThreadWrapper
    QtCore.QRunnable = RunnableWrapper
|
||||
@@ -0,0 +1,37 @@
|
||||
def overrides(method):
    """
    Meant to be used as

    class B:
        @overrides(A.m1)
        def m1(self):
            pass
    """

    def wrapper(func):
        # The overriding function must carry the same name as the overridden one.
        if func.__name__ != method.__name__:
            raise AssertionError("Wrong @override: %r expected, but overwriting %r." % (func.__name__, method.__name__))
        # Inherit the documentation when the override provides none.
        if func.__doc__ is None:
            func.__doc__ = method.__doc__
        return func

    return wrapper
|
||||
|
||||
|
||||
def implements(method):
    """Decorator asserting that the decorated function implements *method*
    (same name); copies the docstring when the implementation lacks one."""

    def wrapper(func):
        if func.__name__ != method.__name__:
            raise AssertionError("Wrong @implements: %r expected, but implementing %r." % (func.__name__, method.__name__))
        if func.__doc__ is None:
            func.__doc__ = method.__doc__
        return func

    return wrapper
|
||||
@@ -0,0 +1,181 @@
|
||||
"""
|
||||
The UserModuleDeleter and runfile methods are copied from
|
||||
Spyder and carry their own license agreement.
|
||||
http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py
|
||||
|
||||
Spyder License Agreement (MIT License)
|
||||
--------------------------------------
|
||||
|
||||
Copyright (c) 2009-2012 Pierre Raybaut
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from _pydev_bundle._pydev_execfile import execfile
|
||||
|
||||
|
||||
# The following classes and functions are mainly intended to be used from
|
||||
# an interactive Python session
|
||||
class UserModuleDeleter:
    """
    User Module Deleter (UMD) aims at deleting user modules
    to force Python to deeply reload them during import

    pathlist [list]: ignore list in terms of module path
    namelist [list]: ignore list in terms of module name
    """

    def __init__(self, namelist=None, pathlist=None):
        # Module names to leave alone (matched against dotted-name parts).
        self.namelist = [] if namelist is None else namelist
        # Path prefixes to leave alone (system/installation directories).
        self.pathlist = [] if pathlist is None else pathlist
        try:
            # ignore all files in org.python.pydev/pysrc
            # (removed an unused "inspect" import that was piggybacked here)
            import pydev_pysrc

            self.pathlist.append(os.path.dirname(pydev_pysrc.__file__))
        except:
            pass
        # Everything imported before construction is considered a system
        # module and is never deleted.
        self.previous_modules = list(sys.modules.keys())

    def is_module_ignored(self, modname, modpath):
        """Return a truthy value when the module must NOT be deleted.

        True when *modpath* lives under sys.prefix or a path in pathlist;
        otherwise the (possibly empty) set of dotted-name parts that appear
        in namelist.
        """
        for path in [sys.prefix] + self.pathlist:
            if modpath.startswith(path):
                return True
        else:
            return set(modname.split(".")) & set(self.namelist)

    def run(self, verbose=False):
        """
        Del user modules to force Python to deeply reload them

        Do not del modules which are considered as system modules, i.e.
        modules installed in subdirectories of Python interpreter's binary
        Do not del C modules
        """
        # Note: a leftover debug block printing modules named "aaaaa" was
        # removed here.
        log = []
        modules_copy = dict(sys.modules)
        for modname, module in modules_copy.items():
            if modname not in self.previous_modules:
                modpath = getattr(module, "__file__", None)
                if modpath is None:
                    # *module* is a C module that is statically linked into the
                    # interpreter. There is no way to know its path, so we
                    # choose to ignore it.
                    continue
                if not self.is_module_ignored(modname, modpath):
                    log.append(modname)
                    del sys.modules[modname]
        if verbose and log:
            print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted", ": " + ", ".join(log)))
|
||||
|
||||
|
||||
__umd__ = None
|
||||
|
||||
_get_globals_callback = None
|
||||
|
||||
|
||||
def _set_globals_function(get_globals):
|
||||
global _get_globals_callback
|
||||
_get_globals_callback = get_globals
|
||||
|
||||
|
||||
def _get_globals():
|
||||
"""Return current Python interpreter globals namespace"""
|
||||
if _get_globals_callback is not None:
|
||||
return _get_globals_callback()
|
||||
else:
|
||||
try:
|
||||
from __main__ import __dict__ as namespace
|
||||
except ImportError:
|
||||
try:
|
||||
# The import fails on IronPython
|
||||
import __main__
|
||||
|
||||
namespace = __main__.__dict__
|
||||
except:
|
||||
namespace
|
||||
shell = namespace.get("__ipythonshell__")
|
||||
if shell is not None and hasattr(shell, "user_ns"):
|
||||
# IPython 0.12+ kernel
|
||||
return shell.user_ns
|
||||
else:
|
||||
# Python interpreter
|
||||
return namespace
|
||||
return namespace
|
||||
|
||||
|
||||
def runfile(filename, args=None, wdir=None, namespace=None):
    """
    Run filename
    args: command line arguments (string)
    wdir: working directory
    """
    # Legacy bytes filename (Py2 era): decode to text when possible.
    try:
        if hasattr(filename, "decode"):
            filename = filename.decode("utf-8")
    except (UnicodeError, TypeError):
        pass
    global __umd__
    if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true":
        if __umd__ is None:
            # First run: create the deleter; nothing has been imported by the
            # user yet, so there is nothing to purge.
            namelist = os.environ.get("PYDEV_UMD_NAMELIST", None)
            if namelist is not None:
                namelist = namelist.split(",")
            __umd__ = UserModuleDeleter(namelist=namelist)
        else:
            # Subsequent runs: purge user modules so they are re-imported.
            verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true"
            __umd__.run(verbose=verbose)
    if args is not None and not isinstance(args, (bytes, str)):
        raise TypeError("expected a character buffer object")
    if namespace is None:
        namespace = _get_globals()
    # Remember the caller's __file__ so it can be restored afterwards.
    if "__file__" in namespace:
        old_file = namespace["__file__"]
    else:
        old_file = None
    namespace["__file__"] = filename
    # Rebuild sys.argv as the executed script would expect it.
    sys.argv = [filename]
    if args is not None:
        for arg in args.split():
            sys.argv.append(arg)
    if wdir is not None:
        try:
            if hasattr(wdir, "decode"):
                wdir = wdir.decode("utf-8")
        except (UnicodeError, TypeError):
            pass
        # Note: chdir persists after the run (matches Spyder behavior).
        os.chdir(wdir)
    execfile(filename, namespace)
    sys.argv = [""]
    # Restore the caller's __file__ (or remove the one we injected).
    if old_file is None:
        del namespace["__file__"]
    else:
        namespace["__file__"] = old_file
|
||||
@@ -0,0 +1,16 @@
|
||||
import sys
|
||||
|
||||
|
||||
def versionok_for_gui():
    """Return True if running Python is suitable for GUI Event Integration and deeper IPython integration"""
    hexv = sys.hexversion
    # We require Python 2.6+ ...
    if hexv < 0x02060000:
        return False
    # ... and for Python 3, at least 3.2.
    if 0x03000000 <= hexv < 0x03020000:
        return False
    # Not supported under Jython ("java*") nor IronPython ("cli*").
    if sys.platform.startswith(("java", "cli")):
        return False
    return True
|
||||
@@ -0,0 +1,884 @@
|
||||
from __future__ import nested_scopes
|
||||
|
||||
import fnmatch
|
||||
import os.path
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
|
||||
import re
|
||||
import time
|
||||
import json
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Configuration
|
||||
# =======================================================================================================================
|
||||
class Configuration:
    """Holds every option accepted by the pydev test runner."""

    def __init__(
        self,
        files_or_dirs="",
        verbosity=2,
        include_tests=None,
        tests=None,
        port=None,
        files_to_tests=None,
        jobs=1,
        split_jobs="tests",
        coverage_output_dir=None,
        coverage_include=None,
        coverage_output_file=None,
        exclude_files=None,
        exclude_tests=None,
        include_files=None,
        django=False,
    ):
        self.files_or_dirs = files_or_dirs
        self.verbosity = verbosity
        self.include_tests = include_tests
        self.tests = tests
        self.port = port
        self.files_to_tests = files_to_tests
        self.jobs = jobs
        self.split_jobs = split_jobs
        self.django = django

        # Filter options must be sequences of patterns, never plain strings.
        if include_tests:
            assert isinstance(include_tests, (list, tuple))
        if exclude_files:
            assert isinstance(exclude_files, (list, tuple))
        if exclude_tests:
            assert isinstance(exclude_tests, (list, tuple))

        self.exclude_files = exclude_files
        self.include_files = include_files
        self.exclude_tests = exclude_tests

        self.coverage_output_dir = coverage_output_dir
        self.coverage_include = coverage_include
        self.coverage_output_file = coverage_output_file

    def __str__(self):
        # Render every setting on its own line for log/debug output.
        settings = (
            self.files_or_dirs,
            self.verbosity,
            self.tests,
            self.port,
            self.files_to_tests,
            self.jobs,
            self.split_jobs,
            self.include_files,
            self.include_tests,
            self.exclude_files,
            self.exclude_tests,
            self.coverage_output_dir,
            self.coverage_include,
            self.coverage_output_file,
            self.django,
        )
        return """Configuration
 - files_or_dirs: %s
 - verbosity: %s
 - tests: %s
 - port: %s
 - files_to_tests: %s
 - jobs: %s
 - split_jobs: %s

 - include_files: %s
 - include_tests: %s

 - exclude_files: %s
 - exclude_tests: %s

 - coverage_output_dir: %s
 - coverage_include_dir: %s
 - coverage_output_file: %s

 - django: %s
""" % settings
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# parse_cmdline
|
||||
# =======================================================================================================================
|
||||
def parse_cmdline(argv=None):
    """
    Parses command line and returns test directories, verbosity, test filter and test suites

    usage:
        runfiles.py -v|--verbosity <level> -t|--tests <Test.test1,Test2> dirs|files

    Multiprocessing options:
        jobs=number (with the number of jobs to be used to run the tests)
        split_jobs='module'|'tests'
            if == module, a given job will always receive all the tests from a module
            if == tests, the tests will be split independently of their originating module (default)

    --exclude_files = comma-separated list of patterns with files to exclude (fnmatch style)
    --include_files = comma-separated list of patterns with files to include (fnmatch style)
    --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style)

    Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored!

    :param argv: the argument list to parse (defaults to sys.argv).
    :return: a Configuration instance with all parsed settings.
    """
    if argv is None:
        argv = sys.argv

    # Defaults, overridden by the options below.
    verbosity = 2
    include_tests = None
    tests = None
    port = None
    jobs = 1
    split_jobs = "tests"
    files_to_tests = {}
    coverage_output_dir = None
    coverage_include = None
    exclude_files = None
    exclude_tests = None
    include_files = None
    django = False

    from _pydev_bundle._pydev_getopt import gnu_getopt

    optlist, dirs = gnu_getopt(
        argv[1:],
        "",
        [
            "verbosity=",
            "tests=",
            "port=",
            "config_file=",
            "jobs=",
            "split_jobs=",
            "include_tests=",
            "include_files=",
            "exclude_files=",
            "exclude_tests=",
            "coverage_output_dir=",
            "coverage_include=",
            "django=",
        ],
    )

    for opt, value in optlist:
        if opt in ("-v", "--verbosity"):
            verbosity = value

        elif opt in ("-p", "--port"):
            port = int(value)

        elif opt in ("-j", "--jobs"):
            jobs = int(value)

        elif opt in ("-s", "--split_jobs"):
            split_jobs = value
            if split_jobs not in ("module", "tests"):
                raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,))

        elif opt in (
            "-d",
            "--coverage_output_dir",
        ):
            coverage_output_dir = value.strip()

        elif opt in (
            "-i",
            "--coverage_include",
        ):
            coverage_include = value.strip()

        elif opt in ("-I", "--include_tests"):
            include_tests = value.split(",")

        elif opt in ("-E", "--exclude_files"):
            exclude_files = value.split(",")

        elif opt in ("-F", "--include_files"):
            include_files = value.split(",")

        elif opt in ("-e", "--exclude_tests"):
            exclude_tests = value.split(",")

        elif opt in ("-t", "--tests"):
            tests = value.split(",")

        elif opt in ("--django",):
            django = value.strip() in ["true", "True", "1"]

        elif opt in ("-c", "--config_file"):
            config_file = value.strip()
            if os.path.exists(config_file):
                # Context manager guarantees the file is closed even if
                # reading raises (the original used try/finally + close()).
                with open(config_file, "r") as f:
                    config_file_contents = f.read()

                if config_file_contents:
                    config_file_contents = config_file_contents.strip()

                if config_file_contents:
                    # Each line has the format: file|TestClass.test_name
                    for line in config_file_contents.splitlines():
                        file_and_test = line.split("|")
                        if len(file_and_test) == 2:
                            file, test = file_and_test
                            # Group tests by file.
                            files_to_tests.setdefault(file, []).append(test)

            else:
                sys.stderr.write("Could not find config file: %s\n" % (config_file,))

    filter_tests_env_var = os.environ.get("PYDEV_RUNFILES_FILTER_TESTS", None)
    if filter_tests_env_var:
        loaded = json.loads(filter_tests_env_var)
        include = loaded["include"]
        for path, name in include:
            files_to_tests.setdefault(path, []).append(name)
        # Note: at this point exclude or `*` is not handled.
        # Clients need to do all the filtering on their side (could
        # change to have `exclude` and support `*` entries).

    if not isinstance(dirs, list):
        dirs = [dirs]

    ret_dirs = []
    for d in dirs:
        if "|" in d:
            # paths may come from the ide separated by |
            ret_dirs.extend(d.split("|"))
        else:
            ret_dirs.append(d)

    verbosity = int(verbosity)

    if tests:
        if verbosity > 4:
            sys.stdout.write("--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n")
        exclude_files = exclude_tests = include_files = None

    config = Configuration(
        ret_dirs,
        verbosity,
        include_tests,
        tests,
        port,
        files_to_tests,
        jobs,
        split_jobs,
        coverage_output_dir,
        coverage_include,
        exclude_files=exclude_files,
        exclude_tests=exclude_tests,
        include_files=include_files,
        django=django,
    )

    if verbosity > 5:
        sys.stdout.write(str(config) + "\n")
    return config
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# PydevTestRunner
|
||||
# =======================================================================================================================
|
||||
class PydevTestRunner(object):
    """finds and runs a file or directory of files as a unit test"""

    # Patterns for files that may contain tests / files never collected.
    __py_extensions = ["*.py", "*.pyw"]
    __exclude_files = ["__init__.*"]

    # Just to check that only these attributes will be written to instances of this class.
    __slots__ = [
        "verbosity",  # Always used
        "files_to_tests",  # If this one is given, the ones below are not used
        "files_or_dirs",  # Files or directories received in the command line
        "include_tests",  # The filter used to collect the tests
        "tests",  # Strings with the tests to be run
        "jobs",  # Integer with the number of jobs that should be used to run the test cases
        "split_jobs",  # String with 'tests' or 'module' (how should the jobs be split)
        "configuration",
        "coverage",
    ]
|
||||
|
||||
def __init__(self, configuration):
|
||||
self.verbosity = configuration.verbosity
|
||||
|
||||
self.jobs = configuration.jobs
|
||||
self.split_jobs = configuration.split_jobs
|
||||
|
||||
files_to_tests = configuration.files_to_tests
|
||||
if files_to_tests:
|
||||
self.files_to_tests = files_to_tests
|
||||
self.files_or_dirs = list(files_to_tests.keys())
|
||||
self.tests = None
|
||||
else:
|
||||
self.files_to_tests = {}
|
||||
self.files_or_dirs = configuration.files_or_dirs
|
||||
self.tests = configuration.tests
|
||||
|
||||
self.configuration = configuration
|
||||
self.__adjust_path()
|
||||
|
||||
    def __adjust_path(self):
        """add the current file or directory to the python path

        Normalizes every entry of self.files_or_dirs in place (directories
        get a trailing "/") and appends the resolved location to sys.path.
        Returns silently if an entry does not exist; raises RuntimeError for
        entries that exist but are neither file nor directory.
        """
        path_to_append = None
        # NOTE(review): only the LAST resolved entry ends up appended to
        # sys.path -- looks intentional (entries usually share a root), but
        # confirm for the multiple-roots case.
        for n in range(len(self.files_or_dirs)):
            dir_name = self.__unixify(self.files_or_dirs[n])
            if os.path.isdir(dir_name):
                if not dir_name.endswith("/"):
                    # Normalize in place so later consumers see "dir/" form.
                    self.files_or_dirs[n] = dir_name + "/"
                path_to_append = os.path.normpath(dir_name)
            elif os.path.isfile(dir_name):
                # For a file, its containing directory is what goes on the path.
                path_to_append = os.path.dirname(dir_name)
            else:
                if not os.path.exists(dir_name):
                    block_line = "*" * 120
                    sys.stderr.write("\n%s\n* PyDev test runner error: %s does not exist.\n%s\n" % (block_line, dir_name, block_line))
                    return
                msg = "unknown type. \n%s\nshould be file or a directory.\n" % (dir_name)
                raise RuntimeError(msg)
        if path_to_append is not None:
            # Add it as the last one (so, first things are resolved against the default dirs and
            # if none resolves, then we try a relative import).
            sys.path.append(path_to_append)
|
||||
|
||||
def __is_valid_py_file(self, fname):
|
||||
"""tests that a particular file contains the proper file extension
|
||||
and is not in the list of files to exclude"""
|
||||
is_valid_fname = 0
|
||||
for invalid_fname in self.__class__.__exclude_files:
|
||||
is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname))
|
||||
if_valid_ext = 0
|
||||
for ext in self.__class__.__py_extensions:
|
||||
if_valid_ext += int(fnmatch.fnmatch(fname, ext))
|
||||
return is_valid_fname > 0 and if_valid_ext > 0
|
||||
|
||||
def __unixify(self, s):
|
||||
"""stupid windows. converts the backslash to forwardslash for consistency"""
|
||||
return os.path.normpath(s).replace(os.sep, "/")
|
||||
|
||||
def __importify(self, s, dir=False):
|
||||
"""turns directory separators into dots and removes the ".py*" extension
|
||||
so the string can be used as import statement"""
|
||||
if not dir:
|
||||
dirname, fname = os.path.split(s)
|
||||
|
||||
if fname.count(".") > 1:
|
||||
# if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it...
|
||||
return
|
||||
|
||||
imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]]
|
||||
|
||||
if len(imp_stmt_pieces[0]) == 0:
|
||||
imp_stmt_pieces = imp_stmt_pieces[1:]
|
||||
|
||||
return ".".join(imp_stmt_pieces)
|
||||
|
||||
else: # handle dir
|
||||
return s.replace("\\", "/").replace("/", ".")
|
||||
|
||||
def __add_files(self, pyfiles, root, files):
|
||||
"""if files match, appends them to pyfiles. used by os.path.walk fcn"""
|
||||
for fname in files:
|
||||
if self.__is_valid_py_file(fname):
|
||||
name_without_base_dir = self.__unixify(os.path.join(root, fname))
|
||||
pyfiles.append(name_without_base_dir)
|
||||
|
||||
def find_import_files(self):
|
||||
"""return a list of files to import"""
|
||||
if self.files_to_tests:
|
||||
pyfiles = self.files_to_tests.keys()
|
||||
else:
|
||||
pyfiles = []
|
||||
|
||||
for base_dir in self.files_or_dirs:
|
||||
if os.path.isdir(base_dir):
|
||||
for root, dirs, files in os.walk(base_dir):
|
||||
# Note: handling directories that should be excluded from the search because
|
||||
# they don't have __init__.py
|
||||
exclude = {}
|
||||
for d in dirs:
|
||||
for init in ["__init__.py", "__init__.pyo", "__init__.pyc", "__init__.pyw", "__init__$py.class"]:
|
||||
if os.path.exists(os.path.join(root, d, init).replace("\\", "/")):
|
||||
break
|
||||
else:
|
||||
exclude[d] = 1
|
||||
|
||||
if exclude:
|
||||
new = []
|
||||
for d in dirs:
|
||||
if d not in exclude:
|
||||
new.append(d)
|
||||
|
||||
dirs[:] = new
|
||||
|
||||
self.__add_files(pyfiles, root, files)
|
||||
|
||||
elif os.path.isfile(base_dir):
|
||||
pyfiles.append(base_dir)
|
||||
|
||||
if self.configuration.exclude_files or self.configuration.include_files:
|
||||
ret = []
|
||||
for f in pyfiles:
|
||||
add = True
|
||||
basename = os.path.basename(f)
|
||||
if self.configuration.include_files:
|
||||
add = False
|
||||
|
||||
for pat in self.configuration.include_files:
|
||||
if fnmatch.fnmatchcase(basename, pat):
|
||||
add = True
|
||||
break
|
||||
|
||||
if not add:
|
||||
if self.verbosity > 3:
|
||||
sys.stdout.write(
|
||||
"Skipped file: %s (did not match any include_files pattern: %s)\n" % (f, self.configuration.include_files)
|
||||
)
|
||||
|
||||
elif self.configuration.exclude_files:
|
||||
for pat in self.configuration.exclude_files:
|
||||
if fnmatch.fnmatchcase(basename, pat):
|
||||
if self.verbosity > 3:
|
||||
sys.stdout.write("Skipped file: %s (matched exclude_files pattern: %s)\n" % (f, pat))
|
||||
|
||||
elif self.verbosity > 2:
|
||||
sys.stdout.write("Skipped file: %s\n" % (f,))
|
||||
|
||||
add = False
|
||||
break
|
||||
|
||||
if add:
|
||||
if self.verbosity > 3:
|
||||
sys.stdout.write("Adding file: %s for test discovery.\n" % (f,))
|
||||
ret.append(f)
|
||||
|
||||
pyfiles = ret
|
||||
|
||||
return pyfiles
|
||||
|
||||
    def __get_module_from_str(self, modname, print_exception, pyfile):
        """Import the module in the given import path.

        * Returns the "final" module, so importing "coilib40.subject.visu"
        returns the "visu" module, not the "coilib40" as returned by __import__

        Returns None on failure; when print_exception is True, the traceback
        is captured through redirected stdout/stderr and reported to the
        client via xml-rpc as a test error.
        """
        try:
            mod = __import__(modname)
            # __import__ returns the top-level package: walk down to the leaf.
            for part in modname.split(".")[1:]:
                mod = getattr(mod, part)
            return mod
        except:
            # Deliberately broad: an error importing user code must be
            # reported as a test error, not crash the runner.
            if print_exception:
                from _pydev_runfiles import pydev_runfiles_xml_rpc
                from _pydevd_bundle import pydevd_io

                buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std="stderr")
                buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std="stdout")
                try:
                    import traceback

                    traceback.print_exc()
                    sys.stderr.write("ERROR: Module: %s could not be imported (file: %s).\n" % (modname, pyfile))
                finally:
                    # Always restore the real streams before notifying.
                    pydevd_io.end_redirect("stderr")
                    pydevd_io.end_redirect("stdout")

                pydev_runfiles_xml_rpc.notifyTest("error", buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0)

            return None
|
||||
|
||||
def remove_duplicates_keeping_order(self, seq):
|
||||
seen = set()
|
||||
seen_add = seen.add
|
||||
return [x for x in seq if not (x in seen or seen_add(x))]
|
||||
|
||||
    def find_modules_from_files(self, pyfiles):
        """Return a list of (pyfile, module, import_str) tuples for the given files.

        Each file path is converted to a dotted import string and matched
        against the dotted forms of the sys.path entries to compute the
        relative import string; the first candidate that actually imports
        wins. Files whose import fails (or which are not valid modules) are
        reported and skipped.
        """
        # let's make sure that the paths we want are in the pythonpath...
        imports = [(s, self.__importify(s)) for s in pyfiles]

        sys_path = [os.path.normpath(path) for path in sys.path]
        sys_path = self.remove_duplicates_keeping_order(sys_path)

        # Dotted form of every sys.path entry, to strip path prefixes below.
        system_paths = []
        for s in sys_path:
            system_paths.append(self.__importify(s, True))

        ret = []
        for pyfile, imp in imports:
            if imp is None:
                continue  # can happen if a file is not a valid module
            choices = []
            for s in system_paths:
                if imp.startswith(s):
                    # +1 skips the "." that separates the prefix from the rest.
                    add = imp[len(s) + 1 :]
                    if add:
                        choices.append(add)
                    # sys.stdout.write(' ' + add + ' ')

            if not choices:
                sys.stdout.write("PYTHONPATH not found for file: %s\n" % imp)
            else:
                for i, import_str in enumerate(choices):
                    # Only print the import error for the last candidate.
                    print_exception = i == len(choices) - 1
                    mod = self.__get_module_from_str(import_str, print_exception, pyfile)
                    if mod is not None:
                        ret.append((pyfile, mod, import_str))
                        break

        return ret
|
||||
|
||||
# ===================================================================================================================
|
||||
# GetTestCaseNames
|
||||
# ===================================================================================================================
|
||||
class GetTestCaseNames:
|
||||
"""Yes, we need a class for that (cannot use outer context on jython 2.1)"""
|
||||
|
||||
def __init__(self, accepted_classes, accepted_methods):
|
||||
self.accepted_classes = accepted_classes
|
||||
self.accepted_methods = accepted_methods
|
||||
|
||||
def __call__(self, testCaseClass):
|
||||
"""Return a sorted sequence of method names found within testCaseClass"""
|
||||
testFnNames = []
|
||||
className = testCaseClass.__name__
|
||||
|
||||
if className in self.accepted_classes:
|
||||
for attrname in dir(testCaseClass):
|
||||
# If a class is chosen, we select all the 'test' methods'
|
||||
if attrname.startswith("test") and hasattr(getattr(testCaseClass, attrname), "__call__"):
|
||||
testFnNames.append(attrname)
|
||||
|
||||
else:
|
||||
for attrname in dir(testCaseClass):
|
||||
# If we have the class+method name, we must do a full check and have an exact match.
|
||||
if className + "." + attrname in self.accepted_methods:
|
||||
if hasattr(getattr(testCaseClass, attrname), "__call__"):
|
||||
testFnNames.append(attrname)
|
||||
|
||||
# sorted() is not available in jython 2.1
|
||||
testFnNames.sort()
|
||||
return testFnNames
|
||||
|
||||
def _decorate_test_suite(self, suite, pyfile, module_name):
|
||||
import unittest
|
||||
|
||||
if isinstance(suite, unittest.TestSuite):
|
||||
add = False
|
||||
suite.__pydev_pyfile__ = pyfile
|
||||
suite.__pydev_module_name__ = module_name
|
||||
|
||||
for t in suite._tests:
|
||||
t.__pydev_pyfile__ = pyfile
|
||||
t.__pydev_module_name__ = module_name
|
||||
if self._decorate_test_suite(t, pyfile, module_name):
|
||||
add = True
|
||||
|
||||
return add
|
||||
|
||||
elif isinstance(suite, unittest.TestCase):
|
||||
return True
|
||||
|
||||
else:
|
||||
return False
|
||||
|
||||
    def find_tests_from_modules(self, file_and_modules_and_module_name):
        """Return the unittest suites for the given (pyfile, module, name) tuples.

        When files_to_tests is set, the accepted methods come from that
        mapping (per file); otherwise the --tests filter (if any) is applied
        to all modules. Loading goes through a TestLoader whose
        getTestCaseNames is replaced with GetTestCaseNames.
        """
        # Use our own suite!
        from _pydev_runfiles import pydev_runfiles_unittest
        import unittest

        # Monkey-patch so loadTestsFromModule builds PydevTestSuite objects.
        unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite
        loader = unittest.TestLoader()

        ret = []
        if self.files_to_tests:
            for pyfile, m, module_name in file_and_modules_and_module_name:
                accepted_classes = {}
                accepted_methods = {}
                tests = self.files_to_tests[pyfile]
                for t in tests:
                    accepted_methods[t] = t

                # Per-file filter: only the listed 'Class.method' entries.
                loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

                suite = loader.loadTestsFromModule(m)
                if self._decorate_test_suite(suite, pyfile, module_name):
                    ret.append(suite)
            return ret

        if self.tests:
            accepted_classes = {}
            accepted_methods = {}

            for t in self.tests:
                splitted = t.split(".")
                if len(splitted) == 1:
                    # Just a class name: accept all its test methods.
                    accepted_classes[t] = t

                elif len(splitted) == 2:
                    # 'Class.method': accept exactly that method.
                    accepted_methods[t] = t

            loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

        for pyfile, m, module_name in file_and_modules_and_module_name:
            suite = loader.loadTestsFromModule(m)
            if self._decorate_test_suite(suite, pyfile, module_name):
                ret.append(suite)

        return ret
|
||||
|
||||
    def filter_tests(self, test_objs, internal_call=False):
        """based on a filter name, only return those tests that have
        the test case names that match

        Applies, in order: exclude_tests (fnmatch on the method name), the
        --tests class/method filter, and include_tests (fnmatch). Suites are
        kept but their _tests lists are filtered in place (recursively via
        internal_call=True); suites left empty are dropped.
        """
        import unittest

        if not internal_call:
            if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests:
                # No need to filter if we have nothing to filter!
                return test_objs

            if self.verbosity > 1:
                if self.configuration.include_tests:
                    sys.stdout.write("Tests to include: %s\n" % (self.configuration.include_tests,))

                if self.tests:
                    sys.stdout.write("Tests to run: %s\n" % (self.tests,))

                if self.configuration.exclude_tests:
                    sys.stdout.write("Tests to exclude: %s\n" % (self.configuration.exclude_tests,))

        test_suite = []
        for test_obj in test_objs:
            if isinstance(test_obj, unittest.TestSuite):
                # Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests).
                if test_obj._tests:
                    test_obj._tests = self.filter_tests(test_obj._tests, True)
                    if test_obj._tests:  # Only add the suite if we still have tests there.
                        test_suite.append(test_obj)

            elif isinstance(test_obj, unittest.TestCase):
                try:
                    testMethodName = test_obj._TestCase__testMethodName
                except AttributeError:
                    # changed in python 2.5
                    testMethodName = test_obj._testMethodName

                add = True
                if self.configuration.exclude_tests:
                    for pat in self.configuration.exclude_tests:
                        if fnmatch.fnmatchcase(testMethodName, pat):
                            if self.verbosity > 3:
                                sys.stdout.write("Skipped test: %s (matched exclude_tests pattern: %s)\n" % (testMethodName, pat))

                            elif self.verbosity > 2:
                                sys.stdout.write("Skipped test: %s\n" % (testMethodName,))

                            add = False
                            break

                if add:
                    # --tests filter (class or class.method) comes next.
                    if self.__match_tests(self.tests, test_obj, testMethodName):
                        include = True
                        if self.configuration.include_tests:
                            # include_tests acts as a whitelist on the method name.
                            include = False
                            for pat in self.configuration.include_tests:
                                if fnmatch.fnmatchcase(testMethodName, pat):
                                    include = True
                                    break
                        if include:
                            test_suite.append(test_obj)
                        else:
                            if self.verbosity > 3:
                                sys.stdout.write(
                                    "Skipped test: %s (did not match any include_tests pattern %s)\n"
                                    % (
                                        testMethodName,
                                        self.configuration.include_tests,
                                    )
                                )
        return test_suite
|
||||
|
||||
def iter_tests(self, test_objs):
|
||||
# Note: not using yield because of Jython 2.1.
|
||||
import unittest
|
||||
|
||||
tests = []
|
||||
for test_obj in test_objs:
|
||||
if isinstance(test_obj, unittest.TestSuite):
|
||||
tests.extend(self.iter_tests(test_obj._tests))
|
||||
|
||||
elif isinstance(test_obj, unittest.TestCase):
|
||||
tests.append(test_obj)
|
||||
return tests
|
||||
|
||||
def list_test_names(self, test_objs):
|
||||
names = []
|
||||
for tc in self.iter_tests(test_objs):
|
||||
try:
|
||||
testMethodName = tc._TestCase__testMethodName
|
||||
except AttributeError:
|
||||
# changed in python 2.5
|
||||
testMethodName = tc._testMethodName
|
||||
names.append(testMethodName)
|
||||
return names
|
||||
|
||||
def __match_tests(self, tests, test_case, test_method_name):
|
||||
if not tests:
|
||||
return 1
|
||||
|
||||
for t in tests:
|
||||
class_and_method = t.split(".")
|
||||
if len(class_and_method) == 1:
|
||||
# only class name
|
||||
if class_and_method[0] == test_case.__class__.__name__:
|
||||
return 1
|
||||
|
||||
elif len(class_and_method) == 2:
|
||||
if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name:
|
||||
return 1
|
||||
|
||||
return 0
|
||||
|
||||
def __match(self, filter_list, name):
|
||||
"""returns whether a test name matches the test filter"""
|
||||
if filter_list is None:
|
||||
return 1
|
||||
for f in filter_list:
|
||||
if re.match(f, name):
|
||||
return 1
|
||||
return 0
|
||||
|
||||
    def run_tests(self, handle_coverage=True):
        """runs all tests

        Discovers files, imports the test modules, collects and filters the
        tests, notifies the client (xml-rpc) and runs everything either
        in-process or through the parallel runner, optionally under coverage
        and/or inside a django test-suite runner.
        """
        sys.stdout.write("Finding files... ")
        files = self.find_import_files()
        if self.verbosity > 3:
            sys.stdout.write("%s ... done.\n" % (self.files_or_dirs))
        else:
            sys.stdout.write("done.\n")
        sys.stdout.write("Importing test modules ... ")

        if self.configuration.django:
            import django

            # django.setup() exists from django >= 1.7.
            if hasattr(django, "setup"):
                django.setup()

        if handle_coverage:
            coverage_files, coverage = start_coverage_support(self.configuration)

        # NOTE(review): the run_tests closure below references coverage_files,
        # which is only bound when handle_coverage is True -- with
        # handle_coverage=False and jobs > 1 this would raise NameError.
        # Presumably all callers use the default; confirm.

        file_and_modules_and_module_name = self.find_modules_from_files(files)
        sys.stdout.write("done.\n")

        all_tests = self.find_tests_from_modules(file_and_modules_and_module_name)
        all_tests = self.filter_tests(all_tests)

        from _pydev_runfiles import pydev_runfiles_unittest

        test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests)
        from _pydev_runfiles import pydev_runfiles_xml_rpc

        # Tell the client how many tests will run (for progress reporting).
        pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases())

        start_time = time.time()

        def run_tests():
            # Inner helper (shadows the method name on purpose): it's handed
            # to the django runner as the "suite" callback, or called directly.
            executed_in_parallel = False
            if self.jobs > 1:
                from _pydev_runfiles import pydev_runfiles_parallel

                # What may happen is that the number of jobs needed is lower than the number of jobs requested
                # (e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will
                # return False and won't run any tests.
                executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel(
                    all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include
                )

            if not executed_in_parallel:
                # If in coverage, we don't need to pass anything here (coverage is already enabled for this execution).
                runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity)
                sys.stdout.write("\n")
                runner.run(test_suite)

        if self.configuration.django:
            get_django_test_suite_runner()(run_tests).run_tests([])
        else:
            run_tests()

        if handle_coverage:
            coverage.stop()
            coverage.save()

        total_time = "Finished in: %.2f secs." % (time.time() - start_time,)
        pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time)
|
||||
|
||||
|
||||
# Cached runner class, resolved lazily by get_django_test_suite_runner().
DJANGO_TEST_SUITE_RUNNER = None


def get_django_test_suite_runner():
    """Return (and cache) a django test-suite-runner class whose run_suite
    delegates to the callback passed to its constructor.

    Tries the django >= 1.8 DiscoverRunner first, then falls back to the old
    django.test.simple.DjangoTestSuiteRunner; if neither is importable, the
    returned class raises an AssertionError when its run_tests is called.
    """
    global DJANGO_TEST_SUITE_RUNNER
    if DJANGO_TEST_SUITE_RUNNER:
        return DJANGO_TEST_SUITE_RUNNER
    try:
        # django >= 1.8
        import django
        from django.test.runner import DiscoverRunner

        class MyDjangoTestSuiteRunner(DiscoverRunner):
            def __init__(self, on_run_suite):
                django.setup()
                DiscoverRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                # Suite building/collection is handled by the pydev runner.
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                # Delegate to the callback supplied by run_tests().
                self.on_run_suite()

    except:
        # django < 1.8
        try:
            from django.test.simple import DjangoTestSuiteRunner
        except:
            # No usable django runner at all: provide a stub that fails
            # loudly only when it's actually used.
            class DjangoTestSuiteRunner:
                def __init__(self):
                    pass

                def run_tests(self, *args, **kwargs):
                    raise AssertionError(
                        "Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported."
                    )

        class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner):
            def __init__(self, on_run_suite):
                DjangoTestSuiteRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner
    return DJANGO_TEST_SUITE_RUNNER
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# main
|
||||
# =======================================================================================================================
|
||||
def main(configuration):
    """Entry point: build a PydevTestRunner for *configuration* and run it."""
    runner = PydevTestRunner(configuration)
    runner.run_tests()
|
||||
@@ -0,0 +1,77 @@
|
||||
import os.path
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import Null
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# get_coverage_files
|
||||
# =======================================================================================================================
|
||||
def get_coverage_files(coverage_output_dir, number_of_files):
    """Return *number_of_files* paths of the form ".coverage.N" inside
    *coverage_output_dir*, skipping names that already exist on disk
    (N increases monotonically)."""
    ret = []
    candidate_index = 0
    while len(ret) < number_of_files:
        candidate = os.path.join(coverage_output_dir, ".coverage.%s" % candidate_index)
        candidate_index += 1
        if not os.path.exists(candidate):
            ret.append(candidate)
    return ret
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# start_coverage_support
|
||||
# =======================================================================================================================
|
||||
def start_coverage_support(configuration):
    """Start coverage (if configured) from a Configuration object.

    Thin wrapper over start_coverage_support_from_params; returns its
    (coverage_files, coverage_instance) tuple.
    """
    output_dir = configuration.coverage_output_dir
    output_file = configuration.coverage_output_file
    jobs = configuration.jobs
    include = configuration.coverage_include
    return start_coverage_support_from_params(output_dir, output_file, jobs, include)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# start_coverage_support_from_params
|
||||
# =======================================================================================================================
|
||||
def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include):
    """Start code-coverage tracking if requested.

    Returns (coverage_files, coverage_instance):
    * coverage_files: data files reserved for child jobs (only non-empty
      when coverage_output_dir is used; the first reserved file is taken by
      this process via the COVERAGE_FILE env var).
    * coverage_instance: a started coverage object, or a Null object when
      coverage is disabled or could not be set up.
    """
    coverage_files = []
    coverage_instance = Null()
    if coverage_output_dir or coverage_output_file:
        try:
            import coverage  # @UnresolvedImport
        except:
            # Missing module is reported but not fatal: the Null instance is
            # returned and the run proceeds without coverage.
            sys.stderr.write("Error: coverage module could not be imported\n")
            sys.stderr.write("Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n")
            sys.stderr.write("is properly installed in your interpreter: %s\n" % (sys.executable,))

            import traceback

            traceback.print_exc()
        else:
            if coverage_output_dir:
                if not os.path.exists(coverage_output_dir):
                    sys.stderr.write("Error: directory for coverage output (%s) does not exist.\n" % (coverage_output_dir,))

                elif not os.path.isdir(coverage_output_dir):
                    sys.stderr.write("Error: expected (%s) to be a directory.\n" % (coverage_output_dir,))

                else:
                    # Reserve one data file per job plus one for this process.
                    n = jobs
                    if n <= 0:
                        n += 1
                    n += 1  # Add 1 more for the current process (which will do the initial import).
                    coverage_files = get_coverage_files(coverage_output_dir, n)
                    # COVERAGE_FILE tells the coverage module where to save its data.
                    os.environ["COVERAGE_FILE"] = coverage_files.pop(0)

                    # NOTE(review): coverage_include may be None here, which
                    # would pass source=[None] -- presumably callers always set
                    # it when coverage is enabled; confirm.
                    coverage_instance = coverage.coverage(source=[coverage_include])
                    coverage_instance.start()

            elif coverage_output_file:
                # Client of parallel run.
                os.environ["COVERAGE_FILE"] = coverage_output_file
                coverage_instance = coverage.coverage(source=[coverage_include])
                coverage_instance.start()

    return coverage_files, coverage_instance
|
||||
@@ -0,0 +1,206 @@
|
||||
from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport
|
||||
from nose.plugins.base import Plugin # @UnresolvedImport
|
||||
import sys
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
from contextlib import contextmanager
|
||||
from io import StringIO
|
||||
import traceback
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# PydevPlugin
|
||||
# =======================================================================================================================
|
||||
class PydevPlugin(Plugin):
|
||||
    def __init__(self, configuration):
        # Keep the runfiles configuration (used later by begin() to start
        # coverage support) before initializing the nose Plugin base.
        self.configuration = configuration
        Plugin.__init__(self)
|
||||
|
||||
    def begin(self):
        # Called before any test is run (it's always called, with multiprocess or not)
        self.start_time = time.time()  # used by finalize() to report the total run time
        self.coverage_files, self.coverage = start_coverage_support(self.configuration)
|
||||
|
||||
def finalize(self, result):
|
||||
# Called after all tests are run (it's always called, with multiprocess or not)
|
||||
self.coverage.stop()
|
||||
self.coverage.save()
|
||||
|
||||
pydev_runfiles_xml_rpc.notifyTestRunFinished("Finished in: %.2f secs." % (time.time() - self.start_time,))
|
||||
|
||||
    # ===================================================================================================================
    # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate
    # so that they're called, but unfortunately we lose some info -- i.e.: the time for each test in this
    # process).
    # ===================================================================================================================

    class Sentinel(object):
        # Unique marker used by _without_user_address to tell "attribute was
        # absent" apart from any real attribute value (including None).
        pass
|
||||
|
||||
@contextmanager
|
||||
def _without_user_address(self, test):
|
||||
# #PyDev-1095: Conflict between address in test and test.address() in PydevPlugin().report_cond()
|
||||
user_test_instance = test.test
|
||||
user_address = self.Sentinel
|
||||
user_class_address = self.Sentinel
|
||||
try:
|
||||
if "address" in user_test_instance.__dict__:
|
||||
user_address = user_test_instance.__dict__.pop("address")
|
||||
except:
|
||||
# Just ignore anything here.
|
||||
pass
|
||||
try:
|
||||
user_class_address = user_test_instance.__class__.address
|
||||
del user_test_instance.__class__.address
|
||||
except:
|
||||
# Just ignore anything here.
|
||||
pass
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if user_address is not self.Sentinel:
|
||||
user_test_instance.__dict__["address"] = user_address
|
||||
|
||||
if user_class_address is not self.Sentinel:
|
||||
user_test_instance.__class__.address = user_class_address
|
||||
|
||||
def _get_test_address(self, test):
|
||||
try:
|
||||
if hasattr(test, "address"):
|
||||
with self._without_user_address(test):
|
||||
address = test.address()
|
||||
|
||||
# test.address() is something as:
|
||||
# ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1')
|
||||
#
|
||||
# and we must pass: location, test
|
||||
# E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1']
|
||||
address = address[0], address[2]
|
||||
else:
|
||||
# multiprocess
|
||||
try:
|
||||
address = test[0], test[1]
|
||||
except TypeError:
|
||||
# It may be an error at setup, in which case it's not really a test, but a Context object.
|
||||
f = test.context.__file__
|
||||
if f.endswith(".pyc"):
|
||||
f = f[:-1]
|
||||
elif f.endswith("$py.class"):
|
||||
f = f[: -len("$py.class")] + ".py"
|
||||
address = f, "?"
|
||||
except:
|
||||
sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
|
||||
traceback.print_exc()
|
||||
sys.stderr.write("\n\n\n")
|
||||
address = "?", "?"
|
||||
return address
|
||||
|
||||
def report_cond(self, cond, test, captured_output, error=""):
|
||||
"""
|
||||
@param cond: fail, error, ok
|
||||
"""
|
||||
|
||||
address = self._get_test_address(test)
|
||||
|
||||
error_contents = self.get_io_from_error(error)
|
||||
try:
|
||||
time_str = "%.2f" % (time.time() - test._pydev_start_time)
|
||||
except:
|
||||
time_str = "?"
|
||||
|
||||
pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str)
|
||||
|
||||
def startTest(self, test):
|
||||
test._pydev_start_time = time.time()
|
||||
file, test = self._get_test_address(test)
|
||||
pydev_runfiles_xml_rpc.notifyStartTest(file, test)
|
||||
|
||||
def get_io_from_error(self, err):
|
||||
if type(err) == type(()):
|
||||
if len(err) != 3:
|
||||
if len(err) == 2:
|
||||
return err[1] # multiprocess
|
||||
s = StringIO()
|
||||
etype, value, tb = err
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
traceback.print_exception(etype, value, tb, file=s)
|
||||
return s.getvalue()
|
||||
return err
|
||||
|
||||
def get_captured_output(self, test):
|
||||
if hasattr(test, "capturedOutput") and test.capturedOutput:
|
||||
return test.capturedOutput
|
||||
return ""
|
||||
|
||||
def addError(self, test, err):
|
||||
self.report_cond(
|
||||
"error",
|
||||
test,
|
||||
self.get_captured_output(test),
|
||||
err,
|
||||
)
|
||||
|
||||
def addFailure(self, test, err):
|
||||
self.report_cond(
|
||||
"fail",
|
||||
test,
|
||||
self.get_captured_output(test),
|
||||
err,
|
||||
)
|
||||
|
||||
def addSuccess(self, test):
|
||||
self.report_cond(
|
||||
"ok",
|
||||
test,
|
||||
self.get_captured_output(test),
|
||||
"",
|
||||
)
|
||||
|
||||
|
||||
# Module-level singleton holding the active PydevPlugin (None until started).
PYDEV_NOSE_PLUGIN_SINGLETON = None


def start_pydev_nose_plugin_singleton(configuration):
    """Create (and remember) the PydevPlugin singleton for *configuration*."""
    global PYDEV_NOSE_PLUGIN_SINGLETON
    plugin = PydevPlugin(configuration)
    PYDEV_NOSE_PLUGIN_SINGLETON = plugin
    return plugin
|
||||
|
||||
|
||||
original = MultiProcessTestRunner.consolidate


# =======================================================================================================================
# new_consolidate
# =======================================================================================================================
def new_consolidate(self, result, batch_result):
    """
    Used so that it can work with the multiprocess plugin.
    Monkeypatched because nose seems a bit unsupported at this time (ideally
    the plugin would have this support by default).
    """
    ret = original(self, result, batch_result)

    # HACK: the address of the batch being consolidated is only available as a
    # local variable ('addr') in the calling frame, so peek at it there.
    parent_frame = sys._getframe().f_back
    # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4
    # so, convert it to what report_cond expects
    addr = parent_frame.f_locals["addr"]
    i = addr.rindex(":")
    addr = [addr[:i], addr[i + 1 :]]

    output, testsRun, failures, errors, errorClasses = batch_result
    if failures or errors:
        for failure in failures:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond("fail", addr, output, failure)

        for error in errors:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond("error", addr, output, error)
    else:
        PYDEV_NOSE_PLUGIN_SINGLETON.report_cond("ok", addr, output)

    return ret


# Install the patched consolidate so per-batch results reach the IDE.
MultiProcessTestRunner.consolidate = new_consolidate
|
||||
@@ -0,0 +1,266 @@
|
||||
import unittest
|
||||
from _pydev_bundle._pydev_saved_modules import thread
|
||||
import queue as Queue
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
import os
|
||||
import threading
|
||||
import sys
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# flatten_test_suite
|
||||
# =======================================================================================================================
|
||||
def flatten_test_suite(test_suite, ret):
    """Recursively append every TestCase contained in *test_suite* to *ret*.

    Depth-first, left-to-right, so the original ordering is preserved.
    """
    if isinstance(test_suite, unittest.TestCase):
        ret.append(test_suite)
    elif isinstance(test_suite, unittest.TestSuite):
        for child in test_suite._tests:
            flatten_test_suite(child, ret)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# execute_tests_in_parallel
|
||||
# =======================================================================================================================
|
||||
def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include):
    """
    @param tests: list(PydevTestSuite)
        A list with the suites to be run

    @param jobs: int
        The maximum number of client processes to spawn.

    @param split: str
        Either 'module' or the number of tests that should be run in each batch

    @param verbosity: int
        Verbosity level forwarded to each client process.

    @param coverage_files: list(file)
        A list with the files that should be used for giving coverage information (if empty, coverage information
        should not be gathered).

    @param coverage_include: str
        The pattern that should be included in the coverage.

    @return: bool
        Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1
        should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be
        run.

        It may also return False if in debug mode (in which case, multi-processes are not accepted)
    """
    try:
        from _pydevd_bundle.pydevd_comm import get_global_debugger

        # Multi-process runs are not supported while debugging.
        if get_global_debugger() is not None:
            return False
    except:
        pass  # Ignore any error here.

    # This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When
    # split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests
    # from a given module.
    tests_queue = []

    queue_elements = []
    if split == "module":
        module_to_tests = {}
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                # Group by (file, module) so a whole module runs in one job.
                key = (test.__pydev_pyfile__, test.__pydev_module_name__)
                module_to_tests.setdefault(key, []).append(test)

        # NOTE: 'tests' is rebound by this loop; the original parameter is not
        # used again afterwards, so this is harmless (but fragile).
        for key, tests in module_to_tests.items():
            queue_elements.append(tests)

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    elif split == "tests":
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                queue_elements.append([test])

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    else:
        raise AssertionError("Do not know how to handle: %s" % (split,))

    for test_cases in queue_elements:
        test_queue_elements = []
        for test_case in test_cases:
            try:
                test_name = test_case.__class__.__name__ + "." + test_case._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                test_name = test_case.__class__.__name__ + "." + test_case._TestCase__testMethodName

            # Wire format understood by the client: "filename|Class.testName".
            test_queue_elements.append(test_case.__pydev_pyfile__ + "|" + test_name)

        tests_queue.append(test_queue_elements)

    if jobs < 2:
        # Not worth parallelizing; the caller runs the tests sequentially.
        return False

    sys.stdout.write("Running tests in parallel with: %s jobs.\n" % (jobs,))

    queue = Queue.Queue()
    for item in tests_queue:
        queue.put(item, block=False)

    providers = []
    clients = []
    for i in range(jobs):
        # One in-process xml-rpc provider + one client subprocess per job.
        test_cases_provider = CommunicationThread(queue)
        providers.append(test_cases_provider)

        test_cases_provider.start()
        port = test_cases_provider.port

        if coverage_files:
            clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include))
        else:
            clients.append(ClientThread(i, port, verbosity))

    for client in clients:
        client.start()

    client_alive = True
    while client_alive:
        client_alive = False
        for client in clients:
            # Wait for all the clients to exit.
            if not client.finished:
                client_alive = True
                time.sleep(0.2)
                break

    for provider in providers:
        provider.shutdown()

    return True
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# CommunicationThread
|
||||
# =======================================================================================================================
|
||||
class CommunicationThread(threading.Thread):
    """In-process xml-rpc server that hands out test batches to one client.

    A client subprocess polls GetTestsToRun() and reports results back through
    notifyStartTest/notifyTest (possibly batched via notifyCommands).
    """

    def __init__(self, tests_queue):
        threading.Thread.__init__(self)
        self.daemon = True
        self.queue = tests_queue
        # Set to True once the queue has been exhausted (see GetTestsToRun).
        self.finished = False
        from _pydev_bundle.pydev_imports import SimpleXMLRPCServer
        from _pydev_bundle import pydev_localhost

        # Create server
        # Port 0 lets the OS pick a free port; the real port is read below.
        server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False)
        server.register_function(self.GetTestsToRun)
        server.register_function(self.notifyStartTest)
        server.register_function(self.notifyTest)
        server.register_function(self.notifyCommands)
        self.port = server.socket.getsockname()[1]
        self.server = server

    def GetTestsToRun(self, job_id):
        """
        @param job_id:

        @return: list(str)
            Each entry is a string in the format: filename|Test.testName
        """
        try:
            ret = self.queue.get(block=False)
            return ret
        except:  # Any exception getting from the queue (empty or not) means we finished our work on providing the tests.
            self.finished = True
            return []

    def notifyCommands(self, job_id, commands):
        # Batch notification.
        # Each command is a (method_name, args, kwargs) tuple (see
        # ParallelNotification.to_tuple in the client module).
        for command in commands:
            getattr(self, command[0])(job_id, *command[1], **command[2])

        return True

    def notifyStartTest(self, job_id, *args, **kwargs):
        # Forward to the IDE-facing xml-rpc channel; job_id is only used for
        # dispatch and is intentionally dropped here.
        pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs)
        return True

    def notifyTest(self, job_id, *args, **kwargs):
        pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs)
        return True

    def shutdown(self):
        if hasattr(self.server, "shutdown"):
            self.server.shutdown()
        else:
            # Older servers without shutdown(): flag run() to leave its loop.
            self._shutdown = True

    def run(self):
        if hasattr(self.server, "shutdown"):
            self.server.serve_forever()
        else:
            self._shutdown = False
            while not self._shutdown:
                self.server.handle_request()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Client
|
||||
# =======================================================================================================================
|
||||
class ClientThread(threading.Thread):
    """Launches one child interpreter running pydev_runfiles_parallel_client.

    The child connects back to a CommunicationThread on *port*, pulls its test
    batches and runs them; this thread just waits for the child to exit and
    then flags `finished` so execute_tests_in_parallel can stop polling.
    """

    def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.port = port
        self.job_id = job_id
        self.verbosity = verbosity
        self.finished = False
        self.coverage_output_file = coverage_output_file
        self.coverage_include = coverage_include

    def _reader_thread(self, pipe, target):
        # Forward a child pipe to this process' stream, byte by byte.
        while True:
            target.write(pipe.read(1))

    def run(self):
        try:
            from _pydev_runfiles import pydev_runfiles_parallel_client

            # TODO: Support Jython:
            #
            # For jython, instead of using sys.executable, we should use:
            # r'D:\bin\jdk_1_5_09\bin\java.exe',
            # '-classpath',
            # 'D:/bin/jython-2.2.1/jython.jar',
            # 'org.python.util.jython',

            args = [
                sys.executable,
                pydev_runfiles_parallel_client.__file__,
                str(self.job_id),
                str(self.port),
                str(self.verbosity),
            ]

            if self.coverage_output_file and self.coverage_include:
                args.append(self.coverage_output_file)
                args.append(self.coverage_include)

            import subprocess

            # Fix: removed a dead `if False:` debug branch that piped the
            # child's stdout/stderr through _reader_thread -- it called
            # thread.start_new_thread(target=..., args=...) with keyword
            # arguments the function does not accept (its signature is
            # positional: start_new_thread(function, args)), so it would have
            # raised TypeError if ever enabled.
            proc = subprocess.Popen(args, env=os.environ, shell=False)
            proc.wait()

        finally:
            self.finished = True
|
||||
+194
@@ -0,0 +1,194 @@
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
|
||||
|
||||
Queue = _queue.Queue
|
||||
import traceback
|
||||
import sys
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params
|
||||
import threading
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ParallelNotification
|
||||
# =======================================================================================================================
|
||||
class ParallelNotification(object):
    """One queued xml-rpc call: a method name plus its call arguments."""

    def __init__(self, method, args, kwargs):
        # Stored verbatim; serialized later through to_tuple() for batching.
        self.method = method
        self.args = args
        self.kwargs = kwargs

    def to_tuple(self):
        """Return the (method, args, kwargs) triple for batch transmission."""
        return (self.method, self.args, self.kwargs)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# KillServer
|
||||
# =======================================================================================================================
|
||||
class KillServer(object):
    """Sentinel placed on the notifications queue to make ServerComm exit."""
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ServerComm
|
||||
# =======================================================================================================================
|
||||
class ServerComm(threading.Thread):
    """Drains the notifications queue and forwards commands to the server.

    Runs until a KillServer sentinel is received; ParallelNotification entries
    are sent in batches through server.notifyCommands, serialized by
    server.lock (the xml-rpc proxy is shared with the main thread).
    """

    def __init__(self, job_id, server):
        self.notifications_queue = Queue()
        threading.Thread.__init__(self)
        # Non-daemon: wait for all the notifications to be passed before exiting!
        self.daemon = False
        # Fix: the original also asserted `port is not None`, but no `port`
        # name exists in this scope (the signature is (job_id, server)), so
        # constructing a ServerComm raised NameError. The port is already
        # baked into the *server* proxy, so the assert is simply dropped.
        assert job_id is not None
        self.job_id = job_id

        self.finished = False
        self.server = server

    def run(self):
        while True:
            kill_found = False
            commands = []
            # Block for the first command, then drain whatever else is already
            # queued so notifications go out as a single batch.
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  # No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  # That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    # Batch notification.
                    self.server.lock.acquire()
                    try:
                        self.server.notifyCommands(self.job_id, commands)
                    finally:
                        self.server.lock.release()
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ServerFacade
|
||||
# =======================================================================================================================
|
||||
class ServerFacade(object):
    """Stand-in for the xml-rpc server that queues notifications instead.

    Start/finish notifications relevant only to the main process are dropped;
    per-test notifications are queued for ServerComm to batch-send.
    """

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def notifyTestsCollected(self, *args, **kwargs):
        pass  # This notification won't be passed

    def notifyTestRunFinished(self, *args, **kwargs):
        pass  # This notification won't be passed

    def notifyStartTest(self, *args, **kwargs):
        notification = ParallelNotification("notifyStartTest", args, kwargs)
        self.notifications_queue.put_nowait(notification)

    def notifyTest(self, *args, **kwargs):
        notification = ParallelNotification("notifyTest", args, kwargs)
        self.notifications_queue.put_nowait(notification)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# run_client
|
||||
# =======================================================================================================================
|
||||
def run_client(job_id, port, verbosity, coverage_output_file, coverage_include):
    """Worker entry point: pull test batches from the server and run them.

    Connects back to the CommunicationThread xml-rpc server on *port*, polls
    GetTestsToRun until the queue is exhausted and reports results through a
    ServerFacade/ServerComm pair. Coverage is started lazily on the first
    batch when coverage_output_file/coverage_include are given.
    """
    job_id = int(job_id)

    from _pydev_bundle import pydev_localhost

    server = xmlrpclib.Server("http://%s:%s" % (pydev_localhost.get_localhost(), port))
    # The lock serializes access to the (non-thread-safe) xml-rpc proxy, which
    # is shared with the ServerComm notification thread.
    server.lock = threading.Lock()

    server_comm = ServerComm(job_id, server)
    server_comm.start()

    try:
        server_facade = ServerFacade(server_comm.notifications_queue)
        from _pydev_runfiles import pydev_runfiles
        from _pydev_runfiles import pydev_runfiles_xml_rpc

        # All result notifications from the test runner go through the facade
        # (i.e.: are queued and batch-sent by ServerComm).
        pydev_runfiles_xml_rpc.set_server(server_facade)

        # Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated
        # before receiving any test -- which would mean a different process got all the tests to run).
        coverage = None

        try:
            tests_to_run = [1]
            while tests_to_run:
                # Investigate: is it dangerous to use the same xmlrpclib server from different threads?
                # It seems it should be, as it creates a new connection for each request...
                server.lock.acquire()
                try:
                    tests_to_run = server.GetTestsToRun(job_id)
                finally:
                    server.lock.release()

                if not tests_to_run:
                    break

                if coverage is None:
                    _coverage_files, coverage = start_coverage_support_from_params(None, coverage_output_file, 1, coverage_include)

                # Each entry is "filename|Class.testName" (see the provider).
                files_to_tests = {}
                for test in tests_to_run:
                    filename_and_test = test.split("|")
                    if len(filename_and_test) == 2:
                        files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1])

                configuration = pydev_runfiles.Configuration(
                    "",
                    verbosity,
                    None,
                    None,
                    None,
                    files_to_tests,
                    1,  # Always single job here
                    None,
                    # The coverage is handled in this loop.
                    coverage_output_file=None,
                    coverage_include=None,
                )
                test_runner = pydev_runfiles.PydevTestRunner(configuration)
                sys.stdout.flush()
                test_runner.run_tests(handle_coverage=False)
        finally:
            if coverage is not None:
                coverage.stop()
                coverage.save()

    except:
        traceback.print_exc()
    # Always signal ServerComm to flush and exit (it is non-daemon, so the
    # process would otherwise hang waiting for it).
    server_comm.notifications_queue.put_nowait(KillServer())
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# main
|
||||
# =======================================================================================================================
|
||||
if __name__ == "__main__":
    # Args: job_id port verbosity [coverage_output_file coverage_include]
    if len(sys.argv) - 1 == 3:
        job_id, port, verbosity = sys.argv[1:]
        coverage_output_file, coverage_include = None, None

    elif len(sys.argv) - 1 == 5:
        job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:]

    else:
        # Fix: the original concatenated str + list (`"..." + sys.argv[1:]`),
        # which raises TypeError instead of the intended AssertionError.
        raise AssertionError("Could not find out how to handle the parameters: %s" % (sys.argv[1:],))

    job_id = int(job_id)
    port = int(port)
    verbosity = int(verbosity)
    run_client(job_id, port, verbosity, coverage_output_file, coverage_include)
|
||||
@@ -0,0 +1,308 @@
|
||||
import base64
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
import time
|
||||
import zlib
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from pydevd_file_utils import canonical_normalized_path
|
||||
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
|
||||
# =========================================================================
|
||||
# Load filters with tests we should skip
|
||||
# =========================================================================
|
||||
# Mapping of canonical filename -> accepted test names, lazily loaded from the
# PYDEV_PYTEST_SKIP environment variable (None until _load_filters runs).
py_test_accept_filter = None


def _load_filters():
    """Populate py_test_accept_filter from the PYDEV_PYTEST_SKIP env var.

    The variable holds a base64-encoded, zlib-compressed pickle of a dict
    {filename: [test names]}; empty/missing means "no filter". Idempotent
    after the first call.
    """
    global py_test_accept_filter
    if py_test_accept_filter is None:
        py_test_accept_filter = os.environ.get("PYDEV_PYTEST_SKIP")
        if py_test_accept_filter:
            # NOTE(review): pickle.loads on data from an environment variable.
            # The variable is set by the pydev launcher itself, so this is not
            # untrusted input -- but do not repurpose it for external data.
            py_test_accept_filter = pickle.loads(zlib.decompress(base64.b64decode(py_test_accept_filter)))

            # Newer versions of pytest resolve symlinks, so, we
            # may need to filter with a resolved path too.
            new_dct = {}
            for filename, value in py_test_accept_filter.items():
                new_dct[canonical_normalized_path(str(Path(filename).resolve()))] = value

            py_test_accept_filter.update(new_dct)

        else:
            py_test_accept_filter = {}
|
||||
|
||||
|
||||
def is_in_xdist_node():
    """Return True when running in an xdist worker (not the main process).

    The launcher stores the main process pid in PYDEV_MAIN_PID; a different
    (or same) pid tells us which side we are on.
    """
    main_pid = os.environ.get("PYDEV_MAIN_PID")
    return bool(main_pid) and main_pid != str(os.getpid())
|
||||
|
||||
|
||||
# Whether the xml-rpc connection was already attempted for this process.
connected = False


def connect_to_server_for_communication_to_xml_rpc_on_xdist():
    """On xdist workers, connect the xml-rpc client back to the main server.

    Idempotent: only the first call does anything. The port comes from the
    PYDEV_PYTEST_SERVER environment variable set by the launcher.
    """
    global connected
    if connected:
        return
    connected = True
    if is_in_xdist_node():
        port = os.environ.get("PYDEV_PYTEST_SERVER")
        if port == "None":
            # Launcher explicitly disabled the server connection.
            pass
        elif not port:
            sys.stderr.write("Error: no PYDEV_PYTEST_SERVER environment variable defined.\n")
        else:
            pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
|
||||
|
||||
|
||||
# Python-version flags (kept for compatibility; this module runs on Python 3).
PY2 = sys.version_info[0] < 3
PY3 = not PY2


class State:
    """Module-wide mutable state: run start time and redirection buffers."""

    # Wall-clock time at import, used for the final "Finished in" report.
    start_time = time.time()
    # stdout/stderr capture buffers; None while redirection is not active.
    buf_err = None
    buf_out = None
|
||||
|
||||
|
||||
def start_redirect():
    """Start capturing stdout/stderr into State buffers (idempotent)."""
    if State.buf_out is not None:
        # Already redirecting.
        return
    from _pydevd_bundle import pydevd_io

    # keep_original_redirection: output still reaches the real streams while
    # also being accumulated in the returned buffers.
    State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std="stderr")
    State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std="stdout")
|
||||
|
||||
|
||||
def get_curr_output():
    """Return (stdout_text, stderr_text) captured so far ('' when inactive)."""
    # Snapshot the buffers once so a concurrent reset cannot bite between the
    # None-check and the getvalue() call.
    buf_out, buf_err = State.buf_out, State.buf_err
    out = "" if buf_out is None else buf_out.getvalue()
    err = "" if buf_err is None else buf_err.getvalue()
    return out, err
|
||||
|
||||
|
||||
def pytest_unconfigure():
    """Tell the IDE the whole run finished (main process only)."""
    if is_in_xdist_node():
        return
    # Only report that it finished when on the main node (we don't want to report
    # the finish on each separate node).
    pydev_runfiles_xml_rpc.notifyTestRunFinished("Finished in: %.2f secs." % (time.time() - State.start_time,))
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(session, config, items):
    """Keep only the collected items accepted by the pydev filter (if any)."""
    # A note: in xdist, this is not called on the main process, only in the
    # secondary nodes, so, we'll actually make the filter and report it multiple
    # times.
    connect_to_server_for_communication_to_xml_rpc_on_xdist()

    _load_filters()
    if not py_test_accept_filter:
        pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
        return  # Keep on going (nothing to filter)

    new_items = []
    for item in items:
        f = canonical_normalized_path(str(item.parent.fspath))
        name = item.name

        if f not in py_test_accept_filter:
            # print('Skip file: %s' % (f,))
            continue  # Skip the file

        # Strip a parametrization suffix ("test_x[param]" -> "test_x") so the
        # filter can match either form.
        i = name.find("[")
        name_without_parametrize = None
        if i > 0:
            name_without_parametrize = name[:i]

        accept_tests = py_test_accept_filter[f]

        if item.cls is not None:
            class_name = item.cls.__name__
        else:
            class_name = None
        for test in accept_tests:
            if test == name:
                # Direct match of the test (just go on with the default
                # loading)
                new_items.append(item)
                break

            if name_without_parametrize is not None and test == name_without_parametrize:
                # This happens when parameterizing pytest tests on older versions
                # of pytest where the test name doesn't include the fixture name
                # in it.
                new_items.append(item)
                break

            if class_name is not None:
                # The filter may also name tests as "Class.test" or accept the
                # whole class by its bare name.
                if test == class_name + "." + name:
                    new_items.append(item)
                    break

                if name_without_parametrize is not None and test == class_name + "." + name_without_parametrize:
                    new_items.append(item)
                    break

                if class_name == test:
                    new_items.append(item)
                    break
        else:
            pass
            # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))

    # Modify the original list
    items[:] = new_items
    pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
|
||||
|
||||
|
||||
try:
|
||||
"""
|
||||
pytest > 5.4 uses own version of TerminalWriter based on py.io.TerminalWriter
|
||||
and assumes there is a specific method TerminalWriter._write_source
|
||||
so try load pytest version first or fallback to default one
|
||||
"""
|
||||
from _pytest._io import TerminalWriter
|
||||
except ImportError:
|
||||
from py.io import TerminalWriter
|
||||
|
||||
|
||||
def _get_error_contents_from_report(report):
    """Render report.longrepr (failure/error details) as plain text, '' if none."""
    if report.longrepr is not None:
        try:
            # py.io.TerminalWriter style: it owns an internal stringio.
            tw = TerminalWriter(stringio=True)
            stringio = tw.stringio
        except TypeError:
            # _pytest._io.TerminalWriter style (pytest > 5.4): pass a file.
            import io

            stringio = io.StringIO()
            tw = TerminalWriter(file=stringio)
        # Disable ANSI markup so the IDE gets plain text.
        tw.hasmarkup = False
        report.toterminal(tw)
        exc = stringio.getvalue()
        s = exc.strip()
        if s:
            return s

    return ""
|
||||
|
||||
|
||||
def pytest_collectreport(report):
    """Report collection-time errors to the IDE as a failed pseudo-test."""
    error_contents = _get_error_contents_from_report(report)
    if not error_contents:
        return
    report_test("fail", "<collect errors>", "<collect errors>", "", error_contents, 0.0)
|
||||
|
||||
|
||||
def append_strings(s1, s2):
    """Concatenate *s1* and *s2*; mixed bytes/str inputs are joined as str."""
    if s1.__class__ == s2.__class__:
        return s1 + s2

    # Mixed types: prefer str, decoding any bytes side leniently.
    as_text = [
        part.decode("utf-8", "replace") if isinstance(part, bytes) else part
        for part in (s1, s2)
    ]
    return as_text[0] + as_text[1]
|
||||
|
||||
|
||||
def pytest_runtest_logreport(report):
    """Forward a test report (setup/call/teardown) to the IDE as ok/skip/fail/error."""
    if is_in_xdist_node():
        # When running with xdist, we don't want the report to be called from the node, only
        # from the main process.
        return
    report_duration = report.duration
    report_when = report.when
    report_outcome = report.outcome

    if hasattr(report, "wasxfail"):
        # xfail/xpass results are reported as passed unless pytest skipped them.
        if report_outcome != "skipped":
            report_outcome = "passed"

    if report_outcome == "passed":
        # passed on setup/teardown: no need to report if in setup or teardown
        # (only on the actual test if it passed).
        if report_when in ("setup", "teardown"):
            return

        status = "ok"

    elif report_outcome == "skipped":
        status = "skip"

    else:
        # It has only passed, skipped and failed (no error), so, let's consider
        # error if not on call.
        if report_when in ("setup", "teardown"):
            status = "error"

        else:
            # any error in the call (not in setup or teardown) is considered a
            # regular failure.
            status = "fail"

    # This will work if pytest is not capturing it, if it is, nothing will
    # come from here...
    captured_output, error_contents = getattr(report, "pydev_captured_output", ""), getattr(report, "pydev_error_contents", "")
    for type_section, value in report.sections:
        if value:
            if type_section in ("err", "stderr", "Captured stderr call"):
                error_contents = append_strings(error_contents, value)
            else:
                # Fix: stdout-like sections must extend captured_output. The
                # previous code did append_strings(error_contents, value),
                # which discarded any prior captured output and duplicated the
                # stderr text into the stdout channel.
                captured_output = append_strings(captured_output, value)

    filename = getattr(report, "pydev_fspath_strpath", "<unable to get>")
    test = report.location[2]

    if report_outcome != "skipped":
        # On skipped, we'll have a traceback for the skip, which is not what we
        # want.
        exc = _get_error_contents_from_report(report)
        if exc:
            if error_contents:
                error_contents = append_strings(error_contents, "----------------------------- Exceptions -----------------------------\n")
            error_contents = append_strings(error_contents, exc)

    report_test(status, filename, test, captured_output, error_contents, report_duration)
|
||||
|
||||
|
||||
def report_test(status, filename, test, captured_output, error_contents, duration):
    """Send one finished-test notification over the xml-rpc channel.

    status: 'ok', 'skip', 'fail' or 'error'
    filename: e.g. 'D:\\src\\mod1\\hello.py'
    test: e.g. 'TestCase.testMet1'
    duration: seconds, formatted to two decimal places for the IDE.
    """
    pydev_runfiles_xml_rpc.notifyTest(
        status, captured_output, error_contents, filename, test, "%.2f" % (duration,)
    )
|
||||
|
||||
|
||||
# Fail fast on pytest versions too old to provide pytest.hookimpl, which the
# hookwrapper below relies on.
if not hasattr(pytest, "hookimpl"):
    raise AssertionError("Please upgrade pytest (the current version of pytest: %s is unsupported)" % (pytest.__version__,))
|
||||
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # Hook wrapper: let pytest build the report first, then attach the test
    # file path and our own captured output so later reporting hooks can use
    # them.
    outcome = yield
    report = outcome.get_result()
    report.pydev_fspath_strpath = item.fspath.strpath
    # NOTE(review): get_curr_output presumably returns the (stdout, stderr)
    # pair captured by our redirection — confirm against its definition.
    report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
|
||||
|
||||
|
||||
@pytest.mark.tryfirst
def pytest_runtest_setup(item):
    """
    Start output redirection and notify the server that a test is starting.

    Note: with xdist will be on a secondary process.
    """
    # We have our own redirection: if xdist does its redirection, we'll have
    # nothing in our contents (which is OK), but if it does, we'll get nothing
    # from pytest but will get our own here.
    start_redirect()
    filename = item.fspath.strpath
    test = item.location[2]

    pydev_runfiles_xml_rpc.notifyStartTest(filename, test)
|
||||
@@ -0,0 +1,141 @@
|
||||
import unittest as python_unittest
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydevd_bundle import pydevd_io
|
||||
import traceback
|
||||
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
|
||||
from io import StringIO
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# PydevTextTestRunner
|
||||
# =======================================================================================================================
|
||||
class PydevTextTestRunner(python_unittest.TextTestRunner):
    # TextTestRunner that produces PydevTestResult instances so that test
    # events are forwarded to the pydev runfiles XML-RPC server.
    def _makeResult(self):
        return PydevTestResult(self.stream, self.descriptions, self.verbosity)
|
||||
|
||||
|
||||
# Capture the concrete TestResult class the stock TextTestRunner uses (it can
# vary across Python versions) so PydevTestResult can subclass it.
_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# PydevTestResult
|
||||
# =======================================================================================================================
|
||||
class PydevTestResult(_PythonTextTestResult):
    """TestResult that mirrors every test event to the runfiles XML-RPC server.

    Output is captured per-test via pydevd_io redirection between startTest
    and stopTest; errors/failures raised inside that window are stacked and
    reported in stopTest, while ones raised outside it (e.g. class/module
    setup) are reported immediately.
    """

    def addSubTest(self, test, subtest, err):
        """Called at the end of a subtest.
        'err' is None if the subtest ended successfully, otherwise it's a
        tuple of values as returned by sys.exc_info().
        """
        _PythonTextTestResult.addSubTest(self, test, subtest, err)
        if err is not None:
            subdesc = subtest._subDescription()
            error = (test, self._exc_info_to_string(err, test))
            self._reportErrors([error], [], "", "%s %s" % (self.get_test_name(test), subdesc))

    def startTest(self, test):
        # Begin capturing stdout/stderr and reset the per-test error stacks.
        _PythonTextTestResult.startTest(self, test)
        self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std="both")
        self.start_time = time.time()
        self._current_errors_stack = []
        self._current_failures_stack = []

        try:
            test_name = test.__class__.__name__ + "." + test._testMethodName
        except AttributeError:
            # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
            test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName

        pydev_runfiles_xml_rpc.notifyStartTest(test.__pydev_pyfile__, test_name)

    def get_test_name(self, test):
        # Best-effort "Class.method" name; never raises (returns a marker
        # string if every strategy fails).
        try:
            try:
                test_name = test.__class__.__name__ + "." + test._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                try:
                    test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName
                # Support for class/module exceptions (test is instance of _ErrorHolder)
                except:
                    test_name = test.description.split()[1][1:-1] + " <" + test.description.split()[0] + ">"
        except:
            traceback.print_exc()
            return "<unable to get test name>"
        return test_name

    def stopTest(self, test):
        # Stop capturing, compute elapsed time and report the final outcome.
        end_time = time.time()
        pydevd_io.end_redirect(std="both")

        _PythonTextTestResult.stopTest(self, test)

        captured_output = self.buf.getvalue()
        del self.buf
        error_contents = ""
        test_name = self.get_test_name(test)

        diff_time = "%.2f" % (end_time - self.start_time)

        # Detect skips via the (CPython-specific) _outcome attribute.
        skipped = False
        outcome = getattr(test, "_outcome", None)
        if outcome is not None:
            skipped = bool(getattr(outcome, "skipped", None))

        if skipped:
            pydev_runfiles_xml_rpc.notifyTest("skip", captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        elif not self._current_errors_stack and not self._current_failures_stack:
            pydev_runfiles_xml_rpc.notifyTest("ok", captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        else:
            self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name)

    def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=""):
        # Join all tracebacks (converting exc_info tuples to text first) and
        # notify a single "error"/"fail" result for this test.
        error_contents = []
        for test, s in errors + failures:
            if type(s) == type((1,)):  # If it's a tuple (for jython 2.1)
                sio = StringIO()
                traceback.print_exception(s[0], s[1], s[2], file=sio)
                s = sio.getvalue()
            error_contents.append(s)

        sep = "\n" + self.separator1
        error_contents = sep.join(error_contents)

        if errors and not failures:
            try:
                pydev_runfiles_xml_rpc.notifyTest("error", captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
            except:
                # test may be an _ErrorHolder without __pydev_pyfile__: try to
                # pull the file name out of the traceback text instead.
                file_start = error_contents.find('File "')
                file_end = error_contents.find('", ', file_start)
                if file_start != -1 and file_end != -1:
                    file = error_contents[file_start + 6 : file_end]
                else:
                    file = "<unable to get file>"
                pydev_runfiles_xml_rpc.notifyTest("error", captured_output, error_contents, file, test_name, diff_time)

        elif failures and not errors:
            pydev_runfiles_xml_rpc.notifyTest("fail", captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

        else:  # Ok, we got both, errors and failures. Let's mark it as an error in the end.
            pydev_runfiles_xml_rpc.notifyTest("error", captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

    def addError(self, test, err):
        _PythonTextTestResult.addError(self, test, err)
        # Support for class/module exceptions (test is instance of _ErrorHolder)
        if not hasattr(self, "_current_errors_stack") or test.__class__.__name__ == "_ErrorHolder":
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([self.errors[-1]], [], "", self.get_test_name(test))
        else:
            self._current_errors_stack.append(self.errors[-1])

    def addFailure(self, test, err):
        _PythonTextTestResult.addFailure(self, test, err)
        if not hasattr(self, "_current_failures_stack"):
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([], [self.failures[-1]], "", self.get_test_name(test))
        else:
            self._current_failures_stack.append(self.failures[-1])
|
||||
|
||||
|
||||
class PydevTestSuite(python_unittest.TestSuite):
    # Plain TestSuite alias kept so callers can refer to a pydev-specific
    # suite type; no behavior is added.
    pass
|
||||
@@ -0,0 +1,260 @@
|
||||
import sys
|
||||
import threading
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
|
||||
from _pydev_bundle.pydev_imports import _queue, xmlrpclib
|
||||
from _pydevd_bundle.pydevd_constants import Null
|
||||
|
||||
Queue = _queue.Queue
|
||||
|
||||
# This may happen in IronPython (in Python it shouldn't happen as there are
|
||||
# 'fast' replacements that are used in xmlrpclib.py)
|
||||
warnings.filterwarnings("ignore", "The xmllib module is obsolete.*", DeprecationWarning)
|
||||
|
||||
file_system_encoding = getfilesystemencoding()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# _ServerHolder
|
||||
# =======================================================================================================================
|
||||
class _ServerHolder:
    """
    Helper so that we don't have to use a global here.
    """

    # Facade (or Null object) used by the module-level notify* functions.
    SERVER = None
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# set_server
|
||||
# =======================================================================================================================
|
||||
def set_server(server):
    # Install the server object used by the module-level notify* functions.
    _ServerHolder.SERVER = server
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ParallelNotification
|
||||
# =======================================================================================================================
|
||||
class ParallelNotification(object):
    """One queued XML-RPC notification: a method name plus its arguments."""

    def __init__(self, method, args):
        self.method = method
        self.args = args

    def to_tuple(self):
        """Return the (method, args) pair in the shape notifyCommands expects."""
        return (self.method, self.args)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# KillServer
|
||||
# =======================================================================================================================
|
||||
class KillServer(object):
    # Sentinel put on the notifications queue to tell ServerComm.run to flush
    # any pending commands and exit.
    pass
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ServerFacade
|
||||
# =======================================================================================================================
|
||||
class ServerFacade(object):
    """Queue-backed stand-in for the XML-RPC server.

    Each notify* call is packaged as a ParallelNotification and enqueued;
    ServerComm later drains the queue and performs the actual RPC calls.
    """

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def _enqueue(self, method, args):
        # Package and enqueue one notification without blocking.
        self.notifications_queue.put_nowait(ParallelNotification(method, args))

    def notifyTestsCollected(self, *args):
        self._enqueue("notifyTestsCollected", args)

    def notifyConnected(self, *args):
        self._enqueue("notifyConnected", args)

    def notifyTestRunFinished(self, *args):
        self._enqueue("notifyTestRunFinished", args)

    def notifyStartTest(self, *args):
        self._enqueue("notifyStartTest", args)

    def notifyTest(self, *args):
        # Strings may need re-encoding before going over XML-RPC.
        encoded = tuple(_encode_if_needed(arg) for arg in args)
        self._enqueue("notifyTest", encoded)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ServerComm
|
||||
# =======================================================================================================================
|
||||
class ServerComm(threading.Thread):
    # Background thread that drains the notifications queue and forwards
    # batches of commands to the remote XML-RPC server in single calls.

    def __init__(self, notifications_queue, port, daemon=False):
        # If daemon is False, wait for all the notifications to be passed before exiting!
        threading.Thread.__init__(self, daemon=daemon)
        self.finished = False
        self.notifications_queue = notifications_queue

        from _pydev_bundle import pydev_localhost

        # It is necessary to specify an encoding, that matches
        # the encoding of all bytes-strings passed into an
        # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the
        # packet encoding. Unicode strings are automatically converted,
        # where necessary."
        # Byte strings most likely come from file names.
        encoding = file_system_encoding
        if encoding == "mbcs":
            # Windows symbolic name for the system encoding CP_ACP.
            # We need to convert it into a encoding that is recognized by Java.
            # Unfortunately this is not always possible. You could use
            # GetCPInfoEx and get a name similar to "windows-1251". Then
            # you need a table to translate on a best effort basis. Much to complicated.
            # ISO-8859-1 is good enough.
            encoding = "ISO-8859-1"

        self.server = xmlrpclib.Server("http://%s:%s" % (pydev_localhost.get_localhost(), port), encoding=encoding)

    def run(self):
        # Block for at least one queued item, then greedily drain whatever
        # else is queued so several notifications go out in one RPC call.
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  # No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  # That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    self.server.notifyCommands(commands)
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# initialize_server
|
||||
# =======================================================================================================================
|
||||
def initialize_server(port, daemon=False):
    """Create (once) the queue-backed server facade and its comm thread.

    When *port* is None, Null objects stand in for both so callers can call
    notify* unconditionally even without a connection. Always ends by
    notifying the server that we connected.
    """
    if _ServerHolder.SERVER is None:
        if port is not None:
            notifications_queue = Queue()
            _ServerHolder.SERVER = ServerFacade(notifications_queue)
            _ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon)
            _ServerHolder.SERVER_COMM.start()
        else:
            # Create a null server, so that we keep the interface even without any connection.
            _ServerHolder.SERVER = Null()
            _ServerHolder.SERVER_COMM = Null()

    try:
        if _ServerHolder.SERVER is not None:
            _ServerHolder.SERVER.notifyConnected()
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# notifyTest
|
||||
# =======================================================================================================================
|
||||
def notifyTestsCollected(tests_count):
    """Tell the server how many tests were collected for this run."""
    assert tests_count is not None
    try:
        server = _ServerHolder.SERVER
        if server is not None:
            server.notifyTestsCollected(tests_count)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# notifyStartTest
|
||||
# =======================================================================================================================
|
||||
def notifyStartTest(file, test):
    """
    Signal that a test is about to run.

    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    """
    assert file is not None
    if test is None:
        test = ""  # Could happen if we have an import error importing module.

    try:
        server = _ServerHolder.SERVER
        if server is not None:
            server.notifyStartTest(file, test)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
def _encode_if_needed(obj):
|
||||
# In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
|
||||
if isinstance(obj, str): # Unicode in py3
|
||||
return xmlrpclib.Binary(obj.encode("ISO-8859-1", "xmlcharrefreplace"))
|
||||
|
||||
elif isinstance(obj, bytes):
|
||||
try:
|
||||
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding, "replace").encode("ISO-8859-1", "xmlcharrefreplace"))
|
||||
except:
|
||||
return xmlrpclib.Binary(obj) # bytes already
|
||||
|
||||
return obj
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# notifyTest
|
||||
# =======================================================================================================================
|
||||
def notifyTest(cond, captured_output, error_contents, file, test, time):
    """
    Report the outcome of a single test to the server.

    @param cond: ok, fail, error
    @param captured_output: output captured from stdout
    @param error_contents: output captured from stderr
    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    @param time: float with the number of seconds elapsed
    """
    if _ServerHolder.SERVER is None:
        return

    assert cond is not None
    assert captured_output is not None
    assert error_contents is not None
    assert file is not None
    if test is None:
        test = ""  # Could happen if we have an import error importing module.
    assert time is not None
    try:
        _ServerHolder.SERVER.notifyTest(
            cond,
            _encode_if_needed(captured_output),
            _encode_if_needed(error_contents),
            file,
            test,
            time,
        )
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# notifyTestRunFinished
|
||||
# =======================================================================================================================
|
||||
def notifyTestRunFinished(total_time):
    """Report the total elapsed time once the whole run is done."""
    assert total_time is not None
    try:
        server = _ServerHolder.SERVER
        if server is not None:
            server.notifyTestRunFinished(total_time)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# force_server_kill
|
||||
# =======================================================================================================================
|
||||
def force_server_kill():
    # Enqueue the KillServer sentinel: ServerComm.run flushes pending
    # notifications and then exits.
    _ServerHolder.SERVER_COMM.notifications_queue.put_nowait(KillServer())
|
||||
+606
@@ -0,0 +1,606 @@
|
||||
"""
|
||||
Run this module to regenerate the `pydevd_schema.py` file.
|
||||
|
||||
Note that it'll generate it based on the current debugProtocol.json. Erase it and rerun
|
||||
to download the latest version.
|
||||
"""
|
||||
|
||||
|
||||
def is_variable_to_translate(cls_name, var_name):
    """Return True if this property carries a DAP id that pydevd translates."""
    # These property names always carry ids, regardless of the owning class.
    if var_name in ("variablesReference", "frameId", "threadId"):
        return True

    # It's frameId everywhere except on StackFrame, and threadId everywhere
    # except on Thread, where the property is just "id".
    return (cls_name, var_name) in (("StackFrame", "id"), ("Thread", "id"))
|
||||
|
||||
|
||||
def _get_noqa_for_var(prop_name):
|
||||
return " # noqa (assign to builtin)" if prop_name in ("type", "format", "id", "hex", "breakpoint", "filter") else ""
|
||||
|
||||
|
||||
class _OrderedSet(object):
|
||||
# Not a good ordered set (just something to be small without adding any deps)
|
||||
|
||||
def __init__(self, initial_contents=None):
|
||||
self._contents = []
|
||||
self._contents_as_set = set()
|
||||
if initial_contents is not None:
|
||||
for x in initial_contents:
|
||||
self.add(x)
|
||||
|
||||
def add(self, x):
|
||||
if x not in self._contents_as_set:
|
||||
self._contents_as_set.add(x)
|
||||
self._contents.append(x)
|
||||
|
||||
def discard(self, x):
|
||||
if x in self._contents_as_set:
|
||||
self._contents_as_set.remove(x)
|
||||
self._contents.remove(x)
|
||||
|
||||
def copy(self):
|
||||
return _OrderedSet(self._contents)
|
||||
|
||||
def update(self, contents):
|
||||
for x in contents:
|
||||
self.add(x)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._contents)
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self._contents_as_set
|
||||
|
||||
def __len__(self):
|
||||
return len(self._contents)
|
||||
|
||||
def set_repr(self):
|
||||
if len(self) == 0:
|
||||
return "set()"
|
||||
|
||||
lst = [repr(x) for x in self]
|
||||
return "set([" + ", ".join(lst) + "])"
|
||||
|
||||
|
||||
class Ref(object):
    """A resolved '$ref' pointer to another schema definition.

    str(ref) yields the referenced class name; ref_data holds that class's
    generation spec dict.
    """

    def __init__(self, ref, ref_data):
        self.ref = ref
        self.ref_data = ref_data

    def __str__(self):
        return self.ref
|
||||
|
||||
|
||||
def load_schema_data():
    """Load debugProtocol.json from this directory.

    If the file is missing, download it once from the debug-adapter-protocol
    repository and cache it next to this module; returns the parsed dict.
    """
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), "debugProtocol.json")
    if not os.path.exists(json_file):
        import requests

        req = requests.get("https://raw.githubusercontent.com/microsoft/debug-adapter-protocol/gh-pages/debugAdapterProtocol.json")
        assert req.status_code == 200
        with open(json_file, "wb") as stream:
            stream.write(req.content)

    with open(json_file, "rb") as json_contents:
        json_schema_data = json.loads(json_contents.read())
    return json_schema_data
|
||||
|
||||
|
||||
def load_custom_schema_data():
    """Load and parse the pydevd-specific debugProtocolCustom.json (must exist
    next to this module; no download fallback)."""
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), "debugProtocolCustom.json")

    with open(json_file, "rb") as json_contents:
        json_schema_data = json.loads(json_contents.read())
    return json_schema_data
|
||||
|
||||
|
||||
def create_classes_to_generate_structure(json_schema_data):
    """Convert the raw JSON-schema 'definitions' into a name -> spec dict.

    Each spec contains: name, properties, base_definitions (the resolved
    'allOf' $refs), description, required (an _OrderedSet), is_enum and
    enum_values.
    """
    definitions = json_schema_data["definitions"]

    class_to_generatees = {}

    for name, definition in definitions.items():
        all_of = definition.get("allOf")
        description = definition.get("description")
        is_enum = definition.get("type") == "string" and "enum" in definition
        enum_values = None
        if is_enum:
            enum_values = definition["enum"]
        properties = {}
        properties.update(definition.get("properties", {}))
        required = _OrderedSet(definition.get("required", _OrderedSet()))
        base_definitions = []

        if all_of is not None:
            # NOTE: the inner loop deliberately shadows the outer 'definition';
            # each allOf entry is either a $ref (recorded as a base class) or
            # an inline extension merged into this class's properties.
            for definition in all_of:
                ref = definition.get("$ref")
                if ref is not None:
                    assert ref.startswith("#/definitions/")
                    ref = ref[len("#/definitions/") :]
                    base_definitions.append(ref)
                else:
                    if not description:
                        description = definition.get("description")
                    properties.update(definition.get("properties", {}))
                    required.update(_OrderedSet(definition.get("required", _OrderedSet())))

        if isinstance(description, (list, tuple)):
            description = "\n".join(description)

        if name == "ModulesRequest":  # Hack to accept modules request without arguments (ptvsd: 2050).
            required.discard("arguments")
        class_to_generatees[name] = dict(
            name=name,
            properties=properties,
            base_definitions=base_definitions,
            description=description,
            required=required,
            is_enum=is_enum,
            enum_values=enum_values,
        )
    return class_to_generatees
|
||||
|
||||
|
||||
def collect_bases(curr_class, classes_to_generate, memo=None):
    """Return the transitive base-definition names of *curr_class*.

    Bases appear in declaration order, each immediately followed by its own
    bases (pre-order). *memo* tracks names already collected so shared bases
    (diamond inheritance) are listed only once and a cyclic schema cannot
    recurse forever.
    """
    ret = []
    if memo is None:
        memo = {}

    base_definitions = curr_class["base_definitions"]
    for base_definition in base_definitions:
        if base_definition not in memo:
            # Bug fix: the memo was previously checked but never written, so
            # it deduplicated nothing and offered no cycle protection.
            memo[base_definition] = True
            ret.append(base_definition)
            ret.extend(collect_bases(classes_to_generate[base_definition], classes_to_generate, memo))

    return ret
|
||||
|
||||
|
||||
def fill_properties_and_required_from_base(classes_to_generate):
    """Flatten inherited 'properties' and 'required' into each class spec.

    Bases are applied first (reversed collection order) so the class's own
    entries win and the property ordering starts from the root base.
    """
    # Now, resolve properties based on refs
    for class_to_generate in classes_to_generate.values():
        dct = {}
        s = _OrderedSet()

        for base_definition in reversed(collect_bases(class_to_generate, classes_to_generate)):
            # Note: go from base to current so that the initial order of the properties has that
            # same order.
            dct.update(classes_to_generate[base_definition].get("properties", {}))
            s.update(classes_to_generate[base_definition].get("required", _OrderedSet()))

        dct.update(class_to_generate["properties"])
        class_to_generate["properties"] = dct

        s.update(class_to_generate["required"])
        class_to_generate["required"] = s

    # NOTE(review): this returns only the last spec processed; callers appear
    # to rely on the in-place mutation rather than the return value — confirm.
    return class_to_generate
|
||||
|
||||
|
||||
def update_class_to_generate_description(class_to_generate):
    """Re-wrap the description to ~100 columns and indent it for a docstring."""
    import textwrap

    wrapped_lines = []
    for raw_line in class_to_generate["description"].splitlines():
        wrapped_lines.extend(textwrap.wrap(raw_line.strip(), 100))
        # Blank separator after each original line (trimmed below).
        wrapped_lines.append("")

    # Drop the trailing blank lines left over from the loop above.
    while wrapped_lines and not wrapped_lines[-1]:
        wrapped_lines.pop()

    class_to_generate["description"] = " " + ("\n ".join(wrapped_lines))
|
||||
|
||||
|
||||
def update_class_to_generate_type(classes_to_generate, class_to_generate):
    """Replace each property's '$ref' with a Ref to the target class spec."""
    properties = class_to_generate.get("properties")
    for _prop_name, prop_val in properties.items():
        prop_type = prop_val.get("type", "")
        if not prop_type:
            # No inline type: it must reference another definition by $ref.
            prop_type = prop_val.pop("$ref", "")
            if prop_type:
                assert prop_type.startswith("#/definitions/")
                prop_type = prop_type[len("#/definitions/") :]
                prop_val["type"] = Ref(prop_type, classes_to_generate[prop_type])
|
||||
|
||||
|
||||
def update_class_to_generate_register_dec(classes_to_generate, class_to_generate):
    """Fill in 'register_request' / 'register_dec' for the code template.

    Request/response/event classes whose command (or event) name is fixed in
    the schema additionally get a @register_<kind>('<name>') decorator line.
    """
    # Defaults: every generated class is at least plain-@register'ed.
    class_to_generate["register_request"] = ""
    class_to_generate["register_dec"] = "@register"

    properties = class_to_generate.get("properties")
    enum_type = properties.get("type", {}).get("enum")
    if not enum_type or len(enum_type) != 1:
        return
    msg_type = next(iter(enum_type))
    if msg_type not in ("request", "response", "event"):
        return

    if msg_type == "request":
        command = properties.get("command")
    elif msg_type == "event":
        command = properties.get("event")
    else:
        # response: the actual command is typed on the matching request.
        response_name = class_to_generate["name"]
        request_name = response_name[: -len("Response")] + "Request"
        if request_name in classes_to_generate:
            command = classes_to_generate[request_name]["properties"].get("command")
        elif response_name == "ErrorResponse":
            command = {"enum": ["error"]}
        else:
            raise AssertionError("Unhandled: %s" % (response_name,))

    if command:
        enum = command.get("enum")
        if enum and len(enum) == 1:
            class_to_generate["register_request"] = "@register_%s(%r)\n" % (msg_type, enum[0])
|
||||
|
||||
|
||||
def extract_prop_name_and_prop(class_to_generate):
    """Return the class's (prop_name, prop) pairs with required ones first.

    Within the required group, 'seq' sorts last so the generated __init__ can
    still give it a default of -1. The sort is stable, so original schema
    order is preserved within each group.
    """
    properties = class_to_generate.get("properties")
    required = _OrderedSet(class_to_generate.get("required", _OrderedSet()))

    # Sort so that required come first
    prop_name_and_prop = list(properties.items())

    def compute_sort_key(x):
        key = x[0]
        if key in required:
            if key == "seq":
                return 0.5  # seq when required is after the other required keys (to have a default of -1).
            return 0
        return 1

    prop_name_and_prop.sort(key=compute_sort_key)

    return prop_name_and_prop
|
||||
|
||||
|
||||
def update_class_to_generate_to_json(class_to_generate):
    """Generate the source for the class's to_dict method (and id helpers).

    Builds the body of to_dict(update_ids_to_dap=False) line by line and,
    when the class has translatable id properties, the
    update_dict_ids_from_dap / update_dict_ids_to_dap classmethods; the
    indented source strings are stored on class_to_generate.
    """
    required = _OrderedSet(class_to_generate.get("required", _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    to_dict_body = ["def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused)"]

    # Properties whose values are DAP ids needing translation.
    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate["name"], prop_name):
            translate_prop_names.append(prop_name)

    # Bind each property to a local first (arrays of objects are converted
    # to dicts eagerly).
    for prop_name, prop in prop_name_and_prop:
        namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
        to_dict_body.append(" %(prop_name)s = self.%(prop_name)s%(noqa)s" % namespace)

        if prop.get("type") == "array":
            to_dict_body.append(' if %(prop_name)s and hasattr(%(prop_name)s[0], "to_dict"):' % namespace)
            to_dict_body.append(" %(prop_name)s = [x.to_dict() for x in %(prop_name)s]" % namespace)

    if translate_prop_names:
        to_dict_body.append(" if update_ids_to_dap:")
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
            to_dict_body.append(" if %(prop_name)s is not None:" % namespace)
            to_dict_body.append(" %(prop_name)s = self._translate_id_to_dap(%(prop_name)s)%(noqa)s" % namespace)

    # Companion classmethod translating ids in an incoming (DAP) dict.
    if not translate_prop_names:
        update_dict_ids_from_dap_body = []
    else:
        update_dict_ids_from_dap_body = ["", "", "@classmethod", "def update_dict_ids_from_dap(cls, dct):"]
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_from_dap_body.append(" if %(prop_name)r in dct:" % namespace)
            update_dict_ids_from_dap_body.append(" dct[%(prop_name)r] = cls._translate_id_from_dap(dct[%(prop_name)r])" % namespace)
        update_dict_ids_from_dap_body.append(" return dct")

    class_to_generate["update_dict_ids_from_dap"] = _indent_lines("\n".join(update_dict_ids_from_dap_body))

    # Required properties go into the dict literal; optional ones are added
    # afterwards only when not None.
    to_dict_body.append(" dct = {")
    first_not_required = False

    for prop_name, prop in prop_name_and_prop:
        use_to_dict = prop["type"].__class__ == Ref and not prop["type"].ref_data.get("is_enum", False)
        is_array = prop["type"] == "array"
        ref_array_cls_name = ""
        if is_array:
            ref = prop["items"].get("$ref")
            if ref is not None:
                ref_array_cls_name = ref.split("/")[-1]

        namespace = dict(prop_name=prop_name, ref_array_cls_name=ref_array_cls_name)
        if prop_name in required:
            if use_to_dict:
                to_dict_body.append(" %(prop_name)r: %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap)," % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append(
                        " %(prop_name)r: [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s,"
                        % namespace
                    )
                else:
                    to_dict_body.append(" %(prop_name)r: %(prop_name)s," % namespace)
        else:
            if not first_not_required:
                first_not_required = True
                to_dict_body.append(" }")

            to_dict_body.append(" if %(prop_name)s is not None:" % namespace)
            if use_to_dict:
                to_dict_body.append(" dct[%(prop_name)r] = %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap)" % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append(
                        " dct[%(prop_name)r] = [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s"
                        % namespace
                    )
                else:
                    to_dict_body.append(" dct[%(prop_name)r] = %(prop_name)s" % namespace)

    # Close the dict literal if every property was required.
    if not first_not_required:
        first_not_required = True
        to_dict_body.append(" }")

    to_dict_body.append(" dct.update(self.kwargs)")
    to_dict_body.append(" return dct")

    class_to_generate["to_dict"] = _indent_lines("\n".join(to_dict_body))

    # Companion classmethod translating ids in an outgoing (DAP) dict.
    if not translate_prop_names:
        update_dict_ids_to_dap_body = []
    else:
        update_dict_ids_to_dap_body = ["", "", "@classmethod", "def update_dict_ids_to_dap(cls, dct):"]
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_to_dap_body.append(" if %(prop_name)r in dct:" % namespace)
            update_dict_ids_to_dap_body.append(" dct[%(prop_name)r] = cls._translate_id_to_dap(dct[%(prop_name)r])" % namespace)
        update_dict_ids_to_dap_body.append(" return dct")

    class_to_generate["update_dict_ids_to_dap"] = _indent_lines("\n".join(update_dict_ids_to_dap_body))
|
||||
|
||||
|
||||
def update_class_to_generate_init(class_to_generate):
    """Build the generated __init__ source and store it in class_to_generate['init'].

    Walks the schema properties to compute the __init__ signature (required
    props are positional, optional props default to None, 'seq' defaults to
    -1), the generated docstring (:param: lines) and the body (plain
    assignments, Ref instantiation, enum validation and dap-id translation).
    """
    args = []
    init_body = []
    docstring = []

    required = _OrderedSet(class_to_generate.get("required", _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    # Properties holding ids that must be translated between the
    # client-visible (DAP) ids and the internal debugger ids.
    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate["name"], prop_name):
            translate_prop_names.append(prop_name)

        enum = prop.get("enum")
        if enum and len(enum) == 1:
            # Single-valued enums are hard-coded, not accepted as arguments.
            init_body.append("    self.%(prop_name)s = %(enum)r" % dict(prop_name=prop_name, enum=next(iter(enum))))
        else:
            if prop_name in required:
                if prop_name == "seq":
                    # 'seq' is filled in right before sending, so -1 works as a
                    # placeholder even though it is a required property.
                    args.append(prop_name + "=-1")
                else:
                    args.append(prop_name)
            else:
                args.append(prop_name + "=None")

            if prop["type"].__class__ == Ref:
                ref = prop["type"]
                ref_data = ref.ref_data
                if ref_data.get("is_enum", False):
                    # Enum refs: validate the received value instead of
                    # instantiating a nested schema class.
                    init_body.append("    if %s is not None:" % (prop_name,))
                    init_body.append("        assert %s in %s.VALID_VALUES" % (prop_name, str(ref)))
                    init_body.append("    self.%(prop_name)s = %(prop_name)s" % dict(prop_name=prop_name))
                else:
                    # Plain refs: wrap an incoming dict into the referenced
                    # schema class (unless it is already an instance of it).
                    namespace = dict(prop_name=prop_name, ref_name=str(ref))
                    init_body.append("    if %(prop_name)s is None:" % namespace)
                    init_body.append("        self.%(prop_name)s = %(ref_name)s()" % namespace)
                    init_body.append("    else:")
                    init_body.append(
                        "        self.%(prop_name)s = %(ref_name)s(update_ids_from_dap=update_ids_from_dap, **%(prop_name)s) if %(prop_name)s.__class__ != %(ref_name)s else %(prop_name)s"
                        % namespace
                    )

            else:
                init_body.append("    self.%(prop_name)s = %(prop_name)s" % dict(prop_name=prop_name))

                if prop["type"] == "array":
                    ref = prop["items"].get("$ref")
                    if ref is not None:
                        # Arrays of refs: translate the dap ids of each item.
                        ref_array_cls_name = ref.split("/")[-1]
                        init_body.append("    if update_ids_from_dap and self.%(prop_name)s:" % dict(prop_name=prop_name))
                        init_body.append("        for o in self.%(prop_name)s:" % dict(prop_name=prop_name))
                        init_body.append(
                            "            %(ref_array_cls_name)s.update_dict_ids_from_dap(o)" % dict(ref_array_cls_name=ref_array_cls_name)
                        )

        prop_type = prop["type"]
        prop_description = prop.get("description", "")

        if isinstance(prop_description, (list, tuple)):
            prop_description = "\n    ".join(prop_description)

        docstring.append(
            ":param %(prop_type)s %(prop_name)s: %(prop_description)s"
            % dict(prop_type=prop_type, prop_name=prop_name, prop_description=prop_description)
        )

    if translate_prop_names:
        init_body.append("    if update_ids_from_dap:")
        for prop_name in translate_prop_names:
            init_body.append("        self.%(prop_name)s = self._translate_id_from_dap(self.%(prop_name)s)" % dict(prop_name=prop_name))

    docstring = _indent_lines("\n".join(docstring))
    init_body = "\n".join(init_body)

    # Actually bundle the whole __init__ from the parts.
    args = ", ".join(args)
    if args:
        args = ", " + args

    # Note: added kwargs because some messages are expected to be extended by the user (so, we'll actually
    # make all extendable so that we don't have to worry about which ones -- we loose a little on typing,
    # but may be better than doing a allow list based on something only pointed out in the documentation).
    class_to_generate[
        "init"
    ] = '''def __init__(self%(args)s, update_ids_from_dap=False, **kwargs):  # noqa (update_ids_from_dap may be unused)
    """
%(docstring)s
    """
%(init_body)s
    self.kwargs = kwargs
''' % dict(args=args, init_body=init_body, docstring=docstring)

    class_to_generate["init"] = _indent_lines(class_to_generate["init"])
|
||||
|
||||
|
||||
def update_class_to_generate_props(class_to_generate):
    """Fill class_to_generate['props'] with the generated __props__ assignment.

    The properties dict is serialized as JSON (Ref objects collapse to their
    ref string, JSON 'true' is rewritten to Python 'True') and indented so it
    sits inside the generated class body.
    """
    import json

    def _serialize(obj):
        # json.dumps fallback: only Ref instances are expected here.
        if isinstance(obj, Ref):
            return obj.ref
        raise AssertionError("Unhandled: %s" % (obj,))

    dumped = json.dumps(class_to_generate["properties"], indent=4, default=_serialize)
    as_python = dumped.replace("true", "True")
    class_to_generate["props"] = "    __props__ = %s" % _indent_lines(as_python).strip()
|
||||
|
||||
|
||||
def update_class_to_generate_refs(class_to_generate):
    """Fill class_to_generate['refs'] with the __refs__ assignment listing the
    property names whose type is a Ref (i.e.: nested schema objects)."""
    keys_with_refs = (key for (key, val) in class_to_generate["properties"].items() if val["type"].__class__ == Ref)
    class_to_generate["refs"] = "    __refs__ = %s" % _OrderedSet(keys_with_refs).set_repr()
|
||||
|
||||
|
||||
def update_class_to_generate_enums(class_to_generate):
    """Fill class_to_generate['enums'] for enum schemas: one upper-cased
    constant per value plus a VALID_VALUES set; empty string for non-enums."""
    if not class_to_generate.get("is_enum", False):
        class_to_generate["enums"] = ""
        return

    values = class_to_generate["enum_values"]
    lines = ["    %s = %r\n" % (value.upper(), value) for value in values]
    lines.append("\n")
    lines.append("    VALID_VALUES = %s\n\n" % _OrderedSet(values).set_repr())
    class_to_generate["enums"] = "".join(lines)
|
||||
|
||||
|
||||
def update_class_to_generate_objects(classes_to_generate, class_to_generate):
    """Promote inline 'object' properties to standalone generated classes.

    Each property typed 'object' gets its own class named
    <OwnerName><PropTitle>; the property's type is then replaced by a Ref to
    the new class and its inline 'properties' are dropped.  Properties with
    no 'type' at all are tagged 'TypeNA'.  Recurses so nested objects are
    promoted as well.
    """
    properties = class_to_generate["properties"]
    for key, val in properties.items():
        if "type" not in val:
            val["type"] = "TypeNA"
            continue

        if val["type"] == "object":
            create_new = val.copy()
            create_new.update(
                {
                    "name": "%s%s" % (class_to_generate["name"], key.title()),
                    "description": '    "%s" of %s' % (key, class_to_generate["name"]),
                }
            )
            if "properties" not in create_new:
                create_new["properties"] = {}

            # Generated names must be unique across the whole schema.
            assert create_new["name"] not in classes_to_generate
            classes_to_generate[create_new["name"]] = create_new

            update_class_to_generate_type(classes_to_generate, create_new)
            update_class_to_generate_props(create_new)

            # Update nested object types
            update_class_to_generate_objects(classes_to_generate, create_new)

            val["type"] = Ref(create_new["name"], classes_to_generate[create_new["name"]])
            val.pop("properties", None)
|
||||
|
||||
|
||||
def gen_debugger_protocol():
    """Generate pydevd_schema.py from the standard DAP schema plus the custom one.

    Loads both json schemas, normalizes every class (descriptions, types,
    inline objects, refs, __init__, enums, to_json, register decorators),
    renders each class through `class_template` and writes the result next to
    this file.
    """
    import os.path
    import sys

    if sys.version_info[:2] < (3, 6):
        raise AssertionError("Must be run with Python 3.6 onwards (to keep dict order).")

    classes_to_generate = create_classes_to_generate_structure(load_schema_data())
    # Custom (pydevd-specific) messages extend the standard DAP ones.
    classes_to_generate.update(create_classes_to_generate_structure(load_custom_schema_data()))

    # NOTE(review): the return value is immediately shadowed by the loop
    # variable below -- presumably only the in-place mutation of
    # classes_to_generate matters; confirm.
    class_to_generate = fill_properties_and_required_from_base(classes_to_generate)

    # First pass: normalize descriptions/types and promote inline objects
    # (promotion adds new entries, hence the list() copy).
    for class_to_generate in list(classes_to_generate.values()):
        update_class_to_generate_description(class_to_generate)
        update_class_to_generate_type(classes_to_generate, class_to_generate)
        update_class_to_generate_props(class_to_generate)
        update_class_to_generate_objects(classes_to_generate, class_to_generate)

    # Second pass: render the per-class code fragments.
    for class_to_generate in classes_to_generate.values():
        update_class_to_generate_refs(class_to_generate)
        update_class_to_generate_init(class_to_generate)
        update_class_to_generate_enums(class_to_generate)
        update_class_to_generate_to_json(class_to_generate)
        update_class_to_generate_register_dec(classes_to_generate, class_to_generate)

    # %-template for one generated class; the fragments computed above are
    # substituted by name.
    class_template = '''
%(register_request)s%(register_dec)s
class %(name)s(BaseSchema):
    """
%(description)s

    Note: automatically generated code. Do not edit manually.
    """

%(enums)s%(props)s
%(refs)s

    __slots__ = list(__props__.keys()) + ['kwargs']

%(init)s%(update_dict_ids_from_dap)s

%(to_dict)s%(update_dict_ids_to_dap)s
'''

    contents = []
    contents.append("# coding: utf-8")
    contents.append("# Automatically generated code.")
    contents.append("# Do not edit manually.")
    contents.append("# Generated by running: %s" % os.path.basename(__file__))
    contents.append("from .pydevd_base_schema import BaseSchema, register, register_request, register_response, register_event")
    contents.append("")
    for class_to_generate in classes_to_generate.values():
        contents.append(class_template % class_to_generate)

    parent_dir = os.path.dirname(__file__)
    schema = os.path.join(parent_dir, "pydevd_schema.py")
    with open(schema, "w", encoding="utf-8") as stream:
        stream.write("\n".join(contents))
|
||||
|
||||
|
||||
def _indent_lines(lines, indent=" "):
|
||||
out_lines = []
|
||||
for line in lines.splitlines(keepends=True):
|
||||
out_lines.append(indent + line)
|
||||
|
||||
return "".join(out_lines)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Regenerate pydevd_schema.py from the json schemas when run as a script.
    gen_debugger_protocol()
|
||||
+4190
File diff suppressed because it is too large
Load Diff
+325
@@ -0,0 +1,325 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-04/schema#",
|
||||
"title": "Custom Debug Adapter Protocol",
|
||||
"description": "Extension to the DAP to support additional features.",
|
||||
"type": "object",
|
||||
|
||||
|
||||
"definitions": {
|
||||
|
||||
"SetDebuggerPropertyRequest": {
|
||||
"allOf": [ { "$ref": "#/definitions/Request" }, {
|
||||
"type": "object",
|
||||
"description": "The request can be used to enable or disable debugger features.",
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string",
|
||||
"enum": [ "setDebuggerProperty" ]
|
||||
},
|
||||
"arguments": {
|
||||
"$ref": "#/definitions/SetDebuggerPropertyArguments"
|
||||
}
|
||||
},
|
||||
"required": [ "command", "arguments" ]
|
||||
}]
|
||||
},
|
||||
"SetDebuggerPropertyArguments": {
|
||||
"type": "object",
|
||||
"description": "Arguments for 'setDebuggerProperty' request.",
|
||||
"properties": {
|
||||
"ideOS": {
|
||||
"type": [ "string" ],
|
||||
"description": "OS where the ide is running. Supported values [Windows, Linux]"
|
||||
},
|
||||
"dontTraceStartPatterns": {
|
||||
"type": [ "array" ],
|
||||
"description": "Patterns to match with the start of the file paths. Matching paths will be added to a list of file where trace is ignored."
|
||||
},
|
||||
"dontTraceEndPatterns": {
|
||||
"type": [ "array" ],
|
||||
"description": "Patterns to match with the end of the file paths. Matching paths will be added to a list of file where trace is ignored."
|
||||
},
|
||||
"skipSuspendOnBreakpointException": {
|
||||
"type": [ "array" ],
|
||||
"description": "List of exceptions that should be skipped when doing condition evaluations."
|
||||
},
|
||||
"skipPrintBreakpointException": {
|
||||
"type": [ "array" ],
|
||||
"description": "List of exceptions that should skip printing to stderr when doing condition evaluations."
|
||||
},
|
||||
"multiThreadsSingleNotification": {
|
||||
"type": [ "boolean" ],
|
||||
"description": "If false then a notification is generated for each thread event. If true a single event is gnenerated, and all threads follow that behavior."
|
||||
}
|
||||
}
|
||||
},
|
||||
"SetDebuggerPropertyResponse": {
|
||||
"allOf": [ { "$ref": "#/definitions/Response" }, {
|
||||
"type": "object",
|
||||
"description": "Response to 'setDebuggerProperty' request. This is just an acknowledgement, so no body field is required."
|
||||
}]
|
||||
},
|
||||
|
||||
"PydevdInputRequestedEvent": {
|
||||
"allOf": [ { "$ref": "#/definitions/Event" }, {
|
||||
"type": "object",
|
||||
"description": "The event indicates input was requested by debuggee.",
|
||||
"properties": {
|
||||
"event": {
|
||||
"type": "string",
|
||||
"enum": [ "pydevdInputRequested" ]
|
||||
}
|
||||
},
|
||||
"required": [ "event" ]
|
||||
}]
|
||||
},
|
||||
|
||||
"SetPydevdSourceMapRequest": {
|
||||
"allOf": [ { "$ref": "#/definitions/Request" }, {
|
||||
"type": "object",
|
||||
"description": [
|
||||
"Sets multiple PydevdSourceMap for a single source and clears all previous PydevdSourceMap in that source.",
|
||||
"i.e.: Maps paths and lines in a 1:N mapping (use case: map a single file in the IDE to multiple IPython cells).",
|
||||
"To clear all PydevdSourceMap for a source, specify an empty array.",
|
||||
"Interaction with breakpoints: When a new mapping is sent, breakpoints that match the source (or previously matched a source) are reapplied.",
|
||||
"Interaction with launch pathMapping: both mappings are independent. This mapping is applied after the launch pathMapping."
|
||||
],
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string",
|
||||
"enum": [ "setPydevdSourceMap" ]
|
||||
},
|
||||
"arguments": {
|
||||
"$ref": "#/definitions/SetPydevdSourceMapArguments"
|
||||
}
|
||||
},
|
||||
"required": [ "command", "arguments" ]
|
||||
}]
|
||||
},
|
||||
"SetPydevdSourceMapArguments": {
|
||||
"type": "object",
|
||||
"description": "Arguments for 'setPydevdSourceMap' request.",
|
||||
"properties": {
|
||||
"source": {
|
||||
"$ref": "#/definitions/Source",
|
||||
"description": "The source location of the PydevdSourceMap; 'source.path' must be specified (e.g.: for an ipython notebook this could be something as /home/notebook/note.py)."
|
||||
},
|
||||
"pydevdSourceMaps": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/PydevdSourceMap"
|
||||
},
|
||||
"description": "The PydevdSourceMaps to be set to the given source (provide an empty array to clear the source mappings for a given path)."
|
||||
}
|
||||
},
|
||||
"required": [ "source", "pydevdSourceMap" ]
|
||||
},
|
||||
"SetPydevdSourceMapResponse": {
|
||||
"allOf": [ { "$ref": "#/definitions/Response" }, {
|
||||
"type": "object",
|
||||
"description": "Response to 'setPydevdSourceMap' request. This is just an acknowledgement, so no body field is required."
|
||||
}]
|
||||
},
|
||||
|
||||
"PydevdSourceMap": {
|
||||
"type": "object",
|
||||
"description": "Information that allows mapping a local line to a remote source/line.",
|
||||
"properties": {
|
||||
"line": {
|
||||
"type": "integer",
|
||||
"description": "The local line to which the mapping should map to (e.g.: for an ipython notebook this would be the first line of the cell in the file)."
|
||||
},
|
||||
"endLine": {
|
||||
"type": "integer",
|
||||
"description": "The end line."
|
||||
},
|
||||
"runtimeSource": {
|
||||
"$ref": "#/definitions/Source",
|
||||
"description": "The path that the user has remotely -- 'source.path' must be specified (e.g.: for an ipython notebook this could be something as '<ipython-input-1-4561234>')"
|
||||
},
|
||||
"runtimeLine": {
|
||||
"type": "integer",
|
||||
"description": "The remote line to which the mapping should map to (e.g.: for an ipython notebook this would be always 1 as it'd map the start of the cell)."
|
||||
}
|
||||
},
|
||||
"required": ["line", "endLine", "runtimeSource", "runtimeLine"]
|
||||
},
|
||||
|
||||
"PydevdSystemInfoRequest": {
|
||||
"allOf": [ { "$ref": "#/definitions/Request" }, {
|
||||
"type": "object",
|
||||
"description": "The request can be used retrieve system information, python version, etc.",
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string",
|
||||
"enum": [ "pydevdSystemInfo" ]
|
||||
},
|
||||
"arguments": {
|
||||
"$ref": "#/definitions/PydevdSystemInfoArguments"
|
||||
}
|
||||
},
|
||||
"required": [ "command" ]
|
||||
}]
|
||||
},
|
||||
"PydevdSystemInfoArguments": {
|
||||
"type": "object",
|
||||
"description": "Arguments for 'pydevdSystemInfo' request."
|
||||
},
|
||||
"PydevdSystemInfoResponse": {
|
||||
"allOf": [ { "$ref": "#/definitions/Response" }, {
|
||||
"type": "object",
|
||||
"description": "Response to 'pydevdSystemInfo' request.",
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"python": {
|
||||
"$ref": "#/definitions/PydevdPythonInfo",
|
||||
"description": "Information about the python version running in the current process."
|
||||
},
|
||||
"platform": {
|
||||
"$ref": "#/definitions/PydevdPlatformInfo",
|
||||
"description": "Information about the plarforn on which the current process is running."
|
||||
},
|
||||
"process": {
|
||||
"$ref": "#/definitions/PydevdProcessInfo",
|
||||
"description": "Information about the current process."
|
||||
},
|
||||
"pydevd": {
|
||||
"$ref": "#/definitions/PydevdInfo",
|
||||
"description": "Information about pydevd."
|
||||
}
|
||||
},
|
||||
"required": [ "python", "platform", "process", "pydevd" ]
|
||||
}
|
||||
},
|
||||
"required": [ "body" ]
|
||||
}]
|
||||
},
|
||||
|
||||
"PydevdPythonInfo": {
|
||||
"type": "object",
|
||||
"description": "This object contains python version and implementation details.",
|
||||
"properties": {
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Python version as a string in semver format: <major>.<minor>.<micro><releaselevel><serial>."
|
||||
},
|
||||
"implementation": {
|
||||
"$ref": "#/definitions/PydevdPythonImplementationInfo",
|
||||
"description": "Python version as a string in this format <major>.<minor>.<micro><releaselevel><serial>."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PydevdPythonImplementationInfo": {
|
||||
"type": "object",
|
||||
"description": "This object contains python implementation details.",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Python implementation name."
|
||||
},
|
||||
"version": {
|
||||
"type": "string",
|
||||
"description": "Python version as a string in semver format: <major>.<minor>.<micro><releaselevel><serial>."
|
||||
},
|
||||
"description": {
|
||||
"type": "string",
|
||||
"description": "Optional description for this python implementation."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PydevdPlatformInfo": {
|
||||
"type": "object",
|
||||
"description": "This object contains python version and implementation details.",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Name of the platform as returned by 'sys.platform'."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PydevdProcessInfo": {
|
||||
"type": "object",
|
||||
"description": "This object contains python process details.",
|
||||
"properties": {
|
||||
"pid": {
|
||||
"type": "integer",
|
||||
"description": "Process ID for the current process."
|
||||
},
|
||||
"ppid": {
|
||||
"type": "integer",
|
||||
"description": "Parent Process ID for the current process."
|
||||
},
|
||||
"executable": {
|
||||
"type": "string",
|
||||
"description": "Path to the executable as returned by 'sys.executable'."
|
||||
},
|
||||
"bitness": {
|
||||
"type": "integer",
|
||||
"description": "Integer value indicating the bitness of the current process."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PydevdInfo": {
|
||||
"type": "object",
|
||||
"description": "This object contains details on pydevd.",
|
||||
"properties": {
|
||||
"usingCython": {
|
||||
"type": "boolean",
|
||||
"description": "Specifies whether the cython native module is being used."
|
||||
},
|
||||
"usingFrameEval": {
|
||||
"type": "boolean",
|
||||
"description": "Specifies whether the frame eval native module is being used."
|
||||
}
|
||||
}
|
||||
},
|
||||
"PydevdAuthorizeRequest": {
|
||||
"allOf": [ { "$ref": "#/definitions/Request" }, {
|
||||
"type": "object",
|
||||
"description": "A request to authorize the ide to start accepting commands.",
|
||||
"properties": {
|
||||
"command": {
|
||||
"type": "string",
|
||||
"enum": [ "pydevdAuthorize" ]
|
||||
},
|
||||
"arguments": {
|
||||
"$ref": "#/definitions/PydevdAuthorizeArguments"
|
||||
}
|
||||
},
|
||||
"required": [ "command", "arguments" ]
|
||||
}]
|
||||
},
|
||||
"PydevdAuthorizeArguments": {
|
||||
"type": "object",
|
||||
"description": "Arguments for 'pydevdAuthorize' request.",
|
||||
"properties": {
|
||||
"debugServerAccessToken": {
|
||||
"type": "string" ,
|
||||
"description": "The access token to access the debug server."
|
||||
}
|
||||
},
|
||||
"required": [ "command" ]
|
||||
},
|
||||
"PydevdAuthorizeResponse": {
|
||||
"allOf": [ { "$ref": "#/definitions/Response" }, {
|
||||
"type": "object",
|
||||
"description": "Response to 'pydevdAuthorize' request.",
|
||||
"properties": {
|
||||
"body": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"clientAccessToken": {
|
||||
"type": "string",
|
||||
"description": "The access token to access the client (i.e.: usually the IDE)."
|
||||
}
|
||||
},
|
||||
"required": [ "clientAccessToken" ]
|
||||
}
|
||||
},
|
||||
"required": [ "body" ]
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
+143
@@ -0,0 +1,143 @@
|
||||
from _pydevd_bundle._debug_adapter.pydevd_schema_log import debug_exception
|
||||
import json
|
||||
import itertools
|
||||
from functools import partial
|
||||
|
||||
|
||||
class BaseSchema(object):
    """Base class for all generated DAP message classes.

    Holds the global translation tables between ids seen by the DAP client
    and the debugger's internal object ids (0 and None always map to
    themselves; new DAP ids are handed out sequentially starting at 1).
    """

    @staticmethod
    def initialize_ids_translation():
        """Reset the dap<->internal id translation tables and the id counter."""
        BaseSchema._dap_id_to_obj_id = {0: 0, None: None}
        BaseSchema._obj_id_to_dap_id = {0: 0, None: None}
        BaseSchema._next_dap_id = partial(next, itertools.count(1))

    def to_json(self):
        """Serialize this message to JSON (to_dict is provided by subclasses)."""
        return json.dumps(self.to_dict())

    @staticmethod
    def _translate_id_to_dap(obj_id):
        """Return the DAP id for *obj_id*, allocating a new one if needed ('*' passes through)."""
        if obj_id == "*":
            return "*"
        # Note: we don't invalidate ids, so, if some object starts using the same id
        # of another object, the same id will be used.
        dap_id = BaseSchema._obj_id_to_dap_id.get(obj_id)
        if dap_id is None:
            dap_id = BaseSchema._obj_id_to_dap_id[obj_id] = BaseSchema._next_dap_id()
            BaseSchema._dap_id_to_obj_id[dap_id] = obj_id
        return dap_id

    @staticmethod
    def _translate_id_from_dap(dap_id):
        """Return the internal id previously mapped to *dap_id* ('*' passes through).

        :raises KeyError: if the client sent an id that was never handed out
            (or an unhashable value).
        """
        if dap_id == "*":
            return "*"
        try:
            return BaseSchema._dap_id_to_obj_id[dap_id]
        except (KeyError, TypeError):
            # Narrowed from a bare 'except:' -- TypeError covers unhashable
            # values coming from the wire; anything else should propagate.
            raise KeyError("Wrong ID sent from the client: %s" % (dap_id,)) from None

    @staticmethod
    def update_dict_ids_to_dap(dct):
        # Default: no ids to translate (generated subclasses override as needed).
        return dct

    @staticmethod
    def update_dict_ids_from_dap(dct):
        # Default: no ids to translate (generated subclasses override as needed).
        return dct
|
||||
|
||||
|
||||
BaseSchema.initialize_ids_translation()

# Registries mapping DAP 'command'/'event' names (and class names) to the
# generated message classes; populated by the register_* decorators below
# and consumed by from_dict()/from_json()/build_response().
_requests_to_types = {}
_responses_to_types = {}
_event_to_types = {}
_all_messages = {}
|
||||
|
||||
|
||||
def register(cls):
    """Class decorator: record *cls* in the global message registry by name."""
    _all_messages[cls.__name__] = cls
    return cls
|
||||
|
||||
|
||||
def register_request(command):
    """Return a class decorator mapping the DAP request *command* to its class."""

    def _decorator(cls):
        _requests_to_types[command] = cls
        return cls

    return _decorator
|
||||
|
||||
|
||||
def register_response(command):
    """Return a class decorator mapping the DAP response *command* to its class."""

    def _decorator(cls):
        _responses_to_types[command] = cls
        return cls

    return _decorator
|
||||
|
||||
|
||||
def register_event(event):
    """Return a class decorator mapping the DAP *event* name to its class."""

    def _decorator(cls):
        _event_to_types[event] = cls
        return cls

    return _decorator
|
||||
|
||||
|
||||
def from_dict(dct, update_ids_from_dap=False):
    """Instantiate the registered message class for the raw DAP dict *dct*.

    :param dct: decoded DAP message; its 'type' selects the registry and its
        'command'/'event' selects the class within it.
    :param update_ids_from_dap: forwarded to the class constructor so DAP ids
        are translated to internal ids.
    :raises ValueError: if 'type' is missing or the command/event is unknown.
    """
    msg_type = dct.get("type")
    if msg_type is None:
        raise ValueError("Unable to make sense of message: %s" % (dct,))

    if msg_type == "request":
        registry, key = _requests_to_types, dct["command"]
    elif msg_type == "response":
        registry, key = _responses_to_types, dct["command"]
    else:
        # Anything that is not a request/response is treated as an event.
        registry, key = _event_to_types, dct["event"]

    cls = registry.get(key)
    if cls is None:
        raise ValueError("Unable to create message from dict: %s. %s not in %s" % (dct, key, sorted(registry.keys())))
    try:
        return cls(update_ids_from_dap=update_ids_from_dap, **dct)
    except:
        # Log the failing payload for diagnosis and let the error propagate.
        debug_exception("Error creating %s from %s" % (cls, dct))
        raise
|
||||
|
||||
|
||||
def from_json(json_msg, update_ids_from_dap=False, on_dict_loaded=lambda dct: None):
    """Deserialize a JSON-encoded DAP message (str or utf-8 bytes) into a schema object.

    :param json_msg: the raw message, str or utf-8 encoded bytes.
    :param update_ids_from_dap: when True, translate DAP ids to internal ids.
    :param on_dict_loaded: hook invoked with the decoded dict before the
        schema object is built (e.g.: to inspect or patch the raw payload).
    """
    if isinstance(json_msg, bytes):
        json_msg = json_msg.decode("utf-8")

    as_dict = json.loads(json_msg)
    on_dict_loaded(as_dict)
    try:
        return from_dict(as_dict, update_ids_from_dap=update_ids_from_dap)
    except:
        if as_dict.get("type") == "response" and not as_dict.get("success"):
            # Error messages may not have required body (return as a generic Response).
            Response = _all_messages["Response"]
            return Response(**as_dict)
        else:
            raise
|
||||
|
||||
|
||||
def get_response_class(request):
    """Return the response class registered for *request* (object or raw dict)."""
    command = request["command"] if request.__class__ == dict else request.command
    return _responses_to_types[command]
|
||||
|
||||
|
||||
def build_response(request, kwargs=None):
    """Create the matching response object for *request*.

    *kwargs* (mutated in place when given) supplies extra response fields;
    'success' defaults to True and 'seq' to -1.
    """
    if kwargs is None:
        kwargs = {"success": True}
    else:
        kwargs.setdefault("success", True)
    response_class = _responses_to_types[request.command]
    kwargs.setdefault("seq", -1)  # To be overwritten before sending
    return response_class(command=request.command, request_seq=request.seq, **kwargs)
|
||||
+17458
File diff suppressed because it is too large
Load Diff
+45
@@ -0,0 +1,45 @@
|
||||
import os
import traceback
from _pydevd_bundle.pydevd_constants import ForkSafeLock

# Pid captured at import time; used to prefix every line written to the
# debug file so output from multiple processes can be told apart.
_pid = os.getpid()
_pid_msg = "%s: " % (_pid,)

# Serializes writes to DEBUG_FILE (ForkSafeLock presumably stays usable
# across fork -- see pydevd_constants; TODO confirm).
_debug_lock = ForkSafeLock()

# Flip DEBUG to True to enable the debug()/debug_exception() helpers below.
DEBUG = False
DEBUG_FILE = os.path.join(os.path.dirname(__file__), "__debug_output__.txt")
|
||||
|
||||
|
||||
def debug(msg):
    """Append *msg*, prefixed with the pid, to DEBUG_FILE when DEBUG is on.

    Accepts either str or bytes and opens the file in the matching mode; a
    trailing newline is added when the message has none.
    """
    if not DEBUG:
        return
    with _debug_lock:
        if isinstance(msg, bytes):
            prefix = _pid_msg.encode("utf-8")
            if not (msg.endswith(b"\r") or msg.endswith(b"\n")):
                msg += b"\n"
            mode = "a+b"
        else:
            prefix = _pid_msg
            if not (msg.endswith("\r") or msg.endswith("\n")):
                msg += "\n"
            mode = "a+"
        with open(DEBUG_FILE, mode) as stream:
            stream.write(prefix)
            stream.write(msg)
|
||||
|
||||
|
||||
def debug_exception(msg=None):
    """Append *msg* (optional) plus the current exception traceback to DEBUG_FILE.

    No-op unless the module-level DEBUG flag is enabled.
    """
    if DEBUG:
        if msg:
            debug(msg)

        with _debug_lock:
            # The stream is opened in text mode (traceback.print_exc writes
            # str), so the pid prefix must be written as str as well -- the
            # previous code encoded the prefix to bytes when msg was bytes,
            # which raised TypeError on this text-mode stream.
            with open(DEBUG_FILE, "a+") as stream:
                stream.write(_pid_msg)
                traceback.print_exc(file=stream)
|
||||
@@ -0,0 +1,553 @@
|
||||
"""
|
||||
A copy of the code module in the standard library with some changes to work with
|
||||
async evaluation.
|
||||
|
||||
Utilities needed to emulate Python's interactive interpreter.
|
||||
"""
|
||||
|
||||
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
import inspect
|
||||
|
||||
# START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
r"""Utilities to compile possibly incomplete Python source code.
|
||||
|
||||
This module provides two interfaces, broadly similar to the builtin
|
||||
function compile(), which take program text, a filename and a 'mode'
|
||||
and:
|
||||
|
||||
- Return code object if the command is complete and valid
|
||||
- Return None if the command is incomplete
|
||||
- Raise SyntaxError, ValueError or OverflowError if the command is a
|
||||
syntax error (OverflowError and ValueError can be produced by
|
||||
malformed literals).
|
||||
|
||||
Approach:
|
||||
|
||||
First, check if the source consists entirely of blank lines and
|
||||
comments; if so, replace it with 'pass', because the built-in
|
||||
parser doesn't always do the right thing for these.
|
||||
|
||||
Compile three times: as is, with \n, and with \n\n appended. If it
|
||||
compiles as is, it's complete. If it compiles with one \n appended,
|
||||
we expect more. If it doesn't compile either way, we compare the
|
||||
error we get when compiling with \n or \n\n appended. If the errors
|
||||
are the same, the code is broken. But if the errors are different, we
|
||||
expect more. Not intuitive; not even guaranteed to hold in future
|
||||
releases; but this matches the compiler's behavior from Python 1.4
|
||||
through 2.2, at least.
|
||||
|
||||
Caveat:
|
||||
|
||||
It is possible (but not likely) that the parser stops parsing with a
|
||||
successful outcome before reaching the end of the source; in this
|
||||
case, trailing symbols may be ignored instead of causing an error.
|
||||
For example, a backslash followed by two newlines may be followed by
|
||||
arbitrary garbage. This will be fixed once the API for the parser is
|
||||
better.
|
||||
|
||||
The two interfaces are:
|
||||
|
||||
compile_command(source, filename, symbol):
|
||||
|
||||
Compiles a single command in the manner described above.
|
||||
|
||||
CommandCompiler():
|
||||
|
||||
Instances of this class have __call__ methods identical in
|
||||
signature to compile_command; the difference is that if the
|
||||
instance compiles program text containing a __future__ statement,
|
||||
the instance 'remembers' and compiles all subsequent program texts
|
||||
with the statement in force.
|
||||
|
||||
The module also provides another class:
|
||||
|
||||
Compile():
|
||||
|
||||
Instances of this class act like the built-in function compile,
|
||||
but with 'memory' in the sense described above.
|
||||
"""
|
||||
|
||||
import __future__
|
||||
|
||||
_features = [getattr(__future__, fname) for fname in __future__.all_feature_names]
|
||||
|
||||
__all__ = ["compile_command", "Compile", "CommandCompiler"]
|
||||
|
||||
PyCF_DONT_IMPLY_DEDENT = 0x200 # Matches pythonrun.h
|
||||
|
||||
|
||||
def _maybe_compile(compiler, source, filename, symbol):
|
||||
# Check for source consisting of only blank lines and comments
|
||||
for line in source.split("\n"):
|
||||
line = line.strip()
|
||||
if line and line[0] != "#":
|
||||
break # Leave it alone
|
||||
else:
|
||||
if symbol != "eval":
|
||||
source = "pass" # Replace it with a 'pass' statement
|
||||
|
||||
err = err1 = err2 = None
|
||||
code = code1 = code2 = None
|
||||
|
||||
try:
|
||||
code = compiler(source, filename, symbol)
|
||||
except SyntaxError as err:
|
||||
pass
|
||||
|
||||
try:
|
||||
code1 = compiler(source + "\n", filename, symbol)
|
||||
except SyntaxError as e:
|
||||
err1 = e
|
||||
|
||||
try:
|
||||
code2 = compiler(source + "\n\n", filename, symbol)
|
||||
except SyntaxError as e:
|
||||
err2 = e
|
||||
|
||||
try:
|
||||
if code:
|
||||
return code
|
||||
if not code1 and repr(err1) == repr(err2):
|
||||
raise err1
|
||||
finally:
|
||||
err1 = err2 = None
|
||||
|
||||
|
||||
def _compile(source, filename, symbol):
    """compile() with PyCF_DONT_IMPLY_DEDENT, so incomplete blocks raise
    instead of being silently closed by an implied dedent."""
    flags = PyCF_DONT_IMPLY_DEDENT
    return compile(source, filename, symbol, flags)
|
||||
|
||||
|
||||
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default) or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    # Delegate to the shared heuristic with the default (stateless) compiler.
    result = _maybe_compile(_compile, source, filename, symbol)
    return result
|
||||
|
||||
|
||||
class Compile:
    """Instances of this class behave much like the built-in compile
    function, but if one is used to compile text containing a future
    statement, it "remembers" and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self):
        self.flags = PyCF_DONT_IMPLY_DEDENT

        try:
            from ast import PyCF_ALLOW_TOP_LEVEL_AWAIT

            self.flags |= PyCF_ALLOW_TOP_LEVEL_AWAIT
        except ImportError:
            # Bug fix: this was a bare `except:`.  Only ImportError is
            # expected here (the flag only exists on Python >= 3.8); a bare
            # except would also swallow KeyboardInterrupt/SystemExit.
            pass

    def __call__(self, source, filename, symbol):
        """Compile 'source' like built-in compile(), accumulating any
        __future__ compiler flags the compiled code turns on so later
        calls keep them in force."""
        codeob = compile(source, filename, symbol, self.flags, 1)
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                self.flags |= feature.compiler_flag
        return codeob
|
||||
|
||||
|
||||
class CommandCompiler:
    """Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self):
        # A stateful compiler that remembers __future__ flags across calls.
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command is a
          syntax error (OverflowError and ValueError can be produced by
          malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
|
||||
|
||||
|
||||
# END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
# END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
|
||||
|
||||
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"]
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
|
||||
|
||||
class _EvalAwaitInNewEventLoop(threading.Thread):
    # Daemon thread that evaluates a compiled (coroutine-producing) code
    # object on its own asyncio event loop and stores either the awaited
    # result in `evaluated_value` or the exception info in `exc`, for the
    # caller to inspect after join().

    def __init__(self, compiled, updated_globals, updated_locals):
        threading.Thread.__init__(self)
        # Daemon so a hung evaluation cannot keep the process alive.
        self.daemon = True
        self._compiled = compiled
        self._updated_globals = updated_globals
        self._updated_locals = updated_locals

        # Output
        self.evaluated_value = None  # result of awaiting the compiled code
        self.exc = None  # sys.exc_info() triple if the evaluation raised

    async def _async_func(self):
        # NOTE(review): eval's signature is eval(code, globals, locals), but
        # _updated_locals is passed in the globals position (and vice-versa),
        # contradicting the parameter names -- confirm this inversion is
        # intentional before changing it.
        return await eval(self._compiled, self._updated_locals, self._updated_globals)

    def run(self):
        try:
            import asyncio

            # NOTE(review): asyncio.run() creates (and closes) its own event
            # loop, so the loop created below is only installed as this
            # thread's current loop and is never closed -- presumably kept for
            # code that calls asyncio.get_event_loop() during evaluation;
            # verify before removing.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            self.evaluated_value = asyncio.run(self._async_func())
        except:
            # Capture everything; the caller re-raises from self.exc.
            self.exc = sys.exc_info()
|
||||
|
||||
|
||||
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.

        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        # Stateful compiler: remembers __future__ flags across inputs.
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.

        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.

        """
        try:
            is_async = False
            # CO_COROUTINE on the code object means top-level await was used.
            if hasattr(inspect, "CO_COROUTINE"):
                is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE

            if is_async:
                # Await the coroutine on a helper thread with its own event
                # loop; NOTE(review): self.locals is passed as the helper's
                # 'updated_globals' parameter and None as 'updated_locals'.
                t = _EvalAwaitInNewEventLoop(code, self.locals, None)
                t.start()
                t.join()

                if t.exc:
                    raise t.exc[1].with_traceback(t.exc[2])

            else:
                exec(code, self.locals)
        except SystemExit:
            raise
        except:
            self.showtraceback()

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.

        """
        # Note: 'type' shadows the builtin for the rest of this method.
        type, value, tb = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value.args
            except ValueError:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        if sys.excepthook is sys.__excepthook__:
            lines = traceback.format_exception_only(type, value)
            self.write("".join(lines))
        else:
            # If someone has set sys.excepthook, we let that take precedence
            # over self.write
            sys.excepthook(type, value, tb)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.

        """
        sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
        sys.last_traceback = last_tb
        try:
            # tb_next skips the frame belonging to runcode() itself.
            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
            if sys.excepthook is sys.__excepthook__:
                self.write("".join(lines))
            else:
                # If someone has set sys.excepthook, we let that take precedence
                # over self.write
                sys.excepthook(ei[0], ei[1], last_tb)
        finally:
            # Break the reference cycle between the traceback and this frame.
            last_tb = ei = None

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.

        """
        sys.stderr.write(data)
|
||||
|
||||
|
||||
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.

    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.

        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None, exitmsg=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        The optional exitmsg argument specifies the exit message
        printed when exiting. Pass the empty string to suppress
        printing an exit message. If exitmsg is not given or None,
        a default message is printed.

        """
        # Ensure the standard prompts exist (they are normally only set by
        # the real interactive interpreter).
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" % (sys.version, sys.platform, cprt, self.__class__.__name__))
        elif banner:
            self.write("%s\n" % str(banner))
        # 'more' is truthy while the current statement is incomplete.
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    # EOF (Ctrl-D / Ctrl-Z) ends the session.
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C discards the pending (possibly multi-line) input.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
        if exitmsg is None:
            self.write("now exiting %s...\n" % self.__class__.__name__)
        elif exitmsg != "":
            self.write("%s\n" % exitmsg)

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).

        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        input(); a subclass may replace this with a different
        implementation.

        """
        return input(prompt)
|
||||
|
||||
|
||||
def interact(banner=None, readfunc=None, local=None, exitmsg=None):
    """Closely emulate the interactive Python interpreter.

    This is a backwards compatible interface to the InteractiveConsole
    class.  When readfunc is not specified, it attempts to import the
    readline module to enable GNU readline if it is available.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()
    exitmsg -- passed to InteractiveConsole.interact()

    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best-effort: importing readline enables GNU line editing for
        # input(), when the module is available.
        try:
            import readline  # noqa: F401
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner, exitmsg)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this module directly to get an interactive console.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-q", action="store_true", help="don't print version and copyright messages")
    args = parser.parse_args()
    # Suppress the banner when -q was passed here or to the interpreter
    # itself (sys.flags.quiet reflects python's own -q option).
    if args.q or sys.flags.quiet:
        banner = ""
    else:
        banner = None
    interact(banner)
|
||||
+41
@@ -0,0 +1,41 @@
|
||||
# Defines which version of the PyDBAdditionalThreadInfo we'll use.
#
# The USE_CYTHON_FLAG environment setting selects between the compiled
# (cython) and pure-python implementations; when unset, the cython version is
# preferred with a silent fallback to the pure-python one.
from _pydevd_bundle.pydevd_constants import ENV_FALSE_LOWER_VALUES, USE_CYTHON_FLAG, ENV_TRUE_LOWER_VALUES

if USE_CYTHON_FLAG in ENV_TRUE_LOWER_VALUES:
    # We must import the cython version if forcing cython
    from _pydevd_bundle.pydevd_cython_wrapper import (
        PyDBAdditionalThreadInfo,
        set_additional_thread_info,
        _set_additional_thread_info_lock,  # @UnusedImport
        any_thread_stepping,
        remove_additional_info,
    )  # @UnusedImport

elif USE_CYTHON_FLAG in ENV_FALSE_LOWER_VALUES:
    # Use the regular version if not forcing cython
    from _pydevd_bundle.pydevd_additional_thread_info_regular import (
        PyDBAdditionalThreadInfo,
        set_additional_thread_info,
        _set_additional_thread_info_lock,  # @UnusedImport @Reimport
        any_thread_stepping,
        remove_additional_info,
    )  # @UnusedImport @Reimport

else:
    # Regular: use fallback if not found (message is already given elsewhere).
    try:
        from _pydevd_bundle.pydevd_cython_wrapper import (
            PyDBAdditionalThreadInfo,
            set_additional_thread_info,
            _set_additional_thread_info_lock,
            any_thread_stepping,
            remove_additional_info,
        )
    except ImportError:
        from _pydevd_bundle.pydevd_additional_thread_info_regular import (
            PyDBAdditionalThreadInfo,
            set_additional_thread_info,
            _set_additional_thread_info_lock,  # @UnusedImport
            any_thread_stepping,
            remove_additional_info,
        )  # @UnusedImport
|
||||
+328
@@ -0,0 +1,328 @@
|
||||
from _pydevd_bundle.pydevd_constants import (
|
||||
STATE_RUN,
|
||||
PYTHON_SUSPEND,
|
||||
SUPPORT_GEVENT,
|
||||
ForkSafeLock,
|
||||
_current_frames,
|
||||
STATE_SUSPEND,
|
||||
get_global_debugger,
|
||||
get_thread_id,
|
||||
)
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
|
||||
import weakref
|
||||
|
||||
version = 11
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# PyDBAdditionalThreadInfo
|
||||
# =======================================================================================================================
|
||||
# fmt: off
# IFDEF CYTHON
# cdef class PyDBAdditionalThreadInfo:
# ELSE
class PyDBAdditionalThreadInfo(object):
# ENDIF
# fmt: on
    """Per-thread debugger state (stepping commands, suspension state, tracers).

    One instance is attached to each traced thread as `thread.additional_info`
    (see set_additional_thread_info in this module).
    """

    # Note: the params in cython are declared in pydevd_cython.pxd.
    # fmt: off
    # IFDEF CYTHON
    # ELSE
    __slots__ = [
        "pydev_state",
        "pydev_step_stop",
        "pydev_original_step_cmd",
        "pydev_step_cmd",
        "pydev_notify_kill",
        "pydev_django_resolve_frame",
        "pydev_call_from_jinja2",
        "pydev_call_inside_jinja2",
        "is_tracing",
        "conditional_breakpoint_exception",
        "pydev_message",
        "suspend_type",
        "pydev_next_line",
        "pydev_func_name",
        "suspended_at_unhandled",
        "trace_suspend_type",
        "top_level_thread_tracer_no_back_frames",
        "top_level_thread_tracer_unhandled",
        "thread_tracer",
        "step_in_initial_location",
        # Used for CMD_SMART_STEP_INTO (to know which smart step into variant to use)
        "pydev_smart_parent_offset",
        "pydev_smart_child_offset",
        # Used for CMD_SMART_STEP_INTO (list[_pydevd_bundle.pydevd_bytecode_utils.Variant])
        # Filled when the cmd_get_smart_step_into_variants is requested (so, this is a copy
        # of the last request for a given thread and pydev_smart_parent_offset/pydev_smart_child_offset relies on it).
        "pydev_smart_step_into_variants",
        "target_id_to_smart_step_into_variant",
        "pydev_use_scoped_step_frame",
        "weak_thread",
        "is_in_wait_loop",
    ]
    # ENDIF
    # fmt: on

    def __init__(self):
        self.pydev_state = STATE_RUN  # STATE_RUN or STATE_SUSPEND
        self.pydev_step_stop = None

        # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to
        # say the action that started it and the other is to say what's the current tracing behavior
        # (because it's possible that we start with a step over but may have to switch to a
        # different step strategy -- for instance, if a step over is done and we return the current
        # method the strategy is changed to a step in).

        self.pydev_original_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.
        self.pydev_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.

        self.pydev_notify_kill = False
        self.pydev_django_resolve_frame = False
        self.pydev_call_from_jinja2 = None
        self.pydev_call_inside_jinja2 = None
        self.is_tracing = 0
        self.conditional_breakpoint_exception = None
        self.pydev_message = ""
        self.suspend_type = PYTHON_SUSPEND
        self.pydev_next_line = -1
        self.pydev_func_name = ".invalid."  # Must match the type in cython
        self.suspended_at_unhandled = False
        self.trace_suspend_type = "trace"  # 'trace' or 'frame_eval'
        self.top_level_thread_tracer_no_back_frames = []
        self.top_level_thread_tracer_unhandled = None
        self.thread_tracer = None
        self.step_in_initial_location = None
        self.pydev_smart_parent_offset = -1
        self.pydev_smart_child_offset = -1
        self.pydev_smart_step_into_variants = ()
        self.target_id_to_smart_step_into_variant = {}

        # Flag to indicate ipython use-case where each line will be executed as a call/line/return
        # in a new new frame but in practice we want to consider each new frame as if it was all
        # part of the same frame.
        #
        # In practice this means that a step over shouldn't revert to a step in and we need some
        # special logic to know when we should stop in a step over as we need to consider 2
        # different frames as being equal if they're logically the continuation of a frame
        # being executed by ipython line by line.
        #
        # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003
        self.pydev_use_scoped_step_frame = False
        self.weak_thread = None

        # Purpose: detect if this thread is suspended and actually in the wait loop
        # at this time (otherwise it may be suspended but still didn't reach a point.
        # to pause).
        self.is_in_wait_loop = False

    # fmt: off
    # IFDEF CYTHON
    # cpdef object _get_related_thread(self):
    # ELSE
    def _get_related_thread(self):
    # ENDIF
    # fmt: on
        """Return the live thread owning this info, or None when it is gone
        (killed, never registered, dead, or no longer in threading._active)."""
        if self.pydev_notify_kill:  # Already killed
            return None

        if self.weak_thread is None:
            return None

        thread = self.weak_thread()
        if thread is None:
            # Bug fix: this used to `return False`, but the caller
            # (_update_stepping_info) checks `is not None`, so infos whose
            # thread had already been garbage-collected were never pruned.
            return None

        if not is_thread_alive(thread):
            return None

        if thread._ident is None:  # Can this happen?
            pydev_log.critical("thread._ident is None in _get_related_thread!")
            return None

        if threading._active.get(thread._ident) is not thread:
            return None

        return thread

    # fmt: off
    # IFDEF CYTHON
    # cpdef bint _is_stepping(self):
    # ELSE
    def _is_stepping(self):
    # ENDIF
    # fmt: on
        """Return True when this thread is (or should be treated as) stepping."""
        if self.pydev_state == STATE_RUN and self.pydev_step_cmd != -1:
            # This means actually stepping in a step operation.
            return True

        if self.pydev_state == STATE_SUSPEND and self.is_in_wait_loop:
            # This means stepping because it was suspended but still didn't
            # reach a suspension point.
            return True

        return False

    # fmt: off
    # IFDEF CYTHON
    # cpdef get_topmost_frame(self, thread):
    # ELSE
    def get_topmost_frame(self, thread):
    # ENDIF
    # fmt: on
        """
        Gets the topmost frame for the given thread. Note that it may be None
        and callers should remove the reference to the frame as soon as possible
        to avoid disturbing user code.
        """
        # sys._current_frames(): dictionary with thread id -> topmost frame
        current_frames = _current_frames()
        topmost_frame = current_frames.get(thread._ident)
        if topmost_frame is None:
            # Note: this is expected for dummy threads (so, getting the topmost frame should be
            # treated as optional).
            pydev_log.info(
                "Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n" "GEVENT_SUPPORT: %s",
                thread,
                thread.ident,
                id(thread),
                current_frames,
                SUPPORT_GEVENT,
            )

        return topmost_frame

    # fmt: off
    # IFDEF CYTHON
    # cpdef update_stepping_info(self):
    # ELSE
    def update_stepping_info(self):
    # ENDIF
    # fmt: on
        # Refresh the module-level stepping bookkeeping for this info.
        _update_stepping_info(self)

    def __str__(self):
        return "State:%s Stop:%s Cmd: %s Kill:%s" % (self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill)
|
||||
|
||||
|
||||
# Guards lazy creation of thread.additional_info in set_additional_thread_info.
_set_additional_thread_info_lock = ForkSafeLock()
# Pre-created "next" instance handed out by set_additional_thread_info; kept
# in a one-element list so it can be replaced in place (see the note there
# about not constructing inside the debugger path).
_next_additional_info = [PyDBAdditionalThreadInfo()]
|
||||
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cpdef set_additional_thread_info(thread):
# ELSE
def set_additional_thread_info(thread):
# ENDIF
# fmt: on
    """Return the PyDBAdditionalThreadInfo for 'thread', creating it if needed.

    The info is cached on the thread object itself (thread.additional_info);
    creation happens under a lock so racing callers all get the same instance.
    """
    try:
        additional_info = thread.additional_info
        if additional_info is None:
            raise AttributeError()
    except:
        with _set_additional_thread_info_lock:
            # If it's not there, set it within a lock to avoid any racing
            # conditions.
            try:
                additional_info = thread.additional_info
            except:
                additional_info = None

            if additional_info is None:
                # Note: don't call PyDBAdditionalThreadInfo constructor at this
                # point as it can piggy-back into the debugger which could
                # get here again, rather get the global ref which was pre-created
                # and add a new entry only after we set thread.additional_info.
                additional_info = _next_additional_info[0]
                thread.additional_info = additional_info
                additional_info.weak_thread = weakref.ref(thread)
                add_additional_info(additional_info)
                # Replace the pre-created instance for the next caller.
                del _next_additional_info[:]
                _next_additional_info.append(PyDBAdditionalThreadInfo())

    return additional_info
|
||||
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cdef set _all_infos
# cdef set _infos_stepping
# cdef object _update_infos_lock
# ELSE
# ENDIF
# fmt: on

# All registered PyDBAdditionalThreadInfo instances and the subset currently
# stepping; both sets are rebuilt/mutated only under _update_infos_lock.
_all_infos = set()
_infos_stepping = set()
_update_infos_lock = ForkSafeLock()
|
||||
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cdef _update_stepping_info(PyDBAdditionalThreadInfo info):
# ELSE
def _update_stepping_info(info):
# ENDIF
# fmt: on
    """Recompute the global stepping bookkeeping after 'info' changed state.

    Prunes entries whose thread is gone from _all_infos, rebuilds
    _infos_stepping, and wakes the internal-command queue of the thread that
    owns 'info' so it can re-check its state.
    """

    global _infos_stepping
    global _all_infos

    with _update_infos_lock:
        # Removes entries that are no longer valid.
        # Bug fix: these loop variables used to be named 'info', clobbering
        # the 'info' parameter -- the wake-up below then targeted whichever
        # entry happened to be iterated last instead of the thread whose
        # state actually changed.
        new_all_infos = set()
        for existing_info in _all_infos:
            if existing_info._get_related_thread() is not None:
                new_all_infos.add(existing_info)
        _all_infos = new_all_infos

        new_stepping = set()
        for existing_info in _all_infos:
            if existing_info._is_stepping():
                new_stepping.add(existing_info)
        _infos_stepping = new_stepping

        py_db = get_global_debugger()
        if py_db is not None and not py_db.pydb_disposed:
            # Robustness: weak_thread may still be unset (it is only assigned
            # in set_additional_thread_info).
            thread = info.weak_thread() if info.weak_thread is not None else None
            if thread is not None:
                thread_id = get_thread_id(thread)
                _queue, event = py_db.get_internal_queue_and_event(thread_id)
                event.set()
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cpdef add_additional_info(PyDBAdditionalThreadInfo info):
# ELSE
def add_additional_info(info):
# ENDIF
# fmt: on
    """Register 'info' in the global bookkeeping sets (under the lock)."""
    with _update_infos_lock:
        _all_infos.add(info)
        if info._is_stepping():
            _infos_stepping.add(info)
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cpdef remove_additional_info(PyDBAdditionalThreadInfo info):
# ELSE
def remove_additional_info(info):
# ENDIF
# fmt: on
    """Drop 'info' from the global bookkeeping sets (no-op if absent)."""
    with _update_infos_lock:
        _all_infos.discard(info)
        _infos_stepping.discard(info)
|
||||
|
||||
|
||||
# fmt: off
# IFDEF CYTHON
# cpdef bint any_thread_stepping():
# ELSE
def any_thread_stepping():
# ENDIF
# fmt: on
    """Return True when at least one tracked thread is currently stepping."""
    return bool(_infos_stepping)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,181 @@
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle import pydevd_import_class
|
||||
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
|
||||
|
||||
class ExceptionBreakpoint(object):
    """Configuration for breaking on a given exception class.

    The class is resolved eagerly from its qualified name; when resolution
    fails, `type` and `name` are left as None and subclass matching is
    skipped for this entry.
    """

    def __init__(
        self,
        qname,
        condition,
        expression,
        notify_on_handled_exceptions,
        notify_on_unhandled_exceptions,
        notify_on_user_unhandled_exceptions,
        notify_on_first_raise_only,
        ignore_libraries,
    ):
        exctype = get_exception_class(qname)
        self.qname = qname
        self.name = exctype.__name__ if exctype is not None else None

        self.condition = condition
        self.expression = expression
        self.notify_on_unhandled_exceptions = notify_on_unhandled_exceptions
        self.notify_on_handled_exceptions = notify_on_handled_exceptions
        self.notify_on_first_raise_only = notify_on_first_raise_only
        self.notify_on_user_unhandled_exceptions = notify_on_user_unhandled_exceptions
        self.ignore_libraries = ignore_libraries

        self.type = exctype

    def __str__(self):
        return self.qname

    @property
    def has_condition(self):
        return self.condition is not None

    def handle_hit_condition(self, frame):
        # Exception breakpoints never carry a hit condition.
        return False
|
||||
|
||||
|
||||
class LineBreakpoint(object):
    """A breakpoint attached to a source line, with optional condition,
    hit condition (with the `@HIT@` placeholder) and logpoint expression."""

    def __init__(self, breakpoint_id, line, condition, func_name, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.breakpoint_id = breakpoint_id
        self.line = line
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0  # total hits so far, guarded by the lock below
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        # True when either a plain condition or a hit condition is configured.
        return bool(self.condition or self.hit_condition)

    def handle_hit_condition(self, frame):
        """Bump the hit count and evaluate the hit condition in 'frame'.

        Returns False when no hit condition is set or evaluation fails.
        """
        if not self.hit_condition:
            return False
        with self._hit_condition_lock:
            self._hit_count += 1
            condition_source = self.hit_condition.replace("@HIT@", str(self._hit_count))
            try:
                return bool(eval(condition_source, frame.f_globals, frame.f_locals))
            except Exception:
                return False
|
||||
|
||||
|
||||
class FunctionBreakpoint(object):
    """A breakpoint attached to a function name, with optional condition,
    hit condition (with the `@HIT@` placeholder) and logpoint expression."""

    def __init__(self, func_name, condition, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0  # total hits so far, guarded by the lock below
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        # True when either a plain condition or a hit condition is configured.
        return bool(self.condition or self.hit_condition)

    def handle_hit_condition(self, frame):
        """Bump the hit count and evaluate the hit condition in 'frame'.

        Returns False when no hit condition is set or evaluation fails.
        """
        if not self.hit_condition:
            return False
        with self._hit_condition_lock:
            self._hit_count += 1
            condition_source = self.hit_condition.replace("@HIT@", str(self._hit_count))
            try:
                return bool(eval(condition_source, frame.f_globals, frame.f_locals))
            except Exception:
                return False
|
||||
|
||||
|
||||
def get_exception_breakpoint(exctype, exceptions):
    """Find the breakpoint in 'exceptions' matching the raised 'exctype'.

    Prefers an exact qualified-name match; otherwise returns the most
    specific registered base class of 'exctype'.  Returns None when
    'exceptions' is None or nothing matches.
    """
    if exctype:
        exception_full_qname = str(exctype.__module__) + "." + exctype.__name__
    else:
        exception_full_qname = None

    if exceptions is None:
        return None

    try:
        # Exact match on the fully-qualified name wins.
        return exceptions[exception_full_qname]
    except KeyError:
        pass

    best_match = None
    for candidate in exceptions.values():
        if candidate.type is None or not issubclass(exctype, candidate.type):
            continue
        # Keep the most derived matching base class.
        if best_match is None or issubclass(candidate.type, best_match.type):
            best_match = candidate
    return best_match
|
||||
|
||||
|
||||
def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
    """Decide whether to do a post-mortem stop for an unhandled exception.

    :param py_db: the debugger instance (provides breakpoint lookup, filtering
        and the actual stop entry-point).
    :param thread: the thread where the exception was raised.
    :param additional_info: per-thread debugger info; ``pydev_message`` is set
        on it before stopping.
    :param arg: the ``(exctype, value, traceback)`` triple.

    Returns silently (no stop) when uncaught-exception breaks are disabled, no
    matching breakpoint exists, there is no traceback, the exception is
    KeyboardInterrupt, or it is a SystemExit whose code should be ignored.
    """
    exctype, value, tb = arg
    break_on_uncaught_exceptions = py_db.break_on_uncaught_exceptions
    if break_on_uncaught_exceptions:
        exception_breakpoint = py_db.get_exception_breakpoint(exctype, break_on_uncaught_exceptions)
    else:
        exception_breakpoint = None

    if not exception_breakpoint:
        return

    if tb is None:  # sometimes it can be None, e.g. with GTK
        return

    if exctype is KeyboardInterrupt:
        return

    if exctype is SystemExit and py_db.ignore_system_exit_code(value):
        return

    frames = []
    user_frame = None

    # Walk the traceback: collect every frame, and remember the innermost frame
    # that is NOT excluded by the breakpoint's filters as the frame to stop in.
    while tb is not None:
        if not py_db.exclude_exception_by_filter(exception_breakpoint, tb):
            user_frame = tb.tb_frame
        frames.append(tb.tb_frame)
        tb = tb.tb_next

    if user_frame is None:
        # Every frame was filtered out: nothing user-visible to stop in.
        return

    frames_byid = dict([(id(frame), frame) for frame in frames])
    add_exception_to_frame(user_frame, arg)
    if exception_breakpoint.condition is not None:
        eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, user_frame)
        if not eval_result:
            return

    if exception_breakpoint.expression is not None:
        py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, user_frame)

    # pydev_message is expected to be text; fall back to utf-8 bytes if the
    # plain assignment fails.
    try:
        additional_info.pydev_message = exception_breakpoint.qname
    except:
        additional_info.pydev_message = exception_breakpoint.qname.encode("utf-8")

    pydev_log.debug("Handling post-mortem stop on exception breakpoint %s" % (exception_breakpoint.qname,))

    py_db.do_stop_on_unhandled_exception(thread, user_frame, frames_byid, arg)
|
||||
|
||||
|
||||
def get_exception_class(kls):
    """Resolve *kls* (a dotted name string) to an exception class.

    First tries ``eval`` (resolves builtins such as ``ValueError`` directly);
    on any failure falls back to importing the name.

    NOTE(review): ``eval`` on a debugger-supplied name is intentional here
    (the string comes from the debugging client, which already has full
    control), but do not reuse this helper for untrusted input.
    """
    try:
        return eval(kls)
    except:
        return pydevd_import_class.import_name(kls)
|
||||
@@ -0,0 +1,938 @@
|
||||
"""
|
||||
Bytecode analysing utils. Originally added for using in smart step into.
|
||||
|
||||
Note: not importable from Python 2.
|
||||
"""
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from types import CodeType
|
||||
from _pydevd_frame_eval.vendored.bytecode.instr import _Variable, Label
|
||||
from _pydevd_frame_eval.vendored import bytecode
|
||||
from _pydevd_frame_eval.vendored.bytecode import cfg as bytecode_cfg
|
||||
import dis
|
||||
import opcode as _opcode
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import KeyifyList, DebugInfoHolder, IS_PY311_OR_GREATER
|
||||
from bisect import bisect
|
||||
from collections import deque
|
||||
import traceback
|
||||
|
||||
# When True, throws errors on unknown bytecodes, when False, ignore those as if they didn't change the stack.
STRICT_MODE = False

# When True, code objects created for <listcomp>/<genexpr>/<setcomp>/<dictcomp>
# are analysed recursively so their calls become children targets.
GO_INTO_INNER_CODES = True

# Verbose tracing of the stack simulation (print-based; development only).
DEBUG = False

# All BINARY_* opcode names known to `dis` for the running interpreter.
_BINARY_OPS = set([opname for opname in dis.opname if opname.startswith("BINARY_")])

# Maps a BINARY_* opcode name to the dunder method that implements it
# (used to present a call name for the operation).
_BINARY_OP_MAP = {
    "BINARY_POWER": "__pow__",
    "BINARY_MULTIPLY": "__mul__",
    "BINARY_MATRIX_MULTIPLY": "__matmul__",
    "BINARY_FLOOR_DIVIDE": "__floordiv__",
    "BINARY_TRUE_DIVIDE": "__div__",
    "BINARY_MODULO": "__mod__",
    "BINARY_ADD": "__add__",
    "BINARY_SUBTRACT": "__sub__",
    "BINARY_LSHIFT": "__lshift__",
    "BINARY_RSHIFT": "__rshift__",
    "BINARY_AND": "__and__",
    "BINARY_OR": "__or__",
    "BINARY_XOR": "__xor__",
    "BINARY_SUBSCR": "__getitem__",
    "BINARY_DIVIDE": "__div__",
}

# Maps a comparison operator source form to the dunder method that implements it.
_COMP_OP_MAP = {
    "<": "__lt__",
    "<=": "__le__",
    "==": "__eq__",
    "!=": "__ne__",
    ">": "__gt__",
    ">=": "__ge__",
    "in": "__contains__",
    "not in": "__contains__",
}
|
||||
|
||||
|
||||
class Target(object):
    """A potential smart-step-into target: one call site found in the bytecode.

    ``offset`` is the instruction offset of the call within its code object;
    ``children_targets`` holds targets found inside inner code objects (e.g.
    a <listcomp>). The end/col fields are only filled on Python 3.11+.
    """

    __slots__ = ["arg", "lineno", "endlineno", "startcol", "endcol", "offset", "children_targets"]

    def __init__(
        self,
        arg,
        lineno,
        offset,
        children_targets=(),
        # These are optional (only Python 3.11 onwards).
        endlineno=-1,
        startcol=-1,
        endcol=-1,
    ):
        self.arg = arg
        self.lineno = lineno
        self.offset = offset
        self.children_targets = children_targets
        self.endlineno = endlineno
        self.startcol = startcol
        self.endcol = endcol

    def __repr__(self):
        parts = ["%s: %s" % (slot, getattr(self, slot)) for slot in self.__slots__]
        return "Target(%s)" % ", ".join(parts)

    __str__ = __repr__
|
||||
|
||||
|
||||
class _TargetIdHashable(object):
|
||||
def __init__(self, target):
|
||||
self.target = target
|
||||
|
||||
def __eq__(self, other):
|
||||
if not hasattr(other, "target"):
|
||||
return
|
||||
return other.target is self.target
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return id(self.target)
|
||||
|
||||
|
||||
class _StackInterpreter(object):
    """
    Abstract interpretation of one basic block's bytecode to find call sites.

    Each ``on_<OPNAME>`` method simulates that opcode's effect on a value
    stack; call-shaped opcodes additionally record a Target in
    ``self.function_calls``. Stack underflow is tolerated (blocks are analysed
    in isolation, so values produced by a previous block are not present).

    Good reference: https://github.com/python/cpython/blob/fcb55c0037baab6f98f91ee38ce84b6f874f034a/Python/ceval.c
    """

    def __init__(self, bytecode):
        self.bytecode = bytecode
        self._stack = deque()
        # Targets found for call-like opcodes (the analysis result).
        self.function_calls = []
        # LOAD_ATTR targets keyed by instruction identity (popped when the attr
        # turns out to be the callee of a call).
        self.load_attrs = {}
        # NOTE(review): appears unused in this class — confirm before removing.
        self.func = set()
        # Maps the qualname instruction (by identity) of an inner comprehension
        # function to its code object.
        self.func_name_id_to_code_object = {}

    def __str__(self):
        return "Stack:\nFunction calls:\n%s\nLoad attrs:\n%s\n" % (self.function_calls, list(self.load_attrs.values()))

    def _getname(self, instr):
        # For comparison opcodes, translate the operator to its dunder name.
        if instr.opcode in _opcode.hascompare:
            cmp_op = dis.cmp_op[instr.arg]
            if cmp_op not in ("exception match", "BAD"):
                return _COMP_OP_MAP.get(cmp_op, cmp_op)
        return instr.arg

    def _getcallname(self, instr):
        """Best-effort extraction of a printable callee name, or None."""
        if instr.name == "BINARY_SUBSCR":
            return "__getitem__().__call__"
        if instr.name == "CALL_FUNCTION":
            # Note: previously a '__call__().__call__' was returned, but this was a bit weird
            # and on Python 3.9 this construct could appear for some internal things where
            # it wouldn't be expected.
            # Note: it'd be what we had in func()().
            return None
        if instr.name == "MAKE_FUNCTION":
            return "__func__().__call__"
        if instr.name == "LOAD_ASSERTION_ERROR":
            return "AssertionError"
        name = self._getname(instr)
        if isinstance(name, CodeType):
            name = name.co_qualname  # Note: only available for Python 3.11
        if isinstance(name, _Variable):
            name = name.name
        if isinstance(name, tuple):
            # Load attr in Python 3.12 comes with (bool, name)
            if len(name) == 2 and isinstance(name[0], bool) and isinstance(name[1], str):
                name = name[1]

        if not isinstance(name, str):
            return None
        if name.endswith(">"):  # xxx.<listcomp>, xxx.<lambda>, ...
            return name.split(".")[-1]
        return name

    def _no_stack_change(self, instr):
        pass  # Can be aliased when the instruction does nothing.

    def on_LOAD_GLOBAL(self, instr):
        self._stack.append(instr)

    def on_POP_TOP(self, instr):
        try:
            self._stack.pop()
        except IndexError:
            pass  # Ok (in the end of blocks)

    def on_LOAD_ATTR(self, instr):
        self.on_POP_TOP(instr)  # replaces the current top
        self._stack.append(instr)
        self.load_attrs[_TargetIdHashable(instr)] = Target(self._getname(instr), instr.lineno, instr.offset)

    on_LOOKUP_METHOD = on_LOAD_ATTR  # Improvement in PyPy

    def on_LOAD_CONST(self, instr):
        self._stack.append(instr)

    on_LOAD_DEREF = on_LOAD_CONST
    on_LOAD_NAME = on_LOAD_CONST
    on_LOAD_CLOSURE = on_LOAD_CONST
    on_LOAD_CLASSDEREF = on_LOAD_CONST

    # Although it actually changes the stack, it's inconsequential for us as a function call can't
    # really be found there.
    on_IMPORT_NAME = _no_stack_change
    on_IMPORT_FROM = _no_stack_change
    on_IMPORT_STAR = _no_stack_change
    on_SETUP_ANNOTATIONS = _no_stack_change

    def on_STORE_FAST(self, instr):
        try:
            self._stack.pop()
        except IndexError:
            pass  # Ok, we may have a block just with the store

        # Note: it stores in the locals and doesn't put anything in the stack.

    on_STORE_GLOBAL = on_STORE_FAST
    on_STORE_DEREF = on_STORE_FAST
    on_STORE_ATTR = on_STORE_FAST
    on_STORE_NAME = on_STORE_FAST

    on_DELETE_NAME = on_POP_TOP
    on_DELETE_ATTR = on_POP_TOP
    on_DELETE_GLOBAL = on_POP_TOP
    on_DELETE_FAST = on_POP_TOP
    on_DELETE_DEREF = on_POP_TOP

    on_DICT_UPDATE = on_POP_TOP
    on_SET_UPDATE = on_POP_TOP

    on_GEN_START = on_POP_TOP

    def on_NOP(self, instr):
        pass

    def _handle_call_from_instr(self, func_name_instr, func_call_instr):
        """Record a Target for a call whose callee is *func_name_instr*.

        Inner comprehension calls are expanded recursively (GO_INTO_INNER_CODES)
        so their own call sites become children targets.
        """
        self.load_attrs.pop(_TargetIdHashable(func_name_instr), None)
        call_name = self._getcallname(func_name_instr)
        target = None
        if not call_name:
            pass  # Ignore if we can't identify a name
        elif call_name in ("<listcomp>", "<genexpr>", "<setcomp>", "<dictcomp>"):
            code_obj = self.func_name_id_to_code_object[_TargetIdHashable(func_name_instr)]
            if code_obj is not None and GO_INTO_INNER_CODES:
                children_targets = _get_smart_step_into_targets(code_obj)
                if children_targets:
                    # i.e.: we have targets inside of a <listcomp> or <genexpr>.
                    # Note that to actually match this in the debugger we need to do matches on 2 frames,
                    # the one with the <listcomp> and then the actual target inside the <listcomp>.
                    target = Target(call_name, func_name_instr.lineno, func_call_instr.offset, children_targets)
                    self.function_calls.append(target)

        else:
            # Ok, regular call
            target = Target(call_name, func_name_instr.lineno, func_call_instr.offset)
            self.function_calls.append(target)

        if DEBUG and target is not None:
            print("Created target", target)
        self._stack.append(func_call_instr)  # Keep the func call as the result

    def on_COMPARE_OP(self, instr):
        try:
            _right = self._stack.pop()
        except IndexError:
            return
        try:
            _left = self._stack.pop()
        except IndexError:
            return

        cmp_op = dis.cmp_op[instr.arg]
        if cmp_op not in ("exception match", "BAD"):
            self.function_calls.append(Target(self._getname(instr), instr.lineno, instr.offset))

        self._stack.append(instr)

    def on_IS_OP(self, instr):
        try:
            self._stack.pop()
        except IndexError:
            return
        try:
            self._stack.pop()
        except IndexError:
            return

    def on_BINARY_SUBSCR(self, instr):
        try:
            _sub = self._stack.pop()
        except IndexError:
            return
        try:
            _container = self._stack.pop()
        except IndexError:
            return
        self.function_calls.append(Target(_BINARY_OP_MAP[instr.name], instr.lineno, instr.offset))
        self._stack.append(instr)

    on_BINARY_MATRIX_MULTIPLY = on_BINARY_SUBSCR
    on_BINARY_POWER = on_BINARY_SUBSCR
    on_BINARY_MULTIPLY = on_BINARY_SUBSCR
    on_BINARY_FLOOR_DIVIDE = on_BINARY_SUBSCR
    on_BINARY_TRUE_DIVIDE = on_BINARY_SUBSCR
    on_BINARY_MODULO = on_BINARY_SUBSCR
    on_BINARY_ADD = on_BINARY_SUBSCR
    on_BINARY_SUBTRACT = on_BINARY_SUBSCR
    on_BINARY_LSHIFT = on_BINARY_SUBSCR
    on_BINARY_RSHIFT = on_BINARY_SUBSCR
    on_BINARY_AND = on_BINARY_SUBSCR
    on_BINARY_OR = on_BINARY_SUBSCR
    on_BINARY_XOR = on_BINARY_SUBSCR

    def on_LOAD_METHOD(self, instr):
        self.on_POP_TOP(instr)  # Remove the previous as we're loading something from it.
        self._stack.append(instr)

    def on_MAKE_FUNCTION(self, instr):
        if not IS_PY311_OR_GREATER:
            # The qualifier name is no longer put in the stack.
            qualname = self._stack.pop()
            code_obj_instr = self._stack.pop()
        else:
            # In 3.11 the code object has a co_qualname which we can use.
            qualname = code_obj_instr = self._stack.pop()

        # Flag bits tell which optional values precede the code object on the stack.
        arg = instr.arg
        if arg & 0x08:
            _func_closure = self._stack.pop()
        if arg & 0x04:
            _func_annotations = self._stack.pop()
        if arg & 0x02:
            _func_kwdefaults = self._stack.pop()
        if arg & 0x01:
            _func_defaults = self._stack.pop()

        call_name = self._getcallname(qualname)
        if call_name in ("<genexpr>", "<listcomp>", "<setcomp>", "<dictcomp>"):
            if isinstance(code_obj_instr.arg, CodeType):
                # Remember the inner code object so the call can be expanded later.
                self.func_name_id_to_code_object[_TargetIdHashable(qualname)] = code_obj_instr.arg
        self._stack.append(qualname)

    def on_LOAD_FAST(self, instr):
        self._stack.append(instr)

    on_LOAD_FAST_AND_CLEAR = on_LOAD_FAST
    on_LOAD_FAST_CHECK = on_LOAD_FAST

    def on_LOAD_ASSERTION_ERROR(self, instr):
        self._stack.append(instr)

    on_LOAD_BUILD_CLASS = on_LOAD_FAST

    def on_CALL_METHOD(self, instr):
        # pop the actual args
        for _ in range(instr.arg):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL(self, instr):
        # pop the actual args
        for _ in range(instr.arg):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        if self._getcallname(func_name_instr) is None:
            # No usable name on top: the callee is one entry deeper.
            func_name_instr = self._stack.pop()

        if self._stack:
            peeked = self._stack[-1]
            if peeked.name == "PUSH_NULL":
                # Consume the NULL sentinel pushed for non-method calls (3.11+).
                self._stack.pop()

        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_INTRINSIC_1(self, instr):
        try:
            func_name_instr = self._stack.pop()
        except IndexError:
            return

        if self._stack:
            peeked = self._stack[-1]
            if peeked.name == "PUSH_NULL":
                self._stack.pop()

        self._handle_call_from_instr(func_name_instr, instr)

    def on_PUSH_NULL(self, instr):
        self._stack.append(instr)

    def on_KW_NAMES(self, instr):
        return

    def on_RETURN_CONST(self, instr):
        return

    def on_CALL_FUNCTION(self, instr):
        arg = instr.arg

        argc = arg & 0xFF  # positional args
        argc += (arg >> 8) * 2  # keyword args

        # pop the actual args
        for _ in range(argc):
            try:
                self._stack.pop()
            except IndexError:
                return

        try:
            func_name_instr = self._stack.pop()
        except IndexError:
            return
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_KW(self, instr):
        # names of kw args
        _names_of_kw_args = self._stack.pop()

        # pop the actual args
        arg = instr.arg

        argc = arg & 0xFF  # positional args
        argc += (arg >> 8) * 2  # keyword args

        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_VAR(self, instr):
        # var name
        _var_arg = self._stack.pop()

        # pop the actual args
        arg = instr.arg

        argc = arg & 0xFF  # positional args
        argc += (arg >> 8) * 2  # keyword args

        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_VAR_KW(self, instr):
        # names of kw args
        _names_of_kw_args = self._stack.pop()

        arg = instr.arg

        argc = arg & 0xFF  # positional args
        argc += (arg >> 8) * 2  # keyword args

        # also pop **kwargs
        self._stack.pop()

        # pop the actual args
        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_EX(self, instr):
        if instr.arg & 0x01:
            _kwargs = self._stack.pop()
        _callargs = self._stack.pop()
        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    on_GET_AITER = _no_stack_change
    on_GET_ANEXT = _no_stack_change
    on_END_FOR = _no_stack_change
    on_END_ASYNC_FOR = _no_stack_change
    on_BEFORE_ASYNC_WITH = _no_stack_change
    on_SETUP_ASYNC_WITH = _no_stack_change
    on_YIELD_FROM = _no_stack_change
    on_SETUP_LOOP = _no_stack_change
    on_FOR_ITER = _no_stack_change
    on_BREAK_LOOP = _no_stack_change
    on_JUMP_ABSOLUTE = _no_stack_change
    on_RERAISE = _no_stack_change
    on_LIST_TO_TUPLE = _no_stack_change
    on_CALL_FINALLY = _no_stack_change
    on_POP_FINALLY = _no_stack_change

    def on_JUMP_IF_FALSE_OR_POP(self, instr):
        try:
            self._stack.pop()
        except IndexError:
            return

    on_JUMP_IF_TRUE_OR_POP = on_JUMP_IF_FALSE_OR_POP

    def on_JUMP_IF_NOT_EXC_MATCH(self, instr):
        try:
            self._stack.pop()
        except IndexError:
            return
        try:
            self._stack.pop()
        except IndexError:
            return

    def on_SWAP(self, instr):
        i = instr.arg
        try:
            self._stack[-i], self._stack[-1] = self._stack[-1], self._stack[-i]
        except:
            pass

    def on_ROT_TWO(self, instr):
        try:
            p0 = self._stack.pop()
        except IndexError:
            return

        try:
            p1 = self._stack.pop()
        except:
            self._stack.append(p0)
            return

        self._stack.append(p0)
        self._stack.append(p1)

    def on_ROT_THREE(self, instr):
        try:
            p0 = self._stack.pop()
        except IndexError:
            return

        try:
            p1 = self._stack.pop()
        except:
            self._stack.append(p0)
            return

        try:
            p2 = self._stack.pop()
        except:
            self._stack.append(p0)
            self._stack.append(p1)
            return

        self._stack.append(p0)
        self._stack.append(p1)
        self._stack.append(p2)

    def on_ROT_FOUR(self, instr):
        try:
            p0 = self._stack.pop()
        except IndexError:
            return

        try:
            p1 = self._stack.pop()
        except:
            self._stack.append(p0)
            return

        try:
            p2 = self._stack.pop()
        except:
            self._stack.append(p0)
            self._stack.append(p1)
            return

        try:
            p3 = self._stack.pop()
        except:
            self._stack.append(p0)
            self._stack.append(p1)
            self._stack.append(p2)
            return

        self._stack.append(p0)
        self._stack.append(p1)
        self._stack.append(p2)
        self._stack.append(p3)

    def on_BUILD_LIST_FROM_ARG(self, instr):
        self._stack.append(instr)

    def on_BUILD_MAP(self, instr):
        # Each map entry consumes a key and a value.
        for _i in range(instr.arg):
            self._stack.pop()
            self._stack.pop()
        self._stack.append(instr)

    def on_BUILD_CONST_KEY_MAP(self, instr):
        self.on_POP_TOP(instr)  # keys
        for _i in range(instr.arg):
            self.on_POP_TOP(instr)  # value
        self._stack.append(instr)

    on_YIELD_VALUE = on_POP_TOP
    on_RETURN_VALUE = on_POP_TOP
    on_POP_JUMP_IF_FALSE = on_POP_TOP
    on_POP_JUMP_IF_TRUE = on_POP_TOP
    on_DICT_MERGE = on_POP_TOP
    on_LIST_APPEND = on_POP_TOP
    on_SET_ADD = on_POP_TOP
    on_LIST_EXTEND = on_POP_TOP
    on_UNPACK_EX = on_POP_TOP

    # ok: doesn't change the stack (converts top to getiter(top))
    on_GET_ITER = _no_stack_change
    on_GET_AWAITABLE = _no_stack_change
    on_GET_YIELD_FROM_ITER = _no_stack_change

    def on_RETURN_GENERATOR(self, instr):
        self._stack.append(instr)

    # NOTE(review): the assignment below immediately shadows the def above, so
    # on_RETURN_GENERATOR is effectively a no-op — confirm which was intended.
    on_RETURN_GENERATOR = _no_stack_change
    on_RESUME = _no_stack_change

    def on_MAP_ADD(self, instr):
        self.on_POP_TOP(instr)
        self.on_POP_TOP(instr)

    def on_UNPACK_SEQUENCE(self, instr):
        self._stack.pop()
        for _i in range(instr.arg):
            self._stack.append(instr)

    def on_BUILD_LIST(self, instr):
        for _i in range(instr.arg):
            self.on_POP_TOP(instr)
        self._stack.append(instr)

    on_BUILD_TUPLE = on_BUILD_LIST
    on_BUILD_STRING = on_BUILD_LIST
    on_BUILD_TUPLE_UNPACK_WITH_CALL = on_BUILD_LIST
    on_BUILD_TUPLE_UNPACK = on_BUILD_LIST
    on_BUILD_LIST_UNPACK = on_BUILD_LIST
    on_BUILD_MAP_UNPACK_WITH_CALL = on_BUILD_LIST
    on_BUILD_MAP_UNPACK = on_BUILD_LIST
    on_BUILD_SET = on_BUILD_LIST
    on_BUILD_SET_UNPACK = on_BUILD_LIST

    # NOTE(review): several of the aliases below (on_POP_FINALLY, on_END_FINALLY,
    # on_BEGIN_FINALLY) are assigned more than once in this class; the duplicates
    # are harmless but could be cleaned up.
    on_SETUP_FINALLY = _no_stack_change
    on_POP_FINALLY = _no_stack_change
    on_BEGIN_FINALLY = _no_stack_change
    on_END_FINALLY = _no_stack_change

    def on_RAISE_VARARGS(self, instr):
        for _i in range(instr.arg):
            self.on_POP_TOP(instr)

    on_POP_BLOCK = _no_stack_change
    on_JUMP_FORWARD = _no_stack_change
    on_JUMP_BACKWARD = _no_stack_change
    on_JUMP_BACKWARD_NO_INTERRUPT = _no_stack_change
    on_POP_EXCEPT = _no_stack_change
    on_SETUP_EXCEPT = _no_stack_change
    on_WITH_EXCEPT_START = _no_stack_change

    on_END_FINALLY = _no_stack_change
    on_BEGIN_FINALLY = _no_stack_change
    on_SETUP_WITH = _no_stack_change
    on_WITH_CLEANUP_START = _no_stack_change
    on_WITH_CLEANUP_FINISH = _no_stack_change
    on_FORMAT_VALUE = _no_stack_change
    on_EXTENDED_ARG = _no_stack_change

    def on_INPLACE_ADD(self, instr):
        # This would actually pop 2 and leave the value in the stack.
        # In a += 1 it pop `a` and `1` and leave the resulting value
        # for a load. In our case, let's just pop the `1` and leave the `a`
        # instead of leaving the INPLACE_ADD bytecode.
        try:
            self._stack.pop()
        except IndexError:
            pass

    on_INPLACE_POWER = on_INPLACE_ADD
    on_INPLACE_MULTIPLY = on_INPLACE_ADD
    on_INPLACE_MATRIX_MULTIPLY = on_INPLACE_ADD
    on_INPLACE_TRUE_DIVIDE = on_INPLACE_ADD
    on_INPLACE_FLOOR_DIVIDE = on_INPLACE_ADD
    on_INPLACE_MODULO = on_INPLACE_ADD
    on_INPLACE_SUBTRACT = on_INPLACE_ADD
    on_INPLACE_RSHIFT = on_INPLACE_ADD
    on_INPLACE_LSHIFT = on_INPLACE_ADD
    on_INPLACE_AND = on_INPLACE_ADD
    on_INPLACE_OR = on_INPLACE_ADD
    on_INPLACE_XOR = on_INPLACE_ADD

    def on_DUP_TOP(self, instr):
        try:
            i = self._stack[-1]
        except IndexError:
            # ok (in the start of block)
            self._stack.append(instr)
        else:
            self._stack.append(i)

    def on_DUP_TOP_TWO(self, instr):
        if len(self._stack) == 0:
            self._stack.append(instr)
            return

        if len(self._stack) == 1:
            i = self._stack[-1]
            self._stack.append(i)
            self._stack.append(instr)
            return

        i = self._stack[-1]
        j = self._stack[-2]
        self._stack.append(j)
        self._stack.append(i)

    def on_BUILD_SLICE(self, instr):
        for _ in range(instr.arg):
            try:
                self._stack.pop()
            except IndexError:
                pass
        self._stack.append(instr)

    def on_STORE_SUBSCR(self, instr):
        try:
            self._stack.pop()
            self._stack.pop()
            self._stack.pop()
        except IndexError:
            pass

    def on_DELETE_SUBSCR(self, instr):
        try:
            self._stack.pop()
            self._stack.pop()
        except IndexError:
            pass

    # Note: on Python 3 this is only found on interactive mode to print the results of
    # some evaluation.
    on_PRINT_EXPR = on_POP_TOP

    on_LABEL = _no_stack_change
    on_UNARY_POSITIVE = _no_stack_change
    on_UNARY_NEGATIVE = _no_stack_change
    on_UNARY_NOT = _no_stack_change
    on_UNARY_INVERT = _no_stack_change

    on_CACHE = _no_stack_change
    on_PRECALL = _no_stack_change
|
||||
|
||||
|
||||
def _get_smart_step_into_targets(code):
    """Collect smart-step-into Targets for *code*.

    Builds a control-flow graph and runs a _StackInterpreter over each basic
    block; unknown/failed opcodes are skipped unless STRICT_MODE is on.

    :return list(Target)
    """
    cfg = bytecode_cfg.ControlFlowGraph.from_bytecode(bytecode.Bytecode.from_code(code))

    collected = []

    for block in cfg:
        if DEBUG:
            print("\nStart block----")
        interpreter = _StackInterpreter(block)
        for instr in block:
            if isinstance(instr, (Label,)):
                # No name for these
                continue
            try:
                handler_name = "on_%s" % (instr.name,)
                handler = getattr(interpreter, handler_name, None)

                if handler is None:
                    if STRICT_MODE:
                        raise AssertionError("%s not found." % (handler_name,))
                    if DEBUG:
                        print("Skipping: %s." % (handler_name,))
                    continue
                handler(instr)

                if DEBUG:
                    if instr.name != "CACHE":  # Filter the ones we don't want to see.
                        print("\nHandled: ", instr, ">>", interpreter._getname(instr), "<<")
                        print("New stack:")
                        for entry in interpreter._stack:
                            print(" arg:", interpreter._getname(entry), "(", entry, ")")
            except:
                if STRICT_MODE:
                    raise  # Error in strict mode.
                # In non-strict mode, log it (if in verbose mode) and keep on going.
                if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                    pydev_log.exception("Exception computing step into targets (handled).")

        collected.extend(interpreter.function_calls)
        # No longer considering attr loads as calls (while in theory sometimes it's possible
        # that something as `some.attr` can turn out to be a property which could be stepped
        # in, it's not that common in practice and can be surprising for users, so, disabling
        # step into from stepping into properties).
        # ret.extend(stack.load_attrs.values())

        if DEBUG:
            print("\nEnd block----")
    return collected
|
||||
|
||||
|
||||
# Note that the offset is unique within the frame (so, we can use it as the target id).
# Also, as the offset is the instruction offset within the frame, it's possible
# to inspect the parent frame for frame.f_lasti to know where we actually are (as the
# caller name may not always match the new frame name).
class Variant(object):
    """User-facing representation of one smart-step-into target."""

    __slots__ = ["name", "is_visited", "line", "offset", "call_order", "children_variants", "parent", "endlineno", "startcol", "endcol"]

    def __init__(self, name, is_visited, line, offset, call_order, children_variants=None, endlineno=-1, startcol=-1, endcol=-1):
        self.name = name
        self.is_visited = is_visited
        self.line = line
        self.offset = offset
        self.call_order = call_order
        # End/column info is only meaningful on Python 3.11+ (otherwise -1).
        self.endlineno = endlineno
        self.startcol = startcol
        self.endcol = endcol
        self.parent = None
        self.children_variants = children_variants
        # Back-link the children to this variant.
        for child in children_variants or ():
            child.parent = self

    def __repr__(self):
        descriptions = []
        for slot in self.__slots__:
            if slot == "parent":
                try:
                    parent = self.parent
                except AttributeError:
                    descriptions.append("%s: <not set>" % (slot,))
                    continue
                if parent is None:
                    descriptions.append("parent: None")
                else:
                    descriptions.append("parent: %s (%s)" % (parent.name, parent.offset))
                continue

            if slot == "children_variants":
                child_count = len(self.children_variants) if self.children_variants else 0
                descriptions.append("children_variants: %s" % (child_count,))
                continue

            try:
                descriptions.append("%s= %s" % (slot, getattr(self, slot)))
            except AttributeError:
                descriptions.append("%s: <not set>" % (slot,))
        return "Variant(%s)" % ", ".join(descriptions)

    __str__ = __repr__
|
||||
|
||||
|
||||
def _convert_target_to_variant(target, start_line, end_line, call_order_cache: dict, lasti: int, base: int):
    """Convert a Target into a Variant, or return None when it's filtered out.

    Targets whose name is not a string or whose line falls outside the
    [start_line, end_line] range are dropped. *call_order_cache* counts how many
    times each name was seen so repeated calls can be disambiguated; *lasti*
    marks targets already executed; *base* is subtracted from line numbers.
    """
    name = target.arg
    if not isinstance(name, str) or not (start_line <= target.lineno <= end_line):
        return

    call_order = call_order_cache.get(name, 0) + 1
    call_order_cache[name] = call_order

    children_variants = None
    if target.children_targets:
        children_variants = [
            _convert_target_to_variant(child, start_line, end_line, call_order_cache, lasti, base)
            for child in target.children_targets
        ]

    return Variant(
        name,
        target.offset <= lasti,  # is_visited: already executed up to here.
        target.lineno - base,
        target.offset,
        call_order,
        children_variants,
        # Only really matter in Python 3.11
        target.endlineno - base if target.endlineno >= 0 else -1,
        target.startcol,
        target.endcol,
    )
|
||||
|
||||
|
||||
def calculate_smart_step_into_variants(frame, start_line, end_line, base=0):
    """
    Calculate smart step into variants for the given line range.
    :param frame:
    :type frame: :py:class:`types.FrameType`
    :param start_line:
    :param end_line:
    :return: A list of call names from the first to the last.
    :note: it's guaranteed that the offsets appear in order.
    :raise: :py:class:`RuntimeError` if failed to parse the bytecode or if dis cannot be used.
    """
    # Python 3.11+ has precise position info: delegate to the dedicated module.
    if IS_PY311_OR_GREATER:
        from . import pydevd_bytecode_utils_py311

        return pydevd_bytecode_utils_py311.calculate_smart_step_into_variants(frame, start_line, end_line, base)

    code = frame.f_code
    lasti = frame.f_lasti
    call_order_cache = {}

    if DEBUG:
        print("dis.dis:")
        if IS_PY311_OR_GREATER:
            dis.dis(code, show_caches=False)
        else:
            dis.dis(code)

    converted = (
        _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base)
        for target in _get_smart_step_into_targets(code)
    )
    # _convert_target_to_variant returns None for filtered-out targets.
    return [variant for variant in converted if variant is not None]
|
||||
|
||||
|
||||
def get_smart_step_into_variant_from_frame_offset(frame_f_lasti, variants):
    """
    Given the frame.f_lasti, return the related `Variant`.

    :note: if the offset is found before any variant available or no variants are
    available, None is returned.

    :rtype: Variant|NoneType
    """
    if not variants:
        return None

    # Variants are ordered by offset: find the last one at or before f_lasti.
    idx = bisect(KeyifyList(variants, lambda v: v.offset), frame_f_lasti)
    return variants[idx - 1] if idx else None
|
||||
+105
@@ -0,0 +1,105 @@
|
||||
from _pydevd_bundle.pydevd_constants import IS_PY311_OR_GREATER
|
||||
import dis
|
||||
from types import CodeType
|
||||
from collections import namedtuple
|
||||
|
||||
# Verbose tracing of target collection (print-based; development only).
DEBUG = False

# Source position of a call expression: start/end line and start/end column.
_Pos = namedtuple("_Pos", "lineno endlineno startcol endcol")
|
||||
|
||||
|
||||
def _is_inside(item_pos: _Pos, container_pos: _Pos):
|
||||
if item_pos.lineno < container_pos.lineno or item_pos.endlineno > container_pos.endlineno:
|
||||
return False
|
||||
|
||||
if item_pos.lineno == container_pos.lineno:
|
||||
if item_pos.startcol < container_pos.startcol:
|
||||
return False
|
||||
|
||||
if item_pos.endlineno == container_pos.endlineno:
|
||||
if item_pos.endcol > container_pos.endcol:
|
||||
return False
|
||||
|
||||
# Not outside, must be inside.
|
||||
return True
|
||||
|
||||
|
||||
def _get_smart_step_into_targets(code):
    """
    Recursively collect smart-step-into targets (call sites) from a code
    object using Python 3.11+ bytecode position information.

    :param code: code object to inspect.
    :return: list(Target) with one entry per CALL/CALL_INTRINSIC_1 instruction
        whose position information is available; each Target carries the call
        source text, its position, and any child targets from nested code
        objects positioned inside it.
    """
    import linecache
    from .pydevd_bytecode_utils import Target

    filename = code.co_filename

    targets_root = []
    children = []
    for instr in dis.Bytecode(code):
        if instr.opname == "LOAD_CONST":
            # Nested code objects (lambdas, comprehensions, ...) are scanned
            # recursively; their targets are attached to enclosing calls below.
            if isinstance(instr.argval, CodeType):
                children.append(_get_smart_step_into_targets(instr.argval))

        elif instr.opname in ("CALL", "CALL_INTRINSIC_1"):
            positions = instr.positions
            # Skip calls with incomplete position info (can't map to source).
            if positions.lineno is None:
                continue
            if positions.end_lineno is None:
                continue

            # Recover the call expression's source text from the file.
            lines = []
            for lineno in range(positions.lineno, positions.end_lineno + 1):
                lines.append(linecache.getline(filename, lineno))

            startcol = positions.col_offset
            endcol = positions.end_col_offset

            if positions.lineno == positions.end_lineno:
                lines[0] = lines[0][startcol:endcol]
            else:
                lines[0] = lines[0][startcol:]
                lines[-1] = lines[-1][:endcol]

            # Note: the original also computed an unused ``pos`` here; removed.
            targets_root.append(Target("".join(lines), positions.lineno, instr.offset, [], positions.end_lineno, startcol, endcol))

    # Attach each child target to the first enclosing call that contains it.
    for targets in children:
        for child_target in targets:
            pos = _Pos(child_target.lineno, child_target.endlineno, child_target.startcol, child_target.endcol)

            for outer_target in targets_root:
                outer_pos = _Pos(outer_target.lineno, outer_target.endlineno, outer_target.startcol, outer_target.endcol)
                if _is_inside(pos, outer_pos):
                    outer_target.children_targets.append(child_target)
                    break
    return targets_root
|
||||
|
||||
|
||||
def calculate_smart_step_into_variants(frame, start_line, end_line, base=0):
    """
    Calculate smart step into variants for the given line range.

    :param frame:
    :type frame: :py:class:`types.FrameType`

    :param start_line:
    :param end_line:
    :return: A list of call names from the first to the last.
    :note: it's guaranteed that the offsets appear in order.
    :raise: :py:class:`RuntimeError` if failed to parse the bytecode or if dis cannot be used.
    """
    from .pydevd_bytecode_utils import _convert_target_to_variant

    code = frame.f_code
    lasti = frame.f_lasti
    call_order_cache = {}

    if DEBUG:
        print("dis.dis:")
        # show_caches only exists (and matters) on 3.11+.
        if IS_PY311_OR_GREATER:
            dis.dis(code, show_caches=False)
        else:
            dis.dis(code)

    # Keep only the targets that convert to a valid variant for this range.
    found = []
    for call_target in _get_smart_step_into_targets(code):
        converted = _convert_target_to_variant(call_target, start_line, end_line, call_order_cache, lasti, base)
        if converted is None:
            continue
        found.append(converted)

    return found
|
||||
@@ -0,0 +1,605 @@
|
||||
"""
|
||||
Decompiler that can be used with the debugger (where statements correctly represent the
|
||||
line numbers).
|
||||
|
||||
Note: this is a work in progress / proof of concept / not ready to be used.
|
||||
"""
|
||||
|
||||
import dis
|
||||
|
||||
from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions
|
||||
from _pydev_bundle import pydev_log
|
||||
import sys
|
||||
import inspect
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class _Stack(object):
    """Minimal LIFO stack used while simulating bytecode evaluation."""

    def __init__(self):
        # Top of the stack is the last element of the list.
        self._contents = []

    def push(self, obj):
        self._contents.append(obj)

    def pop(self):
        # Raises IndexError when empty (callers rely on a well-formed stack).
        return self._contents.pop()
|
||||
|
||||
|
||||
INDENT_MARKER = object()
|
||||
DEDENT_MARKER = object()
|
||||
_SENTINEL = object()
|
||||
|
||||
DEBUG = False
|
||||
|
||||
|
||||
class _Token(object):
    def __init__(self, i_line, instruction=None, tok=_SENTINEL, priority=0, after=None, end_of_line=False):
        """
        One unit of output text tied to a source line.

        :param i_line: line number this token should be written on.
        :param instruction: bytecode instruction this token was derived from (if any).
        :param tok: explicit text for the token; when omitted it is derived
            from ``instruction.argval``.
        :param priority: relative priority (stored, not otherwise used here).
        :param after: a _Token or _BaseHandler this token must appear after.
        :param end_of_line:
            Marker to signal only after all the other tokens have been written.
        """
        self.i_line = i_line
        if tok is not _SENTINEL:
            self.tok = tok
        else:
            if instruction is not None:
                # A nested code object has no direct textual representation.
                if inspect.iscode(instruction.argval):
                    self.tok = ""
                else:
                    self.tok = str(instruction.argval)
            else:
                raise AssertionError("Either the tok or the instruction is needed.")
        self.instruction = instruction
        self.priority = priority
        self.end_of_line = end_of_line
        # Tokens that must be written before this one.
        self._after_tokens = set()
        # Handlers whose tokens must all be written before this one.
        self._after_handler_tokens = set()
        if after:
            self.mark_after(after)

    def mark_after(self, v):
        # Record an ordering dependency on a single token or a whole handler.
        if isinstance(v, _Token):
            self._after_tokens.add(v)
        elif isinstance(v, _BaseHandler):
            self._after_handler_tokens.add(v)

        else:
            raise AssertionError("Unhandled: %s" % (v,))

    def get_after_tokens(self):
        """Return all tokens that must precede this one (handlers flattened)."""
        ret = self._after_tokens.copy()
        for handler in self._after_handler_tokens:
            ret.update(handler.tokens)
        return ret

    def __repr__(self):
        return "Token(%s, after: %s)" % (self.tok, self.get_after_tokens())

    __str__ = __repr__
|
||||
|
||||
|
||||
class _Writer(object):
    """Accumulates tokens and indent/dedent markers per output line."""

    def __init__(self):
        # line number -> list of _Token / INDENT_MARKER / DEDENT_MARKER.
        self.line_to_contents = {}
        # Every token ever written (each token is emitted at most once).
        self.all_tokens = set()

    def get_line(self, line):
        """Return (creating if needed) the contents list for the given line."""
        return self.line_to_contents.setdefault(line, [])

    def indent(self, line):
        self.get_line(line).append(INDENT_MARKER)

    def dedent(self, line):
        self.get_line(line).append(DEDENT_MARKER)

    def write(self, line, token):
        if token in self.all_tokens:
            # Already written once: skip duplicates.
            return
        self.all_tokens.add(token)
        assert isinstance(token, _Token)
        self.get_line(line).append(token)
|
||||
|
||||
|
||||
class _BaseHandler(object):
    """Base class for opcode handlers; subclasses implement ``_handle``."""

    def __init__(self, i_line, instruction, stack, writer, disassembler):
        self.i_line = i_line
        self.instruction = instruction
        self.stack = stack
        self.writer = writer
        self.disassembler = disassembler
        self.tokens = []
        # Handlers do all their work eagerly, on construction.
        self._handle()

    def _write_tokens(self):
        writer = self.writer
        for token in self.tokens:
            writer.write(token.i_line, token)

    def _handle(self):
        raise NotImplementedError(self)

    def __repr__(self, *args, **kwargs):
        try:
            return "%s line:%s" % (self.instruction, self.i_line)
        except:
            # Keep repr() usable even with partially-initialized state.
            return object.__repr__(self)

    __str__ = __repr__
|
||||
|
||||
|
||||
# Maps an opcode name (e.g. "LOAD_CONST") to the handler class for it.
_op_name_to_handler = {}


def _register(cls):
    """Class decorator: register ``cls`` as the handler for ``cls.opname``."""
    _op_name_to_handler[cls.opname] = cls
    return cls
|
||||
|
||||
|
||||
class _BasePushHandler(_BaseHandler):
    # Base for opcodes whose only effect here is pushing themselves on the stack.
    def _handle(self):
        self.stack.push(self)
|
||||
|
||||
|
||||
class _BaseLoadHandler(_BasePushHandler):
    # Base for LOAD_* opcodes: push self and emit one token for the loaded value.
    def _handle(self):
        _BasePushHandler._handle(self)
        self.tokens = [_Token(self.i_line, self.instruction)]
|
||||
|
||||
|
||||
@_register
class _LoadBuildClass(_BasePushHandler):
    # LOAD_BUILD_CLASS: only pushed on the stack, no source text emitted.
    opname = "LOAD_BUILD_CLASS"
|
||||
|
||||
|
||||
@_register
class _LoadConst(_BaseLoadHandler):
    # LOAD_CONST: emits the constant's repr-ish text (via _Token/argval).
    opname = "LOAD_CONST"
|
||||
|
||||
|
||||
@_register
class _LoadName(_BaseLoadHandler):
    # LOAD_NAME: emits the loaded name.
    opname = "LOAD_NAME"
|
||||
|
||||
|
||||
@_register
class _LoadGlobal(_BaseLoadHandler):
    # LOAD_GLOBAL: emits the loaded global name.
    opname = "LOAD_GLOBAL"
|
||||
|
||||
|
||||
@_register
class _LoadFast(_BaseLoadHandler):
    # LOAD_FAST: emits the loaded local variable name.
    opname = "LOAD_FAST"
|
||||
|
||||
|
||||
@_register
class _GetIter(_BaseHandler):
    """
    Implements TOS = iter(TOS).
    """

    opname = "GET_ITER"
    # Handler that produced the iterable being wrapped (set in _handle).
    iter_target = None

    def _handle(self):
        # Reuse the iterable's tokens as-is: iter() is implicit in source form.
        self.iter_target = self.stack.pop()
        self.tokens.extend(self.iter_target.tokens)
        self.stack.push(self)
|
||||
|
||||
|
||||
@_register
class _ForIter(_BaseHandler):
    """
    TOS is an iterator. Call its __next__() method. If this yields a new value, push it on the stack
    (leaving the iterator below it). If the iterator indicates it is exhausted TOS is popped, and
    the byte code counter is incremented by delta.
    """

    opname = "FOR_ITER"

    # Handler for the iterated expression (set in _handle).
    iter_in = None

    def _handle(self):
        self.iter_in = self.stack.pop()
        self.stack.push(self)

    def store_in_name(self, store_name):
        """Emit 'for <name> in <iterable>:' once the loop target is stored."""
        for_token = _Token(self.i_line, None, "for ")
        self.tokens.append(for_token)
        prev = for_token

        t_name = _Token(store_name.i_line, store_name.instruction, after=prev)
        self.tokens.append(t_name)
        prev = t_name

        in_token = _Token(store_name.i_line, None, " in ", after=prev)
        self.tokens.append(in_token)
        prev = in_token

        max_line = store_name.i_line
        if self.iter_in:
            # Chain the iterable's tokens after " in ", keeping their order.
            for t in self.iter_in.tokens:
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                prev = t
            self.tokens.extend(self.iter_in.tokens)

        colon_token = _Token(self.i_line, None, ":", after=prev)
        self.tokens.append(colon_token)
        # NOTE(review): this reassignment of ``prev`` is never read afterwards —
        # looks like dead code; confirm before removing.
        prev = for_token

        self._write_tokens()
|
||||
|
||||
|
||||
@_register
class _StoreName(_BaseHandler):
    """
    Implements name = TOS. namei is the index of name in the attribute co_names of the code object.
    The compiler tries to use STORE_FAST or STORE_GLOBAL if possible.
    """

    opname = "STORE_NAME"

    def _handle(self):
        v = self.stack.pop()

        if isinstance(v, _ForIter):
            # Storing the loop variable: delegate so the full 'for ... in ...:'
            # line is emitted by the FOR_ITER handler.
            v.store_in_name(self)
        else:
            # 'def name(...)' already carries its own name; only emit
            # 'name = <value>' for regular assignments and lambdas.
            if not isinstance(v, _MakeFunction) or v.is_lambda:
                # Anchor the assignment on the earliest line of its value.
                line = self.i_line
                for t in v.tokens:
                    line = min(line, t.i_line)

                t_name = _Token(line, self.instruction)
                t_equal = _Token(line, None, "=", after=t_name)

                self.tokens.append(t_name)
                self.tokens.append(t_equal)

                for t in v.tokens:
                    t.mark_after(t_equal)
                self.tokens.extend(v.tokens)

                self._write_tokens()
|
||||
|
||||
|
||||
@_register
class _ReturnValue(_BaseHandler):
    """
    Returns with TOS to the caller of the function.
    """

    opname = "RETURN_VALUE"

    def _handle(self):
        returned = self.stack.pop()
        # end_of_line: "return " is only flushed after the other line tokens.
        ret_tok = _Token(self.i_line, None, "return ", end_of_line=True)
        self.tokens.append(ret_tok)
        for tok in returned.tokens:
            tok.mark_after(ret_tok)
        self.tokens.extend(returned.tokens)

        self._write_tokens()
|
||||
|
||||
|
||||
@_register
class _CallFunction(_BaseHandler):
    """

    CALL_FUNCTION(argc)

    Calls a callable object with positional arguments. argc indicates the number of positional
    arguments. The top of the stack contains positional arguments, with the right-most argument
    on top. Below the arguments is a callable object to call. CALL_FUNCTION pops all arguments
    and the callable object off the stack, calls the callable object with those arguments, and
    pushes the return value returned by the callable object.

    Changed in version 3.6: This opcode is used only for calls with positional arguments.

    """

    opname = "CALL_FUNCTION"

    def _handle(self):
        # Pop argc arguments plus the callable itself.
        args = []
        for _i in range(self.instruction.argval + 1):
            arg = self.stack.pop()
            args.append(arg)
        # Reversed so the callable comes first, then args left-to-right.
        it = reversed(args)
        name = next(it)
        max_line = name.i_line
        for t in name.tokens:
            self.tokens.append(t)

        tok_open_parens = _Token(name.i_line, None, "(", after=name)
        self.tokens.append(tok_open_parens)

        prev = tok_open_parens
        for i, arg in enumerate(it):
            for t in arg.tokens:
                t.mark_after(name)
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                self.tokens.append(t)
            prev = arg

            # NOTE(review): the comma is appended AFTER the argument from the
            # second one onwards, which appears to place separators oddly —
            # this module is marked work-in-progress; confirm before changing.
            if i > 0:
                comma_token = _Token(prev.i_line, None, ",", after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

        tok_close_parens = _Token(max_line, None, ")", after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        self.stack.push(self)
|
||||
|
||||
|
||||
@_register
class _MakeFunctionPy3(_BaseHandler):
    """
    Pushes a new function object on the stack. From bottom to top, the consumed stack must consist
    of values if the argument carries a specified flag value

    0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order

    0x02 a dictionary of keyword-only parameters' default values

    0x04 an annotation dictionary

    0x08 a tuple containing cells for free variables, making a closure

    the code associated with the function (at TOS1)

    the qualified name of the function (at TOS)
    """

    opname = "MAKE_FUNCTION"
    is_lambda = False

    def _handle(self):
        stack = self.stack
        self.qualified_name = stack.pop()
        self.code = stack.pop()

        # Only the positional-defaults flag (0x01) is handled here.
        default_node = None
        if self.instruction.argval & 0x01:
            default_node = stack.pop()

        # Lambdas get no 'def' keyword and keep no name token text.
        is_lambda = self.is_lambda = "<lambda>" in [x.tok for x in self.qualified_name.tokens]

        if not is_lambda:
            def_token = _Token(self.i_line, None, "def ")
            self.tokens.append(def_token)

        for token in self.qualified_name.tokens:
            self.tokens.append(token)
            if not is_lambda:
                token.mark_after(def_token)
            prev = token

        open_parens_token = _Token(self.i_line, None, "(", after=prev)
        self.tokens.append(open_parens_token)
        prev = open_parens_token

        code = self.code.instruction.argval

        # Right-align the defaults against the variable names: names without a
        # default get the _SENTINEL placeholder.
        # NOTE(review): co_varnames includes non-parameter locals, so this may
        # emit extra "arguments" — module is work-in-progress; confirm.
        if default_node:
            defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(
                default_node.instruction.argval
            )
        else:
            defaults = [_SENTINEL] * len(code.co_varnames)

        for i, arg in enumerate(code.co_varnames):
            if i > 0:
                comma_token = _Token(prev.i_line, None, ", ", after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

            arg_token = _Token(self.i_line, None, arg, after=prev)
            self.tokens.append(arg_token)

            default = defaults[i]
            if default is not _SENTINEL:
                eq_token = _Token(default_node.i_line, None, "=", after=prev)
                self.tokens.append(eq_token)
                prev = eq_token

                default_token = _Token(default_node.i_line, None, str(default), after=prev)
                self.tokens.append(default_token)
                prev = default_token

        tok_close_parens = _Token(prev.i_line, None, "):", after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        stack.push(self)
        # The function body starts indented on the next line and dedents after
        # the last line produced by merging the nested code object.
        self.writer.indent(prev.i_line + 1)
        self.writer.dedent(max(self.disassembler.merge_code(code)))
|
||||
|
||||
|
||||
# Alias: only the Python 3 variant exists at this point.
_MakeFunction = _MakeFunctionPy3
|
||||
|
||||
|
||||
def _print_after_info(line_contents, stream=None):
    """
    Debug helper: write each token and its "after" requisites to ``stream``.

    :param line_contents: iterable of _Token-like objects.
    :param stream: file-like target; defaults to sys.stdout.
    """
    if stream is None:
        stream = sys.stdout
    for token in line_contents:
        after_tokens = token.get_after_tokens()
        if after_tokens:
            # Reuse the already-computed after_tokens (the original called
            # get_after_tokens() a second time here).
            s = "%s after: %s\n" % (repr(token.tok), ('"' + '", "'.join(t.tok for t in after_tokens) + '"'))
            stream.write(s)
        else:
            # Bug fix: this branch was missing its trailing newline, making
            # consecutive no-requisite entries run together in the output.
            stream.write("%s (NO REQUISITES)\n" % repr(token.tok))
|
||||
|
||||
|
||||
def _compose_line_contents(line_contents, previous_line_tokens):
    """
    Join a line's tokens into text, honoring each token's "after" ordering.

    Repeatedly emits every token whose requisites are already emitted (in this
    line or a previous one); ``end_of_line`` tokens are held back until no
    other token can be emitted. ``previous_line_tokens`` is updated in place.
    """
    lst = []
    handled = set()

    # Pull end-of-line tokens out; they are re-added only once the rest stalls.
    add_to_end_of_line = []
    delete_indexes = []
    for i, token in enumerate(line_contents):
        if token.end_of_line:
            add_to_end_of_line.append(token)
            delete_indexes.append(i)
    for i in reversed(delete_indexes):
        del line_contents[i]
    del delete_indexes

    while line_contents:
        added = False
        delete_indexes = []

        for i, token in enumerate(line_contents):
            after_tokens = token.get_after_tokens()
            for after in after_tokens:
                if after not in handled and after not in previous_line_tokens:
                    break
            else:
                # All requisites satisfied: emit this token.
                added = True
                previous_line_tokens.add(token)
                handled.add(token)
                lst.append(token.tok)
                delete_indexes.append(i)

        for i in reversed(delete_indexes):
            del line_contents[i]

        if not added:
            if add_to_end_of_line:
                # Nothing else can be emitted: now allow end-of-line tokens.
                line_contents.extend(add_to_end_of_line)
                del add_to_end_of_line[:]
                continue

            # Something is off, let's just add as is.
            for token in line_contents:
                if token not in handled:
                    lst.append(token.tok)

            stream = StringIO()
            _print_after_info(line_contents, stream)
            pydev_log.critical("Error. After markers are not correct:\n%s", stream.getvalue())
            break
    return "".join(lst)
|
||||
|
||||
|
||||
class _PyCodeToSource(object):
    """
    Walks a code object's instructions through the registered opcode handlers
    and reassembles an approximation of the original source, line by line.
    """

    def __init__(self, co, memo=None):
        if memo is None:
            memo = {}
        # Shared across nested code objects while merging.
        self.memo = memo
        self.co = co
        self.instructions = list(iter_instructions(co))
        self.stack = _Stack()
        self.writer = _Writer()

    def _process_next(self, i_line):
        # Consume one instruction; dispatch to its handler if one is registered.
        instruction = self.instructions.pop(0)
        handler_class = _op_name_to_handler.get(instruction.opname)
        if handler_class is not None:
            s = handler_class(i_line, instruction, self.stack, self.writer, self)
            if DEBUG:
                print(s)

        else:
            if DEBUG:
                print("UNHANDLED", instruction)

    def build_line_to_contents(self):
        """Process all instructions; return the writer's line->contents map."""
        co = self.co

        op_offset_to_line = dict(dis.findlinestarts(co))
        curr_line_index = 0

        instructions = self.instructions
        while instructions:
            instruction = instructions[0]
            # Track the current source line as instruction offsets cross
            # line-start boundaries.
            new_line_index = op_offset_to_line.get(instruction.offset)
            if new_line_index is not None:
                curr_line_index = new_line_index

            self._process_next(curr_line_index)
        return self.writer.line_to_contents

    def merge_code(self, code):
        """
        Decompile a nested code object and merge its lines into this writer.

        :return: the list of line numbers that received merged contents.
        """
        if DEBUG:
            print("merge code ----")
        # for d in dir(code):
        #     if not d.startswith('_'):
        #         print(d, getattr(code, d))
        line_to_contents = _PyCodeToSource(code, self.memo).build_line_to_contents()
        lines = []
        for line, contents in sorted(line_to_contents.items()):
            lines.append(line)
            self.writer.get_line(line).extend(contents)
        if DEBUG:
            print("end merge code ----")
        return lines

    def disassemble(self):
        """Return the reconstructed source text for the whole code object."""
        show_lines = False
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        indent = ""
        previous_line_tokens = set()
        for i_line, contents in sorted(line_to_contents.items()):
            # Pad skipped line numbers so output lines match source lines.
            while last_line < i_line - 1:
                if show_lines:
                    stream.write("%s.\n" % (last_line + 1,))
                else:
                    stream.write("\n")
                last_line += 1

            line_contents = []
            dedents_found = 0
            for part in contents:
                if part is INDENT_MARKER:
                    if DEBUG:
                        print("found indent", i_line)
                    # NOTE(review): assumed 4 spaces to match the 4-char
                    # dedent slice below — confirm against the original file.
                    indent += "    "
                    continue
                if part is DEDENT_MARKER:
                    if DEBUG:
                        print("found dedent", i_line)
                    # Dedents take effect only after this line is written.
                    dedents_found += 1
                    continue
                line_contents.append(part)

            s = indent + _compose_line_contents(line_contents, previous_line_tokens)
            if show_lines:
                stream.write("%s. %s\n" % (i_line, s))
            else:
                stream.write("%s\n" % s)

            if dedents_found:
                indent = indent[: -(4 * dedents_found)]
            last_line = i_line

        return stream.getvalue()
|
||||
|
||||
|
||||
def code_obj_to_source(co):
    """
    Converts a code object to source code to provide a suitable representation for the compiler when
    the actual source code is not found.

    This is a work in progress / proof of concept / not ready to be used.
    """
    decompiled = _PyCodeToSource(co).disassemble()
    if DEBUG:
        print(decompiled)
    return decompiled
|
||||
+873
@@ -0,0 +1,873 @@
|
||||
import dis
|
||||
import inspect
|
||||
import sys
|
||||
from collections import namedtuple
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from opcode import EXTENDED_ARG, HAVE_ARGUMENT, cmp_op, hascompare, hasconst, hasfree, hasjrel, haslocal, hasname, opname
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TryExceptInfo(object):
    """Describes one try..except block found in a code object."""

    def __init__(self, try_line, ignore=False):
        """
        :param try_line:
        :param ignore:
            Usually we should ignore any block that's not a try..except
            (this can happen for finally blocks, with statements, etc, for
            which we create temporary entries).
        """
        self.try_line = try_line
        self.ignore = ignore
        self.except_line = -1
        self.except_end_line = -1
        self.raise_lines_in_except = []

        # Note: these may not be available if generated from source instead of bytecode.
        self.except_bytecode_offset = -1
        self.except_end_bytecode_offset = -1

    def is_line_in_try_block(self, line):
        # The try block ends right where the except block starts.
        return self.try_line <= line < self.except_line

    def is_line_in_except_block(self, line):
        # The except block is inclusive on both ends.
        return self.except_line <= line <= self.except_end_line

    def __str__(self):
        raises = ""
        if self.raise_lines_in_except:
            raises = " raises: %s" % (", ".join(str(x) for x in self.raise_lines_in_except),)
        return "{try:%s except %s end block %s%s}" % (
            self.try_line,
            self.except_line,
            self.except_end_line,
            raises,
        )

    __repr__ = __str__
|
||||
|
||||
|
||||
class ReturnInfo(object):
    """Describes a single return statement found in a code object."""

    def __init__(self, return_line):
        # Line of the return (relative or absolute per the caller's choice).
        self.return_line = return_line

    def __str__(self):
        return "{return: %s}" % (self.return_line,)

    __repr__ = __str__
|
||||
|
||||
|
||||
def _get_line(op_offset_to_line, op_offset, firstlineno, search=False):
    """
    Map a bytecode offset to a line number (relative to ``firstlineno``).

    :param op_offset_to_line: dict of bytecode offset -> absolute line.
    :param op_offset: the offset to look up.
    :param firstlineno: subtracted from the found line (pass 0 for absolute lines).
    :param search: when True, walk backwards over lower offsets until a mapped
        one is found; when False, only the exact offset is considered and
        None is returned if it is unmapped.
    :raise AssertionError: when ``search`` is True and no offset at or below
        ``op_offset`` is mapped.
    """
    op_offset_original = op_offset
    while op_offset >= 0:
        ret = op_offset_to_line.get(op_offset)
        if ret is not None:
            return ret - firstlineno
        if not search:
            return ret
        else:
            op_offset -= 1
    # Bug fix: the original message was missing a space ("...: %s.Info: %s").
    raise AssertionError("Unable to find line for offset: %s. Info: %s" % (op_offset_original, op_offset_to_line))
|
||||
|
||||
|
||||
def debug(s):
    # Intentionally a no-op: kept as a hook that can be filled in locally
    # when tracing this module.
    pass
|
||||
|
||||
|
||||
# Lightweight stand-in mirroring the dis.Instruction fields this module uses.
_Instruction = namedtuple("_Instruction", "opname, opcode, starts_line, argval, is_jump_target, offset, argrepr")
|
||||
|
||||
|
||||
def iter_instructions(co):
    """
    Yield the ``dis.Instruction`` entries of ``co`` in bytecode order.

    :param co: a code object.
    """
    # Note: the original built an offset->instruction dict that was never read
    # and guarded the loop with a redundant truthiness check; both removed
    # (iterating an empty sequence already yields nothing).
    yield from dis.Bytecode(co)
|
||||
|
||||
|
||||
def collect_return_info(co, use_func_first_line=False):
    """
    Return a list of ReturnInfo, one per return opcode found in ``co``.

    :param co: the code object to inspect.
    :param use_func_first_line: when True, returned lines are relative to the
        function's first line; otherwise they are absolute.
    """
    if not hasattr(co, "co_lines") and not hasattr(co, "co_lnotab"):
        # No line information available for this object.
        return []

    firstlineno = co.co_firstlineno if use_func_first_line else 0

    op_offset_to_line = dict(dis.findlinestarts(co))
    found = []
    for instr in iter_instructions(co):
        # RETURN_CONST exists only on newer Pythons; accept both spellings.
        if instr.opname in ("RETURN_VALUE", "RETURN_CONST"):
            found.append(ReturnInfo(_get_line(op_offset_to_line, instr.offset, firstlineno, search=True)))

    return found
|
||||
|
||||
|
||||
if sys.version_info[:2] <= (3, 9):
|
||||
|
||||
class _TargetInfo(object):
    """Where a try..except handler region ends (Python <= 3.9 bytecode)."""

    def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
        self.except_end_instruction = except_end_instruction
        self.jump_if_not_exc_instruction = jump_if_not_exc_instruction

    def __str__(self):
        jump = self.jump_if_not_exc_instruction
        if jump:
            return "_TargetInfo(%s - %s(%s))" % (
                self.except_end_instruction.opname,
                jump.opname,
                jump.argval,
            )
        return "_TargetInfo(%s)" % (self.except_end_instruction.opname,)
|
||||
|
||||
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    """
    Given the instruction index a SETUP block jumps to, locate where the
    corresponding except region ends (Python <= 3.9 bytecode patterns).

    :return: _TargetInfo or None when the block is not a try..except we track.
    """
    next_3 = [
        j_instruction.opname for j_instruction in instructions[exception_end_instruction_index : exception_end_instruction_index + 3]
    ]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ["POP_TOP", "POP_TOP", "POP_TOP"]:  # try..except without checking exception.
        try:
            # NOTE(review): if IndexError fired here, ``jump_instruction``
            # would be unbound below; in practice a negative index on a
            # non-empty list never raises, so this except looks unreachable.
            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ("JUMP_FORWARD", "JUMP_ABSOLUTE"):
                return None
        except IndexError:
            pass

        if jump_instruction.opname == "JUMP_ABSOLUTE":
            # On latest versions of Python 3 the interpreter has a go-backwards step,
            # used to show the initial line of a for/while, etc (which is this
            # JUMP_ABSOLUTE)... we're not really interested in it, but rather on where
            # it points to.
            except_end_instruction = instructions[offset_to_instruction_idx[jump_instruction.argval]]
            idx = offset_to_instruction_idx[except_end_instruction.argval]
            # Search for the POP_EXCEPT which should be at the end of the block.
            for pop_except_instruction in reversed(instructions[:idx]):
                if pop_except_instruction.opname == "POP_EXCEPT":
                    except_end_instruction = pop_except_instruction
                    return _TargetInfo(except_end_instruction)
            else:
                return None  # i.e.: Continue outer loop

        else:
            # JUMP_FORWARD
            i = offset_to_instruction_idx[jump_instruction.argval]
            try:
                # i.e.: the jump is to the instruction after the block finishes (so, we need to
                # get the previous instruction as that should be the place where the exception
                # block finishes).
                except_end_instruction = instructions[i - 1]
            except:
                pydev_log.critical("Error when computing try..except block end.")
                return None
            return _TargetInfo(except_end_instruction)

    elif next_3 and next_3[0] == "DUP_TOP":  # try..except AssertionError.
        iter_in = instructions[exception_end_instruction_index + 1 :]
        for j, jump_if_not_exc_instruction in enumerate(iter_in):
            if jump_if_not_exc_instruction.opname == "JUMP_IF_NOT_EXC_MATCH":
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)

            elif jump_if_not_exc_instruction.opname == "COMPARE_OP" and jump_if_not_exc_instruction.argval == "exception match":
                # Python 3.8 and before
                try:
                    next_instruction = iter_in[j + 1]
                except:
                    continue
                if next_instruction.opname == "POP_JUMP_IF_FALSE":
                    except_end_instruction = instructions[offset_to_instruction_idx[next_instruction.argval]]
                    return _TargetInfo(except_end_instruction, next_instruction)
        else:
            return None  # i.e.: Continue outer loop

    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
|
||||
|
||||
def collect_try_except_info(co, use_func_first_line=False):
    """
    Return a list of TryExceptInfo describing the try..except blocks in ``co``
    (Python <= 3.9 bytecode).

    :param co: the code object to inspect.
    :param use_func_first_line: when True, lines are relative to the
        function's first line; otherwise they are absolute.
    """
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, "co_lines") and not hasattr(co, "co_lnotab"):
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    try_except_info_lst = []

    op_offset_to_line = dict(entry for entry in dis.findlinestarts(co) if entry[1] is not None)

    offset_to_instruction_idx = {}

    instructions = list(iter_instructions(co))

    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i

    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname
        if curr_op_name in ("SETUP_FINALLY", "SETUP_EXCEPT"):  # SETUP_EXCEPT before Python 3.8, SETUP_FINALLY Python 3.8 onwards.
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]

            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ("JUMP_FORWARD", "JUMP_ABSOLUTE"):
                continue

            except_end_instruction = None
            # Track visited indexes to avoid looping on cyclic jump chains.
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction

                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break

            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True), ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line, try_except_info.except_bytecode_offset, firstlineno, search=True
                )

                try_except_info.except_end_bytecode_offset = except_end_instruction.offset
                try_except_info.except_end_line = _get_line(op_offset_to_line, except_end_instruction.offset, firstlineno, search=True)
                try_except_info_lst.append(try_except_info)

                # Collect bare 'raise' statements within the handler region.
                for raise_instruction in instructions[i : offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == "RAISE_VARARGS":
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True)
                            )

    return try_except_info_lst
|
||||
|
||||
elif sys.version_info[:2] == (3, 10):
|
||||
|
||||
class _TargetInfo(object):
    # Where a try..except handler region ends (Python 3.10 bytecode).
    # Note: intentionally mirrors the <=3.9 class of the same name; only one
    # branch of the version check is ever defined.
    def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
        self.except_end_instruction = except_end_instruction
        self.jump_if_not_exc_instruction = jump_if_not_exc_instruction

    def __str__(self):
        msg = ["_TargetInfo("]
        msg.append(self.except_end_instruction.opname)
        if self.jump_if_not_exc_instruction:
            msg.append(" - ")
            msg.append(self.jump_if_not_exc_instruction.opname)
            msg.append("(")
            msg.append(str(self.jump_if_not_exc_instruction.argval))
            msg.append(")")
        msg.append(")")
        return "".join(msg)
|
||||
|
||||
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    """Inspect the instructions starting at the exception target and locate the handler end.

    Returns a _TargetInfo (with the handler-end instruction and, for a typed
    ``except X:``, the conditional jump chaining to the next clause), or None
    when the construct is not a try..except we care about (e.g. try..finally).
    """
    # Peek at the opnames of the next 3 instructions to classify the handler shape.
    next_3 = [
        j_instruction.opname for j_instruction in instructions[exception_end_instruction_index : exception_end_instruction_index + 3]
    ]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ["POP_TOP", "POP_TOP", "POP_TOP"]:  # try..except without checking exception.
        # Previously there was a jump which was able to point where the exception would end. This
        # is no longer true, now a bare except doesn't really have any indication in the bytecode
        # where the end would be expected if the exception wasn't raised, so, we just blindly
        # search for a POP_EXCEPT from the current position.
        for pop_except_instruction in instructions[exception_end_instruction_index + 3 :]:
            if pop_except_instruction.opname == "POP_EXCEPT":
                except_end_instruction = pop_except_instruction
                return _TargetInfo(except_end_instruction)

    elif next_3 and next_3[0] == "DUP_TOP":  # try..except AssertionError.
        # Typed handler: scan forward for the exception-match jump; its target
        # is where control goes when this clause does not match.
        iter_in = instructions[exception_end_instruction_index + 1 :]
        for jump_if_not_exc_instruction in iter_in:
            if jump_if_not_exc_instruction.opname == "JUMP_IF_NOT_EXC_MATCH":
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)
        else:
            # No match-jump found at all.
            return None  # i.e.: Continue outer loop

    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
|
||||
|
||||
def collect_try_except_info(co, use_func_first_line=False):
    """Collect TryExceptInfo entries for a code object by scanning its bytecode (Python 3.10).

    :param co: the code object to analyze.
    :param use_func_first_line: when True, reported lines are made relative to
        ``co.co_firstlineno``; otherwise absolute line numbers are used.
    :return: list of TryExceptInfo, one per try..except found.
    """
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, "co_lines") and not hasattr(co, "co_lnotab"):
        # No line information available at all: nothing useful can be reported.
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    try_except_info_lst = []

    # Map: bytecode offset of a line start -> line number (None entries dropped).
    op_offset_to_line = dict(entry for entry in dis.findlinestarts(co) if entry[1] is not None)

    offset_to_instruction_idx = {}

    instructions = list(iter_instructions(co))

    # First pass: map each instruction offset to its index in `instructions`.
    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i

    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname
        if curr_op_name == "SETUP_FINALLY":
            # argval points at the exception target (where the handler begins).
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]

            jump_instruction = instructions[exception_end_instruction_index]
            if jump_instruction.opname != "DUP_TOP":
                # Not a try..except shape (likely try..finally): skip it.
                continue

            except_end_instruction = None
            # Guard against cycles while following the except..except chain.
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction

                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break

            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True), ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line, try_except_info.except_bytecode_offset, firstlineno, search=True
                )

                try_except_info.except_end_bytecode_offset = except_end_instruction.offset

                # On Python 3.10 the final line of the except end isn't really correct, rather,
                # it's engineered to be the same line of the except and not the end line of the
                # block, so, the approach taken is to search for the biggest line between the
                # except and the end instruction
                except_end_line = -1
                start_i = offset_to_instruction_idx[try_except_info.except_bytecode_offset]
                end_i = offset_to_instruction_idx[except_end_instruction.offset]
                # NOTE(review): this inner loop shadows the outer `instruction` variable;
                # harmless because the outer for rebinds it each iteration, but worth renaming.
                for instruction in instructions[start_i : end_i + 1]:
                    found_at_line = op_offset_to_line.get(instruction.offset)
                    if found_at_line is not None and found_at_line > except_end_line:
                        except_end_line = found_at_line
                try_except_info.except_end_line = except_end_line - firstlineno

                try_except_info_lst.append(try_except_info)

                # Record bare `raise` statements (argval == 0) inside the handler span.
                for raise_instruction in instructions[i : offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == "RAISE_VARARGS":
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True)
                            )

    return try_except_info_lst
|
||||
|
||||
elif sys.version_info[:2] >= (3, 11):
|
||||
|
||||
def collect_try_except_info(co, use_func_first_line=False):
    """Bytecode-based try..except collection is not supported on Python 3.11+.

    Note: if the filename is available and we can get the source,
    `collect_try_except_info_from_source` is preferred (this function is
    kept only as a fallback for cases where sources aren't available, and
    it always reports no try..except blocks).
    """
    return []
|
||||
|
||||
|
||||
import ast as ast_module
|
||||
|
||||
|
||||
class _Visitor(ast_module.NodeVisitor):
|
||||
def __init__(self):
|
||||
self.try_except_infos = []
|
||||
self._stack = []
|
||||
self._in_except_stack = []
|
||||
self.max_line = -1
|
||||
|
||||
def generic_visit(self, node):
|
||||
if hasattr(node, "lineno"):
|
||||
if node.lineno > self.max_line:
|
||||
self.max_line = node.lineno
|
||||
return ast_module.NodeVisitor.generic_visit(self, node)
|
||||
|
||||
def visit_Try(self, node):
|
||||
info = TryExceptInfo(node.lineno, ignore=True)
|
||||
self._stack.append(info)
|
||||
self.generic_visit(node)
|
||||
assert info is self._stack.pop()
|
||||
if not info.ignore:
|
||||
self.try_except_infos.insert(0, info)
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
visit_TryExcept = visit_Try
|
||||
|
||||
def visit_ExceptHandler(self, node):
|
||||
info = self._stack[-1]
|
||||
info.ignore = False
|
||||
if info.except_line == -1:
|
||||
info.except_line = node.lineno
|
||||
self._in_except_stack.append(info)
|
||||
self.generic_visit(node)
|
||||
if hasattr(node, "end_lineno"):
|
||||
info.except_end_line = node.end_lineno
|
||||
else:
|
||||
info.except_end_line = self.max_line
|
||||
self._in_except_stack.pop()
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
|
||||
def visit_Raise(self, node):
|
||||
for info in self._in_except_stack:
|
||||
if node.exc is None:
|
||||
info.raise_lines_in_except.append(node.lineno)
|
||||
self.generic_visit(node)
|
||||
|
||||
else:
|
||||
|
||||
def visit_Raise(self, node):
|
||||
for info in self._in_except_stack:
|
||||
if node.type is None and node.tback is None:
|
||||
info.raise_lines_in_except.append(node.lineno)
|
||||
self.generic_visit(node)
|
||||
|
||||
|
||||
def collect_try_except_info_from_source(filename):
    """Read *filename* and collect its try..except info from the source text."""
    with open(filename, "rb") as stream:
        source_bytes = stream.read()
    return collect_try_except_info_from_contents(source_bytes, filename)
|
||||
|
||||
|
||||
def collect_try_except_info_from_contents(contents, filename="<unknown>"):
    """Parse *contents* (source text or bytes) and collect its try..except info."""
    tree = ast_module.parse(contents, filename)
    collector = _Visitor()
    collector.visit(tree)
    return collector.try_except_infos
|
||||
|
||||
|
||||
# Sentinel returned by _Disassembler._lookahead to signal that the instruction
# list was mutated and the caller must restart the lookahead from the top.
RESTART_FROM_LOOKAHEAD = object()
# Sentinel appended to a line's contents to mark the end of a logical statement.
SEPARATOR = object()
|
||||
|
||||
|
||||
class _MsgPart(object):
|
||||
def __init__(self, line, tok):
|
||||
assert line >= 0
|
||||
self.line = line
|
||||
self.tok = tok
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "_MsgPart(line: %s tok: %s)" % (self.line, self.tok)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
@classmethod
|
||||
def add_to_line_to_contents(cls, obj, line_to_contents, line=None):
|
||||
if isinstance(obj, (list, tuple)):
|
||||
for o in obj:
|
||||
cls.add_to_line_to_contents(o, line_to_contents, line=line)
|
||||
return
|
||||
|
||||
if isinstance(obj, str):
|
||||
assert line is not None
|
||||
line = int(line)
|
||||
lst = line_to_contents.setdefault(line, [])
|
||||
lst.append(obj)
|
||||
return
|
||||
|
||||
if isinstance(obj, _MsgPart):
|
||||
if isinstance(obj.tok, (list, tuple)):
|
||||
cls.add_to_line_to_contents(obj.tok, line_to_contents, line=obj.line)
|
||||
return
|
||||
|
||||
if isinstance(obj.tok, str):
|
||||
lst = line_to_contents.setdefault(obj.line, [])
|
||||
lst.append(obj.tok)
|
||||
return
|
||||
|
||||
raise AssertionError("Unhandled: %" % (obj,))
|
||||
|
||||
|
||||
class _Disassembler(object):
    """Builds a readable, line-oriented representation of a code object's bytecode.

    Mutates its own ``instructions`` list while disassembling, folding common
    bytecode patterns (attribute access, calls, tuple builds, assignments,
    returns, raises) back into pseudo-source fragments grouped by line.
    """

    def __init__(self, co, firstlineno, level=0):
        # level is the nesting depth (incremented when recursing into nested
        # code objects found in LOAD_CONST).
        self.co = co
        self.firstlineno = firstlineno
        self.level = level
        self.instructions = list(iter_instructions(co))
        op_offset_to_line = self.op_offset_to_line = dict(entry for entry in dis.findlinestarts(co) if entry[1] is not None)

        # Update offsets so that all offsets have the line index (and update it based on
        # the passed firstlineno).
        line_index = co.co_firstlineno - firstlineno
        for instruction in self.instructions:
            new_line_index = op_offset_to_line.get(instruction.offset)
            if new_line_index is not None:
                line_index = new_line_index - firstlineno
                op_offset_to_line[instruction.offset] = line_index
            else:
                op_offset_to_line[instruction.offset] = line_index

    # Sentinels used by min_line/max_line as "no line found yet" bounds.
    BIG_LINE_INT = 9999999
    SMALL_LINE_INT = -1

    def min_line(self, *args):
        """Return the smallest line among the given parts/instructions (BIG_LINE_INT if none)."""
        m = self.BIG_LINE_INT
        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = min(m, self.min_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = min(m, arg.line)

            elif hasattr(arg, "offset"):
                m = min(m, self.op_offset_to_line[arg.offset])
        return m

    def max_line(self, *args):
        """Return the largest line among the given parts/instructions (SMALL_LINE_INT if none)."""
        m = self.SMALL_LINE_INT
        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = max(m, self.max_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = max(m, arg.line)

            elif hasattr(arg, "offset"):
                m = max(m, self.op_offset_to_line[arg.offset])
        return m

    def _lookahead(self):
        """
        This handles and converts some common constructs from bytecode to actual source code.

        It may change the list of instructions.
        """
        msg = self._create_msg_part
        found = []
        fullrepr = None

        # Collect all the load instructions
        for next_instruction in self.instructions:
            if next_instruction.opname in ("LOAD_GLOBAL", "LOAD_FAST", "LOAD_CONST", "LOAD_NAME"):
                found.append(next_instruction)
            else:
                break

        if not found:
            return None

        # After the loop, next_instruction is the first non-LOAD instruction.
        if next_instruction.opname == "LOAD_ATTR":
            prev_instruction = found[-1]
            # Remove the current LOAD_ATTR
            assert self.instructions.pop(len(found)) is next_instruction

            # Add the LOAD_ATTR to the previous LOAD
            self.instructions[len(found) - 1] = _Instruction(
                prev_instruction.opname,
                prev_instruction.opcode,
                prev_instruction.starts_line,
                prev_instruction.argval,
                False,  # prev_instruction.is_jump_target,
                prev_instruction.offset,
                (msg(prev_instruction), msg(prev_instruction, "."), msg(next_instruction)),
            )
            return RESTART_FROM_LOOKAHEAD

        if next_instruction.opname in ("CALL_FUNCTION", "PRECALL", "CALL"):
            # argval is the number of call arguments; +1 accounts for the callable itself.
            if len(found) == next_instruction.argval + 1:
                force_restart = False
                delta = 0
            else:
                force_restart = True
                if len(found) > next_instruction.argval + 1:
                    delta = len(found) - (next_instruction.argval + 1)
                else:
                    return None  # This is odd

            del_upto = delta + next_instruction.argval + 2  # +2 = NAME / CALL_FUNCTION
            if next_instruction.opname == "PRECALL":
                del_upto += 1  # Also remove the CALL right after the PRECALL.
            del self.instructions[delta:del_upto]

            found = iter(found[delta:])
            call_func = next(found)
            args = list(found)
            # Render as: callable(arg1, arg2, ...)
            fullrepr = [
                msg(call_func),
                msg(call_func, "("),
            ]
            prev = call_func
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ", "))
                prev = arg
                fullrepr.append(msg(arg))

            fullrepr.append(msg(prev, ")"))

            if force_restart:
                # Re-insert the folded call as a synthetic instruction so outer
                # constructs (e.g. a call consuming this call's result) can fold it too.
                self.instructions.insert(
                    delta,
                    _Instruction(
                        call_func.opname,
                        call_func.opcode,
                        call_func.starts_line,
                        call_func.argval,
                        False,  # call_func.is_jump_target,
                        call_func.offset,
                        tuple(fullrepr),
                    ),
                )
                return RESTART_FROM_LOOKAHEAD

        elif next_instruction.opname == "BUILD_TUPLE":
            if len(found) == next_instruction.argval:
                force_restart = False
                delta = 0
            else:
                force_restart = True
                if len(found) > next_instruction.argval:
                    delta = len(found) - (next_instruction.argval)
                else:
                    return None  # This is odd

            del self.instructions[delta : delta + next_instruction.argval + 1]  # +1 = BUILD_TUPLE

            found = iter(found[delta:])

            args = [instruction for instruction in found]
            if args:
                first_instruction = args[0]
            else:
                first_instruction = next_instruction
            prev = first_instruction

            # Render as: (item1, item2, ...)
            fullrepr = []
            fullrepr.append(msg(prev, "("))
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ", "))
                prev = arg
                fullrepr.append(msg(arg))

            fullrepr.append(msg(prev, ")"))

            if force_restart:
                self.instructions.insert(
                    delta,
                    _Instruction(
                        first_instruction.opname,
                        first_instruction.opcode,
                        first_instruction.starts_line,
                        first_instruction.argval,
                        False,  # first_instruction.is_jump_target,
                        first_instruction.offset,
                        tuple(fullrepr),
                    ),
                )
                return RESTART_FROM_LOOKAHEAD

        if fullrepr is not None and self.instructions:
            # Fold the consumer of the produced value (store/return) into the repr.
            if self.instructions[0].opname == "POP_TOP":
                self.instructions.pop(0)

            if self.instructions[0].opname in ("STORE_FAST", "STORE_NAME"):
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction), msg(next_instruction, " = "), fullrepr

            if self.instructions[0].opname == "RETURN_VALUE":
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction, "return ", line=self.min_line(next_instruction, fullrepr)), fullrepr

        return fullrepr

    def _decorate_jump_target(self, instruction, instruction_repr):
        """Prefix jump targets with their offset (e.g. |12| repr) so jumps are traceable."""
        if instruction.is_jump_target:
            return ("|", str(instruction.offset), "|", instruction_repr)

        return instruction_repr

    def _create_msg_part(self, instruction, tok=None, line=None):
        """Create a _MsgPart for *instruction*, defaulting to its (decorated) argrepr."""
        dec = self._decorate_jump_target
        if line is None or line in (self.BIG_LINE_INT, self.SMALL_LINE_INT):
            line = self.op_offset_to_line[instruction.offset]

        # Strip the "NULL" markers Python 3.11+ adds around callables in argrepr.
        argrepr = instruction.argrepr
        if isinstance(argrepr, str) and argrepr.startswith("NULL + "):
            argrepr = argrepr[7:]
        if isinstance(argrepr, str) and argrepr.endswith("+ NULL"):
            # NOTE(review): slices 7 chars for a 6-char suffix check — presumably
            # intended to also drop the preceding space; confirm against argrepr format.
            argrepr = argrepr[:-7]
        return _MsgPart(line, tok if tok is not None else dec(instruction, argrepr))

    def _next_instruction_to_str(self, line_to_contents):
        """Consume instruction(s) and return their representation (or a sentinel/None)."""
        # indent = ''
        # if self.level > 0:
        #     indent += ' ' * self.level
        # print(indent, 'handle', self.instructions[0])

        if self.instructions:
            ret = self._lookahead()
            if ret:
                return ret

        msg = self._create_msg_part

        instruction = self.instructions.pop(0)

        # Bookkeeping opcodes with no source-level meaning.
        if instruction.opname in ("RESUME", "NULL"):
            return None

        if instruction.opname == "RETURN_CONST":
            return (msg(instruction, "return ", line=self.min_line(instruction)), msg(instruction))

        if instruction.opname in ("LOAD_GLOBAL", "LOAD_FAST", "LOAD_CONST", "LOAD_NAME"):
            # Pair a single LOAD with its immediate consumer when possible.
            next_instruction = self.instructions[0]
            if next_instruction.opname in ("STORE_FAST", "STORE_NAME"):
                self.instructions.pop(0)
                return (msg(next_instruction), msg(next_instruction, " = "), msg(instruction))

            if next_instruction.opname == "RETURN_VALUE":
                self.instructions.pop(0)
                return (msg(instruction, "return ", line=self.min_line(instruction)), msg(instruction))

            if next_instruction.opname == "RAISE_VARARGS" and next_instruction.argval == 1:
                self.instructions.pop(0)
                return (msg(instruction, "raise ", line=self.min_line(instruction)), msg(instruction))

        if instruction.opname == "LOAD_CONST":
            if inspect.iscode(instruction.argval):
                # Nested code object: disassemble recursively and merge its
                # (indented) lines into the current output.
                code_line_to_contents = _Disassembler(instruction.argval, self.firstlineno, self.level + 1).build_line_to_contents()

                for contents in code_line_to_contents.values():
                    contents.insert(0, " ")
                for line, contents in code_line_to_contents.items():
                    line_to_contents.setdefault(line, []).extend(contents)
                return msg(instruction, "LOAD_CONST(code)")

        if instruction.opname == "RAISE_VARARGS":
            if instruction.argval == 0:
                # Bare re-raise.
                return msg(instruction, "raise")

        if instruction.opname == "SETUP_FINALLY":
            return msg(instruction, ("try(", instruction.argrepr, "):"))

        # Generic fallbacks: OPNAME(argrepr), OPNAME{argval}, or just OPNAME.
        if instruction.argrepr:
            return msg(instruction, (instruction.opname, "(", instruction.argrepr, ")"))

        if instruction.argval:
            return msg(
                instruction,
                "%s{%s}"
                % (
                    instruction.opname,
                    instruction.argval,
                ),
            )

        return msg(instruction, instruction.opname)

    def build_line_to_contents(self):
        """Consume all instructions and return a dict: line -> list of content strings."""
        # print('----')
        # for instruction in self.instructions:
        #     print(instruction)
        # print('----\n\n')

        line_to_contents = {}

        instructions = self.instructions
        while instructions:
            s = self._next_instruction_to_str(line_to_contents)
            if s is RESTART_FROM_LOOKAHEAD:
                # The instruction list was rewritten; evaluate it again.
                continue
            if s is None:
                continue

            _MsgPart.add_to_line_to_contents(s, line_to_contents)
            m = self.max_line(s)
            if m != self.SMALL_LINE_INT:
                line_to_contents.setdefault(m, []).append(SEPARATOR)
        return line_to_contents

    def disassemble(self):
        """Return the final textual representation, one source line per output line."""
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        show_lines = False
        for line, contents in sorted(line_to_contents.items()):
            # Emit blank (or numbered) filler lines so output lines align with source lines.
            while last_line < line - 1:
                if show_lines:
                    stream.write("%s.\n" % (last_line + 1,))
                else:
                    stream.write("\n")
                last_line += 1

            if show_lines:
                stream.write("%s. " % (line,))

            for i, content in enumerate(contents):
                if content == SEPARATOR:
                    # Separators become ", " except when trailing.
                    if i != len(contents) - 1:
                        stream.write(", ")
                else:
                    stream.write(content)

            stream.write("\n")

            last_line = line

        return stream.getvalue()
|
||||
|
||||
|
||||
def code_to_bytecode_representation(co, use_func_first_line=False):
    """
    A simple disassemble of bytecode.

    It does not attempt to provide the full Python source code, rather, it provides a low-level
    representation of the bytecode, respecting the lines (so, its target is making the bytecode
    easier to grasp and not providing the original source code).

    Note that it does show jump locations/targets and converts some common bytecode constructs to
    Python code to make it a bit easier to understand.
    """
    # Reference for bytecodes:
    # https://docs.python.org/3/library/dis.html
    firstlineno = co.co_firstlineno if use_func_first_line else 0
    return _Disassembler(co, firstlineno).disassemble()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,200 @@
|
||||
CMD_RUN = 101
|
||||
CMD_LIST_THREADS = 102
|
||||
CMD_THREAD_CREATE = 103
|
||||
CMD_THREAD_KILL = 104
|
||||
CMD_THREAD_SUSPEND = 105
|
||||
CMD_THREAD_RUN = 106
|
||||
CMD_STEP_INTO = 107
|
||||
CMD_STEP_OVER = 108
|
||||
CMD_STEP_RETURN = 109
|
||||
CMD_GET_VARIABLE = 110
|
||||
CMD_SET_BREAK = 111
|
||||
CMD_REMOVE_BREAK = 112
|
||||
CMD_EVALUATE_EXPRESSION = 113
|
||||
CMD_GET_FRAME = 114
|
||||
CMD_EXEC_EXPRESSION = 115
|
||||
CMD_WRITE_TO_CONSOLE = 116
|
||||
CMD_CHANGE_VARIABLE = 117
|
||||
CMD_RUN_TO_LINE = 118
|
||||
CMD_RELOAD_CODE = 119
|
||||
CMD_GET_COMPLETIONS = 120
|
||||
|
||||
# Note: renumbered (conflicted on merge)
|
||||
CMD_CONSOLE_EXEC = 121
|
||||
CMD_ADD_EXCEPTION_BREAK = 122
|
||||
CMD_REMOVE_EXCEPTION_BREAK = 123
|
||||
CMD_LOAD_SOURCE = 124
|
||||
CMD_ADD_DJANGO_EXCEPTION_BREAK = 125
|
||||
CMD_REMOVE_DJANGO_EXCEPTION_BREAK = 126
|
||||
CMD_SET_NEXT_STATEMENT = 127
|
||||
CMD_SMART_STEP_INTO = 128
|
||||
CMD_EXIT = 129
|
||||
CMD_SIGNATURE_CALL_TRACE = 130
|
||||
|
||||
CMD_SET_PY_EXCEPTION = 131
|
||||
CMD_GET_FILE_CONTENTS = 132
|
||||
CMD_SET_PROPERTY_TRACE = 133
|
||||
# Pydev debug console commands
|
||||
CMD_EVALUATE_CONSOLE_EXPRESSION = 134
|
||||
CMD_RUN_CUSTOM_OPERATION = 135
|
||||
CMD_GET_BREAKPOINT_EXCEPTION = 136
|
||||
CMD_STEP_CAUGHT_EXCEPTION = 137
|
||||
CMD_SEND_CURR_EXCEPTION_TRACE = 138
|
||||
CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139
|
||||
CMD_IGNORE_THROWN_EXCEPTION_AT = 140
|
||||
CMD_ENABLE_DONT_TRACE = 141
|
||||
CMD_SHOW_CONSOLE = 142
|
||||
|
||||
CMD_GET_ARRAY = 143
|
||||
CMD_STEP_INTO_MY_CODE = 144
|
||||
CMD_GET_CONCURRENCY_EVENT = 145
|
||||
CMD_SHOW_RETURN_VALUES = 146
|
||||
CMD_INPUT_REQUESTED = 147
|
||||
CMD_GET_DESCRIPTION = 148
|
||||
|
||||
CMD_PROCESS_CREATED = 149
|
||||
CMD_SHOW_CYTHON_WARNING = 150
|
||||
CMD_LOAD_FULL_VALUE = 151
|
||||
|
||||
CMD_GET_THREAD_STACK = 152
|
||||
|
||||
# This is mostly for unit-tests to diagnose errors on ci.
|
||||
CMD_THREAD_DUMP_TO_STDERR = 153
|
||||
|
||||
# Sent from the client to signal that we should stop when we start executing user code.
|
||||
CMD_STOP_ON_START = 154
|
||||
|
||||
# When the debugger is stopped in an exception, this command will provide the details of the current exception (in the current thread).
|
||||
CMD_GET_EXCEPTION_DETAILS = 155
|
||||
|
||||
# Allows configuring pydevd settings (can be called multiple times and only keys
|
||||
# available in the json will be configured -- keys not passed will not change the
|
||||
# previous configuration).
|
||||
CMD_PYDEVD_JSON_CONFIG = 156
|
||||
|
||||
CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION = 157
|
||||
CMD_THREAD_RESUME_SINGLE_NOTIFICATION = 158
|
||||
|
||||
CMD_STEP_OVER_MY_CODE = 159
|
||||
CMD_STEP_RETURN_MY_CODE = 160
|
||||
|
||||
CMD_SET_PY_EXCEPTION_JSON = 161
|
||||
CMD_SET_PATH_MAPPING_JSON = 162
|
||||
|
||||
CMD_GET_SMART_STEP_INTO_VARIANTS = 163 # XXX: PyCharm has 160 for this (we're currently incompatible anyways).
|
||||
|
||||
CMD_REDIRECT_OUTPUT = 200
|
||||
CMD_GET_NEXT_STATEMENT_TARGETS = 201
|
||||
CMD_SET_PROJECT_ROOTS = 202
|
||||
|
||||
CMD_MODULE_EVENT = 203
|
||||
CMD_PROCESS_EVENT = 204
|
||||
|
||||
CMD_AUTHENTICATE = 205
|
||||
|
||||
CMD_STEP_INTO_COROUTINE = 206
|
||||
|
||||
CMD_LOAD_SOURCE_FROM_FRAME_ID = 207
|
||||
|
||||
CMD_SET_FUNCTION_BREAK = 208
|
||||
|
||||
CMD_VERSION = 501
|
||||
CMD_RETURN = 502
|
||||
CMD_SET_PROTOCOL = 503
|
||||
CMD_ERROR = 901
|
||||
|
||||
# this number can be changed if there's need to do so
|
||||
# if the io is too big, we'll not send all (could make the debugger too non-responsive)
|
||||
MAX_IO_MSG_SIZE = 10000
|
||||
|
||||
VERSION_STRING = "@@BUILD_NUMBER@@"
|
||||
|
||||
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
|
||||
|
||||
file_system_encoding = getfilesystemencoding()
|
||||
filesystem_encoding_is_utf8 = file_system_encoding.lower() in ("utf-8", "utf_8", "utf8")
|
||||
|
||||
# Maps the string form of each CMD_* id to its symbolic name (used for logging).
# Fix: added the missing "208" entry for CMD_SET_FUNCTION_BREAK (the constant is
# defined above but had no mapping here).
ID_TO_MEANING = {
    "101": "CMD_RUN",
    "102": "CMD_LIST_THREADS",
    "103": "CMD_THREAD_CREATE",
    "104": "CMD_THREAD_KILL",
    "105": "CMD_THREAD_SUSPEND",
    "106": "CMD_THREAD_RUN",
    "107": "CMD_STEP_INTO",
    "108": "CMD_STEP_OVER",
    "109": "CMD_STEP_RETURN",
    "110": "CMD_GET_VARIABLE",
    "111": "CMD_SET_BREAK",
    "112": "CMD_REMOVE_BREAK",
    "113": "CMD_EVALUATE_EXPRESSION",
    "114": "CMD_GET_FRAME",
    "115": "CMD_EXEC_EXPRESSION",
    "116": "CMD_WRITE_TO_CONSOLE",
    "117": "CMD_CHANGE_VARIABLE",
    "118": "CMD_RUN_TO_LINE",
    "119": "CMD_RELOAD_CODE",
    "120": "CMD_GET_COMPLETIONS",
    "121": "CMD_CONSOLE_EXEC",
    "122": "CMD_ADD_EXCEPTION_BREAK",
    "123": "CMD_REMOVE_EXCEPTION_BREAK",
    "124": "CMD_LOAD_SOURCE",
    "125": "CMD_ADD_DJANGO_EXCEPTION_BREAK",
    "126": "CMD_REMOVE_DJANGO_EXCEPTION_BREAK",
    "127": "CMD_SET_NEXT_STATEMENT",
    "128": "CMD_SMART_STEP_INTO",
    "129": "CMD_EXIT",
    "130": "CMD_SIGNATURE_CALL_TRACE",
    "131": "CMD_SET_PY_EXCEPTION",
    "132": "CMD_GET_FILE_CONTENTS",
    "133": "CMD_SET_PROPERTY_TRACE",
    "134": "CMD_EVALUATE_CONSOLE_EXPRESSION",
    "135": "CMD_RUN_CUSTOM_OPERATION",
    "136": "CMD_GET_BREAKPOINT_EXCEPTION",
    "137": "CMD_STEP_CAUGHT_EXCEPTION",
    "138": "CMD_SEND_CURR_EXCEPTION_TRACE",
    "139": "CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED",
    "140": "CMD_IGNORE_THROWN_EXCEPTION_AT",
    "141": "CMD_ENABLE_DONT_TRACE",
    "142": "CMD_SHOW_CONSOLE",
    "143": "CMD_GET_ARRAY",
    "144": "CMD_STEP_INTO_MY_CODE",
    "145": "CMD_GET_CONCURRENCY_EVENT",
    "146": "CMD_SHOW_RETURN_VALUES",
    "147": "CMD_INPUT_REQUESTED",
    "148": "CMD_GET_DESCRIPTION",
    "149": "CMD_PROCESS_CREATED",  # Note: this is actually a notification of a sub-process created.
    "150": "CMD_SHOW_CYTHON_WARNING",
    "151": "CMD_LOAD_FULL_VALUE",
    "152": "CMD_GET_THREAD_STACK",
    "153": "CMD_THREAD_DUMP_TO_STDERR",
    "154": "CMD_STOP_ON_START",
    "155": "CMD_GET_EXCEPTION_DETAILS",
    "156": "CMD_PYDEVD_JSON_CONFIG",
    "157": "CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION",
    "158": "CMD_THREAD_RESUME_SINGLE_NOTIFICATION",
    "159": "CMD_STEP_OVER_MY_CODE",
    "160": "CMD_STEP_RETURN_MY_CODE",
    "161": "CMD_SET_PY_EXCEPTION_JSON",
    "162": "CMD_SET_PATH_MAPPING_JSON",
    "163": "CMD_GET_SMART_STEP_INTO_VARIANTS",
    "200": "CMD_REDIRECT_OUTPUT",
    "201": "CMD_GET_NEXT_STATEMENT_TARGETS",
    "202": "CMD_SET_PROJECT_ROOTS",
    "203": "CMD_MODULE_EVENT",
    "204": "CMD_PROCESS_EVENT",  # DAP process event.
    "205": "CMD_AUTHENTICATE",
    "206": "CMD_STEP_INTO_COROUTINE",
    "207": "CMD_LOAD_SOURCE_FROM_FRAME_ID",
    "208": "CMD_SET_FUNCTION_BREAK",
    "501": "CMD_VERSION",
    "502": "CMD_RETURN",
    "503": "CMD_SET_PROTOCOL",
    "901": "CMD_ERROR",
}
|
||||
|
||||
|
||||
def constant_to_str(constant):
    """Return the symbolic CMD_* name for *constant* (e.g. 101 -> "CMD_RUN")."""
    meaning = ID_TO_MEANING.get(str(constant))
    return meaning if meaning else "<Unknown: %s>" % (constant,)
|
||||
+181
@@ -0,0 +1,181 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
class ArgHandlerWithParam:
    """
    Handler for a command line argument which expects a value (e.g.: --port 5678).
    """

    def __init__(self, arg_name, convert_val=None, default_val=None):
        self.arg_name = arg_name
        self.arg_v_rep = "--%s" % (arg_name,)
        self.convert_val = convert_val  # Optional callable applied to the raw value.
        self.default_val = default_val

    def to_argv(self, lst, setup):
        # Only emit the flag when the setting exists and differs from the default.
        value = setup.get(self.arg_name)
        if value is None or value == self.default_val:
            return
        lst.append(self.arg_v_rep)
        lst.append("%s" % (value,))

    def handle_argv(self, argv, i, setup):
        # Consume "--name value" from argv (in place) and store it in setup.
        assert argv[i] == self.arg_v_rep
        del argv[i]

        raw = argv[i]
        if self.convert_val:
            raw = self.convert_val(raw)

        setup[self.arg_name] = raw
        del argv[i]
|
||||
|
||||
|
||||
class ArgHandlerBool:
    """
    If a given flag is received, mark it as 'True' in setup.
    """

    def __init__(self, arg_name, default_val=False):
        self.arg_name = arg_name
        self.arg_v_rep = "--%s" % (arg_name,)
        self.default_val = default_val

    def to_argv(self, lst, setup):
        # Emit the bare flag only when the setting is truthy.
        if setup.get(self.arg_name):
            lst.append(self.arg_v_rep)

    def handle_argv(self, argv, i, setup):
        # Consume the flag from argv (in place) and record it as True.
        assert argv[i] == self.arg_v_rep
        del argv[i]
        setup[self.arg_name] = True
|
||||
|
||||
|
||||
def convert_ppid(ppid):
    """Convert the --ppid argument to an int, rejecting our own pid."""
    converted = int(ppid)
    # 0 means "no parent pid"; any other value must not be this very process.
    if converted != 0 and converted == os.getpid():
        raise AssertionError("ppid passed is the same as the current process pid (%s)!" % (converted,))
    return converted
|
||||
|
||||
|
||||
ACCEPTED_ARG_HANDLERS = [
|
||||
ArgHandlerWithParam("port", int, 0),
|
||||
ArgHandlerWithParam("ppid", convert_ppid, 0),
|
||||
ArgHandlerWithParam("vm_type"),
|
||||
ArgHandlerWithParam("client"),
|
||||
ArgHandlerWithParam("access-token"),
|
||||
ArgHandlerWithParam("client-access-token"),
|
||||
ArgHandlerWithParam("debug-mode"),
|
||||
ArgHandlerWithParam("preimport"),
|
||||
# Logging
|
||||
ArgHandlerWithParam("log-file"),
|
||||
ArgHandlerWithParam("log-level", int, None),
|
||||
ArgHandlerBool("server"),
|
||||
ArgHandlerBool("multiproc"), # Used by PyCharm (reuses connection: ssh tunneling)
|
||||
ArgHandlerBool("multiprocess"), # Used by PyDev (creates new connection to ide)
|
||||
ArgHandlerBool("save-signatures"),
|
||||
ArgHandlerBool("save-threading"),
|
||||
ArgHandlerBool("save-asyncio"),
|
||||
ArgHandlerBool("print-in-debugger-startup"),
|
||||
ArgHandlerBool("cmd-line"),
|
||||
ArgHandlerBool("module"),
|
||||
ArgHandlerBool("skip-notify-stdin"),
|
||||
# The ones below should've been just one setting to specify the protocol, but for compatibility
|
||||
# reasons they're passed as a flag but are mutually exclusive.
|
||||
ArgHandlerBool("json-dap"), # Protocol used by ptvsd to communicate with pydevd (a single json message in each read)
|
||||
ArgHandlerBool("json-dap-http"), # Actual DAP (json messages over http protocol).
|
||||
ArgHandlerBool("protocol-quoted-line"), # Custom protocol with quoted lines.
|
||||
ArgHandlerBool("protocol-http"), # Custom protocol with http.
|
||||
]
|
||||
|
||||
ARGV_REP_TO_HANDLER = {}
|
||||
for handler in ACCEPTED_ARG_HANDLERS:
|
||||
ARGV_REP_TO_HANDLER[handler.arg_v_rep] = handler
|
||||
|
||||
|
||||
def get_pydevd_file():
    """Return the path to pydevd's main module as a .py file.

    If the interpreter reports a compiled artifact (".pyc" or "$py.class"),
    the corresponding ".py" filename is returned instead.
    """
    import pydevd

    filename = pydevd.__file__
    if filename.endswith(".pyc"):
        # Drop the trailing 'c' to get the .py source path.
        return filename[:-1]
    if filename.endswith("$py.class"):
        return filename[: -len("$py.class")] + ".py"
    return filename
|
||||
|
||||
|
||||
def setup_to_argv(setup, skip_names=None):
    """
    Convert a setup dict back into an argv list starting with pydevd's file.

    :param dict setup:
        A dict previously gotten from process_command_line.

    :param set skip_names:
        The names in the setup which shouldn't be converted to argv.

    :note: does not handle --file nor --DEBUG.
    """
    skip = set() if skip_names is None else skip_names

    new_argv = [get_pydevd_file()]
    for arg_handler in ACCEPTED_ARG_HANDLERS:
        arg_name = arg_handler.arg_name
        if arg_name in setup and arg_name not in skip:
            arg_handler.to_argv(new_argv, setup)
    return new_argv
|
||||
|
||||
|
||||
def process_command_line(argv):
    """Parse pydevd's arguments, removing them from ``argv`` in place.

    :param list argv: the full command line; argv[0] and every recognized
        pydevd option are deleted from it, leaving only the user's program
        arguments (everything after --file).
    :return: dict mapping every known setting name to its parsed value.
    :raises ValueError: on an unknown option or an invalid --qt-support mode.
    """
    setup = {}
    for handler in ACCEPTED_ARG_HANDLERS:
        setup[handler.arg_name] = handler.default_val
    setup["file"] = ""
    setup["qt-support"] = ""

    # Keep a snapshot of the original command line for error messages only.
    initial_argv = tuple(argv)

    i = 0
    del argv[0]
    # Note: no branch increments `i` -- every branch must consume at least one
    # entry from argv (or jump `i` past the end) for the loop to terminate.
    while i < len(argv):
        handler = ARGV_REP_TO_HANDLER.get(argv[i])
        if handler is not None:
            handler.handle_argv(argv, i, setup)

        elif argv[i].startswith("--qt-support"):
            # The --qt-support is special because we want to keep backward compatibility:
            # Previously, just passing '--qt-support' meant that we should use the auto-discovery mode
            # whereas now, if --qt-support is passed, it should be passed as --qt-support=<mode>, where
            # mode can be one of 'auto', 'none', 'pyqt5', 'pyqt4', 'pyside', 'pyside2'.
            if argv[i] == "--qt-support":
                setup["qt-support"] = "auto"

            elif argv[i].startswith("--qt-support="):
                qt_support = argv[i][len("--qt-support=") :]
                valid_modes = ("none", "auto", "pyqt5", "pyqt4", "pyside", "pyside2")
                if qt_support not in valid_modes:
                    raise ValueError("qt-support mode invalid: " + qt_support)
                if qt_support == "none":
                    # On none, actually set an empty string to evaluate to False.
                    setup["qt-support"] = ""
                else:
                    setup["qt-support"] = qt_support
            else:
                raise ValueError("Unexpected definition for qt-support flag: " + argv[i])

            del argv[i]

        elif argv[i] == "--file":
            # --file is special because it's the last one (so, no handler for it).
            del argv[i]
            setup["file"] = argv[i]
            i = len(argv)  # pop out, file is our last argument

        elif argv[i] == "--DEBUG":
            sys.stderr.write("pydevd: --DEBUG parameter deprecated. Use `--debug-level=3` instead.\n")
            # Bugfix: the deprecated flag must still be consumed; without this
            # `del` the loop would see the same "--DEBUG" entry on every
            # iteration and spin forever writing the warning.
            del argv[i]

        else:
            raise ValueError("Unexpected option: %s when processing: %s" % (argv[i], initial_argv))
    return setup
|
||||
+482
@@ -0,0 +1,482 @@
|
||||
import time
|
||||
|
||||
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder
|
||||
from _pydevd_bundle.pydevd_constants import get_thread_id
|
||||
from _pydevd_bundle.pydevd_net_command import NetCommand
|
||||
from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_thread_wrappers import ObjectWrapper, wrap_attr
|
||||
import pydevd_file_utils
|
||||
from _pydev_bundle import pydev_log
|
||||
import sys
|
||||
|
||||
file_system_encoding = getfilesystemencoding()
|
||||
|
||||
from urllib.parse import quote
|
||||
|
||||
# Shortcut to the stdlib helper returning the current Thread object.
threadingCurrentThread = threading.current_thread

# Calls originating from these files are considered debugger/threading
# internals and are not reported by default.
DONT_TRACE_THREADING = ["threading.py", "pydevd.py"]
# ...except these methods, which are reported even when called from INNER_FILES.
INNER_METHODS = ["_stop"]
INNER_FILES = ["threading.py"]
# Method names the concurrency analyser reports for threads, locks and queues.
THREAD_METHODS = ["start", "_stop", "join"]
LOCK_METHODS = ["__init__", "acquire", "release", "__enter__", "__exit__"]
QUEUE_METHODS = ["put", "get"]

# Return the time since the epoch in microseconds (time.time() seconds * 1e6).
cur_time = lambda: int(round(time.time() * 1000000))
|
||||
|
||||
|
||||
def get_text_list_for_frame(frame):
    """Build the list of XML ``<frame>`` fragments for ``frame`` and its callers."""
    # partial copy-paste from make_thread_suspend_str
    xml_parts = []
    current = frame
    try:
        while current:
            frame_id = str(id(current))

            code = current.f_code
            if code is None:
                break  # Iron Python sometimes does not have it!

            method_name = code.co_name  # method name (if in method) or ? if global
            if method_name is None:
                break  # Iron Python sometimes does not have it!

            absolute_filename = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(current)[0]
            client_file, _applied_mapping = pydevd_file_utils.map_file_to_client(absolute_filename)

            line_no = str(current.f_lineno)

            # The variables are all gotten 'on-demand', so the payload is empty here.
            variables = ""
            xml_parts.append('<frame id="%s" name="%s" ' % (frame_id, pydevd_xml.make_valid_xml_value(method_name)))
            xml_parts.append('file="%s" line="%s">' % (quote(client_file, "/>_= \t"), line_no))
            xml_parts.append(variables)
            xml_parts.append("</frame>")

            current = current.f_back
    except:
        pydev_log.exception()

    return xml_parts
|
||||
|
||||
|
||||
def send_concurrency_message(event_class, time, name, thread_id, type, event, file, line, frame, lock_id=0, parent=None):
    """Send one concurrency event, serialized as XML, to the IDE.

    Silently does nothing when no debugger is active or it has no writer.
    (Parameter names `time`, `type` and `file` shadow builtins; they are kept
    as-is because callers pass them by keyword position.)
    """
    dbg = GlobalDebuggerHolder.global_dbg
    if dbg is None:
        return

    valid = pydevd_xml.make_valid_xml_value

    parts = ["<xml>"]
    parts.append("<" + event_class)
    parts.append(' time="%s"' % valid(str(time)))
    parts.append(' name="%s"' % valid(name))
    parts.append(' thread_id="%s"' % valid(thread_id))
    parts.append(' type="%s"' % valid(type))
    if type == "lock":
        parts.append(' lock_id="%s"' % valid(str(lock_id)))
    if parent is not None:
        parts.append(' parent="%s"' % valid(parent))
    parts.append(' event="%s"' % valid(event))
    parts.append(' file="%s"' % valid(file))
    parts.append(' line="%s"' % valid(str(line)))
    parts.append("></" + event_class + ">")

    parts.extend(get_text_list_for_frame(frame))
    parts.append("</xml>")

    text = "".join(parts)
    if dbg.writer is not None:
        # 145 is the CMD id used for concurrency-analyser events.
        dbg.writer.add_command(NetCommand(145, 0, text))
|
||||
|
||||
|
||||
def log_new_thread(global_debugger, t):
    """Report thread ``t`` starting, relative to the analyser's start time."""
    elapsed = cur_time() - global_debugger.thread_analyser.start_time
    thread_id = get_thread_id(t)
    send_concurrency_message(
        "threading_event", elapsed, t.name, thread_id, "thread", "start", "code_name", 0, None, parent=thread_id
    )
|
||||
|
||||
|
||||
class ThreadingLogger:
    # Inspects traced frames and reports thread start/stop/join and
    # lock/queue acquire/release events to the IDE via
    # send_concurrency_message.

    def __init__(self):
        # Timestamps in events are relative to this start time (microseconds).
        self.start_time = cur_time()

    def set_start_time(self, time):
        self.start_time = time

    def log_event(self, frame):
        """Examine one traced frame and emit concurrency events if relevant."""
        # A frame is interesting when the method runs on a Thread or on an
        # ObjectWrapper (wrapped lock/queue), or when the caller's caller is a
        # Thread method (this covers the injected pydev_after_run_call hook).
        write_log = False
        self_obj = None
        if "self" in frame.f_locals:
            self_obj = frame.f_locals["self"]
            if isinstance(self_obj, threading.Thread) or self_obj.__class__ == ObjectWrapper:
                write_log = True
        if hasattr(frame, "f_back") and frame.f_back is not None:
            back = frame.f_back
            if hasattr(back, "f_back") and back.f_back is not None:
                back = back.f_back
                if "self" in back.f_locals:
                    if isinstance(back.f_locals["self"], threading.Thread):
                        write_log = True
        try:
            if write_log:
                t = threadingCurrentThread()
                back = frame.f_back
                if not back:
                    return
                name, _, back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back)
                event_time = cur_time() - self.start_time
                method_name = frame.f_code.co_name

                if isinstance(self_obj, threading.Thread):
                    if not hasattr(self_obj, "_pydev_run_patched"):
                        # Patch run() once per thread so the injected
                        # pydev_after_run_call marks when run() finishes.
                        wrap_attr(self_obj, "run")
                    if (method_name in THREAD_METHODS) and (
                        back_base not in DONT_TRACE_THREADING or (method_name in INNER_METHODS and back_base in INNER_FILES)
                    ):
                        thread_id = get_thread_id(self_obj)
                        name = self_obj.getName()
                        real_method = frame.f_code.co_name
                        parent = None
                        if real_method == "_stop":
                            # Report the frame that requested the stop, not
                            # threading's internal _wait_for_tstate_lock frame.
                            if back_base in INNER_FILES and back.f_code.co_name == "_wait_for_tstate_lock":
                                back = back.f_back.f_back
                            real_method = "stop"
                            if hasattr(self_obj, "_pydev_join_called"):
                                parent = get_thread_id(t)
                        elif real_method == "join":
                            # join called in the current thread, not in self object
                            if not self_obj.is_alive():
                                return
                            thread_id = get_thread_id(t)
                            name = t.name
                            self_obj._pydev_join_called = True

                        if real_method == "start":
                            parent = get_thread_id(t)
                        send_concurrency_message(
                            "threading_event",
                            event_time,
                            name,
                            thread_id,
                            "thread",
                            real_method,
                            back.f_code.co_filename,
                            back.f_lineno,
                            back,
                            parent=parent,
                        )
                        # print(event_time, self_obj.getName(), thread_id, "thread",
                        # real_method, back.f_code.co_filename, back.f_lineno)

                if method_name == "pydev_after_run_call":
                    # pydev_after_run_call is the empty hook injected by
                    # wrap_attr around run(); seeing it on the stack means the
                    # thread's run() just returned, i.e. the thread stopped.
                    if hasattr(frame, "f_back") and frame.f_back is not None:
                        back = frame.f_back
                        if hasattr(back, "f_back") and back.f_back is not None:
                            back = back.f_back
                            if "self" in back.f_locals:
                                if isinstance(back.f_locals["self"], threading.Thread):
                                    my_self_obj = frame.f_back.f_back.f_locals["self"]
                                    my_back = frame.f_back.f_back
                                    my_thread_id = get_thread_id(my_self_obj)
                                    # NOTE(review): "send_massage" looks like a typo
                                    # for "send_message"; kept as-is (local only).
                                    send_massage = True
                                    if hasattr(my_self_obj, "_pydev_join_called"):
                                        send_massage = False
                                        # we can't detect stop after join in Python 2 yet
                                    if send_massage:
                                        send_concurrency_message(
                                            "threading_event",
                                            event_time,
                                            "Thread",
                                            my_thread_id,
                                            "thread",
                                            "stop",
                                            my_back.f_code.co_filename,
                                            my_back.f_lineno,
                                            my_back,
                                            parent=None,
                                        )

                if self_obj.__class__ == ObjectWrapper:
                    if back_base in DONT_TRACE_THREADING:
                        # do not trace methods called from threading
                        return
                    back_back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back.f_back)[2]
                    back = back.f_back
                    if back_back_base in DONT_TRACE_THREADING:
                        # back_back_base is the file, where the method was called froms
                        return
                    if method_name == "__init__":
                        send_concurrency_message(
                            "threading_event",
                            event_time,
                            t.name,
                            get_thread_id(t),
                            "lock",
                            method_name,
                            back.f_code.co_filename,
                            back.f_lineno,
                            back,
                            lock_id=str(id(frame.f_locals["self"])),
                        )
                    if "attr" in frame.f_locals and (frame.f_locals["attr"] in LOCK_METHODS or frame.f_locals["attr"] in QUEUE_METHODS):
                        # ObjectWrapper reports each proxied call via
                        # call_begin/call_end; "attr" holds the real method name.
                        real_method = frame.f_locals["attr"]
                        if method_name == "call_begin":
                            real_method += "_begin"
                        elif method_name == "call_end":
                            real_method += "_end"
                        else:
                            return
                        if real_method == "release_end":
                            # do not log release end. Maybe use it later
                            return
                        send_concurrency_message(
                            "threading_event",
                            event_time,
                            t.name,
                            get_thread_id(t),
                            "lock",
                            real_method,
                            back.f_code.co_filename,
                            back.f_lineno,
                            back,
                            lock_id=str(id(self_obj)),
                        )

                        if real_method in ("put_end", "get_end"):
                            # fake release for queue, cause we don't call it directly
                            send_concurrency_message(
                                "threading_event",
                                event_time,
                                t.name,
                                get_thread_id(t),
                                "lock",
                                "release",
                                back.f_code.co_filename,
                                back.f_lineno,
                                back,
                                lock_id=str(id(self_obj)),
                            )
                        # print(event_time, t.name, get_thread_id(t), "lock",
                        # real_method, back.f_code.co_filename, back.f_lineno)

        except Exception:
            pydev_log.exception()
|
||||
|
||||
|
||||
class NameManager:
    """Hands out stable, human-readable names of the form "<prefix>-<n>"."""

    def __init__(self, name_prefix):
        self.tasks = {}  # maps id -> assigned name
        self.last = 0  # counter of names handed out so far
        self.prefix = name_prefix

    def get(self, id):
        """Return the name for ``id``, assigning the next "<prefix>-<n>" on first use."""
        try:
            return self.tasks[id]
        except KeyError:
            self.last += 1
            assigned = self.prefix + "-" + str(self.last)
            self.tasks[id] = assigned
            return assigned
|
||||
|
||||
|
||||
class AsyncioLogger:
    # Reports asyncio Task/Lock/Queue activity to the IDE's concurrency view,
    # mirroring what ThreadingLogger does for real threads.

    def __init__(self):
        self.task_mgr = NameManager("Task")
        # NOTE(review): coro_mgr is not used in this class as shown -- possibly
        # used elsewhere or dead; kept as-is.
        self.coro_mgr = NameManager("Coro")
        self.start_time = cur_time()

    def get_task_id(self, frame):
        """Walk up the stack to find the asyncio.Task whose _step is running."""
        asyncio = sys.modules.get("asyncio")
        if asyncio is None:
            # If asyncio was not imported, there's nothing to be done
            # (also fixes issue where multiprocessing is imported due
            # to asyncio).
            return None
        while frame is not None:
            if "self" in frame.f_locals:
                self_obj = frame.f_locals["self"]
                if isinstance(self_obj, asyncio.Task):
                    method_name = frame.f_code.co_name
                    if method_name == "_step":
                        return id(self_obj)
            frame = frame.f_back
        return None

    def log_event(self, frame):
        """Examine one traced frame and emit asyncio concurrency events."""
        event_time = cur_time() - self.start_time

        # Debug loop iterations
        # if isinstance(self_obj, asyncio.base_events.BaseEventLoop):
        # if method_name == "_run_once":
        # print("Loop iteration")

        if not hasattr(frame, "f_back") or frame.f_back is None:
            return

        asyncio = sys.modules.get("asyncio")
        if asyncio is None:
            # If asyncio was not imported, there's nothing to be done
            # (also fixes issue where multiprocessing is imported due
            # to asyncio).
            return

        back = frame.f_back

        if "self" in frame.f_locals:
            self_obj = frame.f_locals["self"]
            if isinstance(self_obj, asyncio.Task):
                method_name = frame.f_code.co_name
                # Task.set_result => the task finished ("stop" event).
                if method_name == "set_result":
                    task_id = id(self_obj)
                    task_name = self.task_mgr.get(str(task_id))
                    send_concurrency_message(
                        "asyncio_event", event_time, task_name, task_name, "thread", "stop", frame.f_code.co_filename, frame.f_lineno, frame
                    )

                # Called from Task.__init__ => the task was created ("start").
                method_name = back.f_code.co_name
                if method_name == "__init__":
                    task_id = id(self_obj)
                    task_name = self.task_mgr.get(str(task_id))
                    send_concurrency_message(
                        "asyncio_event",
                        event_time,
                        task_name,
                        task_name,
                        "thread",
                        "start",
                        frame.f_code.co_filename,
                        frame.f_lineno,
                        frame,
                    )

            method_name = frame.f_code.co_name
            if isinstance(self_obj, asyncio.Lock):
                if method_name in ("acquire", "release"):
                    task_id = self.get_task_id(frame)
                    task_name = self.task_mgr.get(str(task_id))

                    if method_name == "acquire":
                        # Uncontended acquire: report the begin explicitly,
                        # since the end is reported right below.
                        if not self_obj._waiters and not self_obj.locked():
                            send_concurrency_message(
                                "asyncio_event",
                                event_time,
                                task_name,
                                task_name,
                                "lock",
                                method_name + "_begin",
                                frame.f_code.co_filename,
                                frame.f_lineno,
                                frame,
                                lock_id=str(id(self_obj)),
                            )
                        if self_obj.locked():
                            method_name += "_begin"
                        else:
                            method_name += "_end"
                    elif method_name == "release":
                        method_name += "_end"

                    send_concurrency_message(
                        "asyncio_event",
                        event_time,
                        task_name,
                        task_name,
                        "lock",
                        method_name,
                        frame.f_code.co_filename,
                        frame.f_lineno,
                        frame,
                        lock_id=str(id(self_obj)),
                    )

            if isinstance(self_obj, asyncio.Queue):
                if method_name in ("put", "get", "_put", "_get"):
                    task_id = self.get_task_id(frame)
                    task_name = self.task_mgr.get(str(task_id))

                    # Queue operations are mapped onto lock semantics:
                    # put/get begin an acquire; _put (and a completed get)
                    # end it and immediately release.
                    if method_name == "put":
                        send_concurrency_message(
                            "asyncio_event",
                            event_time,
                            task_name,
                            task_name,
                            "lock",
                            "acquire_begin",
                            frame.f_code.co_filename,
                            frame.f_lineno,
                            frame,
                            lock_id=str(id(self_obj)),
                        )
                    elif method_name == "_put":
                        send_concurrency_message(
                            "asyncio_event",
                            event_time,
                            task_name,
                            task_name,
                            "lock",
                            "acquire_end",
                            frame.f_code.co_filename,
                            frame.f_lineno,
                            frame,
                            lock_id=str(id(self_obj)),
                        )
                        send_concurrency_message(
                            "asyncio_event",
                            event_time,
                            task_name,
                            task_name,
                            "lock",
                            "release",
                            frame.f_code.co_filename,
                            frame.f_lineno,
                            frame,
                            lock_id=str(id(self_obj)),
                        )
                    elif method_name == "get":
                        back = frame.f_back
                        if back.f_code.co_name != "send":
                            send_concurrency_message(
                                "asyncio_event",
                                event_time,
                                task_name,
                                task_name,
                                "lock",
                                "acquire_begin",
                                frame.f_code.co_filename,
                                frame.f_lineno,
                                frame,
                                lock_id=str(id(self_obj)),
                            )
                        else:
                            send_concurrency_message(
                                "asyncio_event",
                                event_time,
                                task_name,
                                task_name,
                                "lock",
                                "acquire_end",
                                frame.f_code.co_filename,
                                frame.f_lineno,
                                frame,
                                lock_id=str(id(self_obj)),
                            )
                            send_concurrency_message(
                                "asyncio_event",
                                event_time,
                                task_name,
                                task_name,
                                "lock",
                                "release",
                                frame.f_code.co_filename,
                                frame.f_lineno,
                                frame,
                                lock_id=str(id(self_obj)),
                            )
|
||||
+82
@@ -0,0 +1,82 @@
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
|
||||
|
||||
def wrapper(fun):
    # Wrap `fun` so that the empty hook below runs right after it. The hook's
    # *name* is load-bearing: the concurrency tracer matches the literal frame
    # name "pydev_after_run_call" to detect that the wrapped call finished, so
    # it must not be renamed. Note the wrapper discards fun's return value.
    def pydev_after_run_call():
        pass

    def inner(*args, **kwargs):
        fun(*args, **kwargs)
        pydev_after_run_call()

    return inner
|
||||
|
||||
|
||||
def wrap_attr(obj, attr):
    """Replace ``obj.<attr>`` with a wrapped version and mark the object patched."""
    original = getattr(obj, attr)
    setattr(obj, attr, wrapper(original))
    # Flag checked by the tracer so each object is only patched once.
    obj._pydev_run_patched = True
|
||||
|
||||
|
||||
class ObjectWrapper(object):
    """Proxy that surrounds every method call with call_begin/call_end hooks.

    Non-callable attributes are passed through untouched. The hooks are no-ops
    here; the concurrency tracer observes them by their frame names.
    """

    def __init__(self, obj):
        self.wrapped_object = obj
        try:
            import functools

            # Best effort: copy metadata from the wrapped object when possible.
            functools.update_wrapper(self, obj)
        except:
            pass

    def __getattr__(self, attr):
        target = getattr(self.wrapped_object, attr)  # .__getattribute__(attr)
        if not callable(target):
            return target

        def patched_attr(*args, **kwargs):
            self.call_begin(attr)
            result = target(*args, **kwargs)
            self.call_end(attr)
            # If the call returned the wrapped object itself, keep proxying.
            if result == self.wrapped_object:
                return self
            return result

        return patched_attr

    def call_begin(self, attr):
        pass

    def call_end(self, attr):
        pass

    def __enter__(self):
        self.call_begin("__enter__")
        self.wrapped_object.__enter__()
        self.call_end("__enter__")

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.call_begin("__exit__")
        self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
|
||||
|
||||
|
||||
def factory_wrapper(fun):
    """Wrap factory ``fun`` so its products come back inside an ObjectWrapper."""

    def inner(*args, **kwargs):
        return ObjectWrapper(fun(*args, **kwargs))

    return inner
|
||||
|
||||
|
||||
def wrap_threads():
    # Monkeypatch the synchronization-primitive factories so every Lock/RLock
    # and Queue created afterwards is an ObjectWrapper, letting the
    # concurrency analyser observe acquire/release/put/get calls.
    # TODO: add wrappers for thread and _thread
    # import _thread as mod
    # print("Thread imported")
    # mod.start_new_thread = wrapper(mod.start_new_thread)
    threading.Lock = factory_wrapper(threading.Lock)
    threading.RLock = factory_wrapper(threading.RLock)

    # queue patching
    import queue  # @UnresolvedImport

    queue.Queue = factory_wrapper(queue.Queue)
|
||||
@@ -0,0 +1,265 @@
|
||||
"""An helper file for the pydev debugger (REPL) console
|
||||
"""
|
||||
import sys
|
||||
import traceback
|
||||
from _pydevd_bundle.pydevconsole_code import InteractiveConsole, _EvalAwaitInNewEventLoop
|
||||
from _pydev_bundle import _pydev_completer
|
||||
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
|
||||
from _pydev_bundle.pydev_imports import Exec
|
||||
from _pydev_bundle.pydev_override import overrides
|
||||
from _pydevd_bundle import pydevd_save_locals
|
||||
from _pydevd_bundle.pydevd_io import IOBuf
|
||||
from pydevd_tracing import get_exception_traceback_str
|
||||
from _pydevd_bundle.pydevd_xml import make_valid_xml_value
|
||||
import inspect
|
||||
from _pydevd_bundle.pydevd_save_locals import update_globals_and_locals
|
||||
|
||||
CONSOLE_OUTPUT = "output"
|
||||
CONSOLE_ERROR = "error"
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# ConsoleMessage
|
||||
# =======================================================================================================================
|
||||
class ConsoleMessage:
    """Accumulates console output/error lines and serializes them to XML."""

    def __init__(self):
        # True when the interpreter still needs more input to complete a block.
        self.more = False
        # List of tuple [('error', 'error_message'), ('message_list', 'output_message')]
        self.console_messages = []

    def add_console_message(self, message_type, message):
        """Append each non-blank line of ``message``, tagged with ``message_type``."""
        for msg_line in message.split("\n"):
            if msg_line.strip():
                self.console_messages.append((message_type, msg_line))

    def update_more(self, more):
        """more is set to true if further input is required from the user
        else more is set to false
        """
        self.more = more

    def to_xml(self):
        """Create an XML for console message_list, error and more (true/false)
        <xml>
        <message_list>console message_list</message_list>
        <error>console error</error>
        <more>true/false</more>
        </xml>
        """
        parts = ["<xml><more>%s</more>" % (self.more,)]
        for message_type, message in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (message_type, make_valid_xml_value(message), message_type))
        parts.append("</xml>")
        return "".join(parts)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# _DebugConsoleStdIn
|
||||
# =======================================================================================================================
|
||||
class _DebugConsoleStdIn(BaseStdIn):
    # stdin replacement used while code runs in the debug console: reading is
    # not supported, so every readline() warns on stderr and behaves as if the
    # user entered an empty line.
    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        sys.stderr.write("Warning: Reading from stdin is still not supported in this console.\n")
        return "\n"
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# DebugConsole
|
||||
# =======================================================================================================================
|
||||
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console
    """

    @overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self, *args, **kwargs):
        # When output is not being buffered, the real stdin can be used;
        # otherwise reading is unsupported (see _DebugConsoleStdIn).
        try:
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass

        return _DebugConsoleStdIn()  # If buffered, raw_input is not supported in this console.

    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins

        :param buffer_output: if False won't redirect the output.

        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            # Save the real streams so they can be restored in the finally.
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    # Capture everything the executed code prints.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            # Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr

        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []

    @overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        # Delegate to the stdlib InteractiveConsole compilation/execution.
        return InteractiveConsole.push(self, line)

    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.

        """
        try:
            updated_globals = self.get_namespace()
            # Snapshot so only names actually changed by the code are written
            # back into the paused frame afterwards.
            initial_globals = updated_globals.copy()

            updated_locals = None

            is_async = False
            if hasattr(inspect, "CO_COROUTINE"):
                is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE

            if is_async:
                # Coroutine code objects are awaited in a dedicated event loop
                # running on a helper thread.
                t = _EvalAwaitInNewEventLoop(code, updated_globals, updated_locals)
                t.start()
                t.join()

                update_globals_and_locals(updated_globals, initial_globals, self.frame)
                if t.exc:
                    raise t.exc[1].with_traceback(t.exc[2])

            else:
                try:
                    exec(code, updated_globals, updated_locals)
                finally:
                    update_globals_and_locals(updated_globals, initial_globals, self.frame)
        except SystemExit:
            raise
        except:
            # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
            # (showtraceback does it on python 3.5 onwards)
            sys.excepthook = sys.__excepthook__
            try:
                self.showtraceback()
            finally:
                # NOTE(review): this assigns sys.__excepthook__ from
                # sys.excepthook (both are the same object at this point);
                # it looks like the intent was to restore sys.excepthook --
                # kept as-is, confirm upstream.
                sys.__excepthook__ = sys.excepthook

    def get_namespace(self):
        """Return the paused frame's globals overlaid with its locals."""
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# InteractiveConsoleCache
|
||||
# =======================================================================================================================
|
||||
class InteractiveConsoleCache:
    # Caches the single DebugConsole instance together with the
    # (thread_id, frame_id) pair it was created for; managed by
    # get_interactive_console / clear_interactive_console below.
    thread_id = None
    frame_id = None
    interactive_console_instance = None
|
||||
|
||||
|
||||
# Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
|
||||
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """returns the global interactive console.
    interactive console should have been initialized by this time
    :rtype: DebugConsole
    """
    cache = InteractiveConsoleCache
    if cache.thread_id == thread_id and cache.frame_id == frame_id:
        return cache.interactive_console_instance

    # Different thread/frame: build a fresh console and remember its context.
    cache.interactive_console_instance = DebugConsole()
    cache.thread_id = thread_id
    cache.frame_id = frame_id

    console_stacktrace = traceback.extract_stack(frame, limit=1)
    if console_stacktrace:
        current_context = console_stacktrace[0]  # top entry from stacktrace
        context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,))
    return cache.interactive_console_instance
|
||||
|
||||
|
||||
def clear_interactive_console():
    """Drop the cached console so the next request creates a fresh one."""
    cache = InteractiveConsoleCache
    cache.thread_id = None
    cache.frame_id = None
    cache.interactive_console_instance = None
|
||||
|
||||
|
||||
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """fetch an interactive console instance from the cache and
    push the received command to the console.

    create and return an instance of console_message
    """
    console_message = ConsoleMessage()

    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    more, output_messages, error_messages = interpreter.push(line, frame, buffer_output)
    console_message.update_more(more)

    # Record captured stdout first, then stderr, preserving line order.
    for message_type, messages in ((CONSOLE_OUTPUT, output_messages), (CONSOLE_ERROR, error_messages)):
        for message in messages:
            console_message.add_console_message(message_type, message)

    return console_message
|
||||
|
||||
|
||||
def get_description(frame, thread_id, frame_id, expression):
    """Evaluate ``expression`` in the cached console and return its description."""
    msg = ConsoleMessage()
    console = get_interactive_console(thread_id, frame_id, frame, msg)
    try:
        console.frame = frame
        return console.getDescription(expression)
    finally:
        # Always drop the frame reference so it can be released.
        console.frame = None
|
||||
|
||||
|
||||
def get_completions(frame, act_tok):
    """fetch all completions, create xml for the same
    return the completions xml
    """
    # Thin delegation to the shared completer module.
    return _pydev_completer.generate_completions_as_xml(frame, act_tok)
|
||||
@@ -0,0 +1,847 @@
|
||||
"""
|
||||
This module holds the constants used for specifying the states of the debugger.
|
||||
"""
|
||||
|
||||
from __future__ import nested_scopes
|
||||
import platform
|
||||
import weakref
|
||||
import struct
|
||||
import warnings
|
||||
import functools
|
||||
from contextlib import contextmanager
|
||||
|
||||
# Debugger run states.
STATE_RUN = 1
STATE_SUSPEND = 2

# Identifies which framework caused a suspension.
PYTHON_SUSPEND = 1
DJANGO_SUSPEND = 2
JINJA2_SUSPEND = 3

# Single integer type on Python 3 (kept as a tuple for isinstance checks).
int_types = (int,)

# types does not include a MethodWrapperType
try:
    MethodWrapperType = type([].__str__)
except:
    MethodWrapperType = None
|
||||
|
||||
import sys # Note: the sys import must be here anyways (others depend on it)
|
||||
|
||||
# Preload codecs to avoid imports to them later on which can potentially halt the debugger.
import codecs as _codecs

# codecs.lookup caches each codec in the registry, so later encode/decode
# calls won't trigger an import while user code is being traced.
for _codec in ["ascii", "utf8", "utf-8", "latin1", "latin-1", "idna"]:
    _codecs.lookup(_codec)
|
||||
|
||||
|
||||
class DebugInfoHolder:
    """Mutable holder for the debugger's own trace/log configuration."""

    # we have to put it here because it can be set through the command line (so, the
    # already imported references would not have it).

    # General information
    DEBUG_TRACE_LEVEL = 0  # 0 = critical, 1 = info, 2 = debug, 3 = verbose

    # May be set from the PYDEVD_DEBUG_FILE env var when debug tracing is
    # enabled (see the SHOW_DEBUG_INFO_ENV handling later in this module).
    PYDEVD_DEBUG_FILE = None
|
||||
|
||||
|
||||
# Any filename that starts with these strings is not traced nor shown to the user.
|
||||
# In Python 3.7 "<frozen ..." appears multiple times during import and should be ignored for the user.
|
||||
# In PyPy "<builtin> ..." can appear and should be ignored for the user.
|
||||
# <attrs is used internally by attrs
|
||||
# <__array_function__ is used by numpy
|
||||
IGNORE_BASENAMES_STARTING_WITH = ("<frozen ", "<builtin", "<attrs", "<__array_function__")
|
||||
|
||||
# Note: <string> has special heuristics to know whether it should be traced or not (it's part of
|
||||
# user code when it's the <string> used in python -c and part of the library otherwise).
|
||||
|
||||
# Any filename that starts with these strings is considered user (project) code. Note
|
||||
# that files for which we have a source mapping are also considered as a part of the project.
|
||||
USER_CODE_BASENAMES_STARTING_WITH = ("<ipython",)
|
||||
|
||||
# Any filename that starts with these strings is considered library code (note: checked after USER_CODE_BASENAMES_STARTING_WITH).
|
||||
LIBRARY_CODE_BASENAMES_STARTING_WITH = ("<",)
|
||||
|
||||
IS_CPYTHON = platform.python_implementation() == "CPython"
|
||||
|
||||
# Hold a reference to the original _getframe (because psyco will change that as soon as it's imported)
|
||||
IS_IRONPYTHON = sys.platform == "cli"
|
||||
try:
|
||||
get_frame = sys._getframe
|
||||
if IS_IRONPYTHON:
|
||||
|
||||
def get_frame():
|
||||
try:
|
||||
return sys._getframe()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
except AttributeError:
|
||||
|
||||
def get_frame():
|
||||
raise AssertionError("sys._getframe not available (possible causes: enable -X:Frames on IronPython?)")
|
||||
|
||||
|
||||
# Used to determine the maximum size of each variable passed to eclipse -- having a big value here may make
|
||||
# the communication slower -- as the variables are being gathered lazily in the latest version of eclipse,
|
||||
# this value was raised from 200 to 1000.
|
||||
MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 1000
|
||||
# Prefix for saving functions return values in locals
|
||||
RETURN_VALUES_DICT = "__pydevd_ret_val_dict"
|
||||
GENERATED_LEN_ATTR_NAME = "len()"
|
||||
|
||||
import os
|
||||
|
||||
from _pydevd_bundle import pydevd_vm_type
|
||||
|
||||
# Constant detects when running on Jython/windows properly later on.
|
||||
IS_WINDOWS = sys.platform == "win32"
|
||||
IS_LINUX = sys.platform in ("linux", "linux2")
|
||||
IS_MAC = sys.platform == "darwin"
|
||||
IS_WASM = sys.platform == "emscripten" or sys.platform == "wasi"
|
||||
|
||||
IS_64BIT_PROCESS = sys.maxsize > (2**32)
|
||||
|
||||
IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON
|
||||
|
||||
IS_PYPY = platform.python_implementation() == "PyPy"
|
||||
|
||||
if IS_JYTHON:
|
||||
import java.lang.System # @UnresolvedImport
|
||||
|
||||
IS_WINDOWS = java.lang.System.getProperty("os.name").lower().startswith("windows")
|
||||
|
||||
USE_CUSTOM_SYS_CURRENT_FRAMES = not hasattr(sys, "_current_frames") or IS_PYPY
|
||||
USE_CUSTOM_SYS_CURRENT_FRAMES_MAP = USE_CUSTOM_SYS_CURRENT_FRAMES and (IS_PYPY or IS_IRONPYTHON)
|
||||
|
||||
if USE_CUSTOM_SYS_CURRENT_FRAMES:
|
||||
# Some versions of Jython don't have it (but we can provide a replacement)
|
||||
if IS_JYTHON:
|
||||
from java.lang import NoSuchFieldException
|
||||
from org.python.core import ThreadStateMapping
|
||||
|
||||
try:
|
||||
cachedThreadState = ThreadStateMapping.getDeclaredField("globalThreadStates") # Dev version
|
||||
except NoSuchFieldException:
|
||||
cachedThreadState = ThreadStateMapping.getDeclaredField("cachedThreadState") # Release Jython 2.7.0
|
||||
cachedThreadState.accessible = True
|
||||
thread_states = cachedThreadState.get(ThreadStateMapping)
|
||||
|
||||
def _current_frames():
|
||||
as_array = thread_states.entrySet().toArray()
|
||||
ret = {}
|
||||
for thread_to_state in as_array:
|
||||
thread = thread_to_state.getKey()
|
||||
if thread is None:
|
||||
continue
|
||||
thread_state = thread_to_state.getValue()
|
||||
if thread_state is None:
|
||||
continue
|
||||
|
||||
frame = thread_state.frame
|
||||
if frame is None:
|
||||
continue
|
||||
|
||||
ret[thread.getId()] = frame
|
||||
return ret
|
||||
|
||||
elif USE_CUSTOM_SYS_CURRENT_FRAMES_MAP:
|
||||
constructed_tid_to_last_frame = {}
|
||||
|
||||
# IronPython doesn't have it. Let's use our workaround...
|
||||
def _current_frames():
|
||||
return constructed_tid_to_last_frame
|
||||
|
||||
else:
|
||||
raise RuntimeError("Unable to proceed (sys._current_frames not available in this Python implementation).")
|
||||
else:
|
||||
_current_frames = sys._current_frames
|
||||
|
||||
IS_PYTHON_STACKLESS = "stackless" in sys.version.lower()
|
||||
CYTHON_SUPPORTED = False
|
||||
|
||||
python_implementation = platform.python_implementation()
|
||||
if python_implementation == "CPython":
|
||||
# Only available for CPython!
|
||||
CYTHON_SUPPORTED = True
|
||||
|
||||
# =======================================================================================================================
|
||||
# Python 3?
|
||||
# =======================================================================================================================
|
||||
IS_PY36_OR_GREATER = sys.version_info >= (3, 6)
|
||||
IS_PY37_OR_GREATER = sys.version_info >= (3, 7)
|
||||
IS_PY38_OR_GREATER = sys.version_info >= (3, 8)
|
||||
IS_PY39_OR_GREATER = sys.version_info >= (3, 9)
|
||||
IS_PY310_OR_GREATER = sys.version_info >= (3, 10)
|
||||
IS_PY311_OR_GREATER = sys.version_info >= (3, 11)
|
||||
IS_PY312_OR_GREATER = sys.version_info >= (3, 12)
|
||||
IS_PY313_OR_GREATER = sys.version_info >= (3, 13)
|
||||
IS_PY314_OR_GREATER = sys.version_info >= (3, 14)
|
||||
|
||||
# Bug affecting Python 3.13.0 specifically makes some tests crash the interpreter!
|
||||
# Hopefully it'll be fixed in 3.13.1.
|
||||
IS_PY313_0 = sys.version_info[:3] == (3, 13, 0)
|
||||
|
||||
# Mark tests that need to be fixed with this.
|
||||
TODO_PY313_OR_GREATER = IS_PY313_OR_GREATER
|
||||
|
||||
# Not currently supported in Python 3.14.
|
||||
SUPPORT_ATTACH_TO_PID = not IS_PY314_OR_GREATER
|
||||
|
||||
|
||||
def version_str(v):
    """Format a version tuple: dotted major.minor.micro plus any extra parts
    concatenated without separators, e.g. (3, 11, 2, 'final', 0) -> '3.11.2final0'.
    """
    release_parts = [str(part) for part in v[:3]]
    extra_parts = [str(part) for part in v[3:]]
    return ".".join(release_parts) + "".join(extra_parts)
|
||||
|
||||
|
||||
PY_VERSION_STR = version_str(sys.version_info)
|
||||
try:
|
||||
PY_IMPL_VERSION_STR = version_str(sys.implementation.version)
|
||||
except AttributeError:
|
||||
PY_IMPL_VERSION_STR = ""
|
||||
|
||||
try:
|
||||
PY_IMPL_NAME = sys.implementation.name
|
||||
except AttributeError:
|
||||
PY_IMPL_NAME = ""
|
||||
|
||||
ENV_TRUE_LOWER_VALUES = ("yes", "true", "1")
|
||||
ENV_FALSE_LOWER_VALUES = ("no", "false", "0")
|
||||
|
||||
PYDEVD_USE_SYS_MONITORING = IS_PY312_OR_GREATER and hasattr(sys, "monitoring")
|
||||
if PYDEVD_USE_SYS_MONITORING: # Default gotten, let's see if it was somehow customize by the user.
|
||||
_use_sys_monitoring_env_var = os.getenv("PYDEVD_USE_SYS_MONITORING", "").lower()
|
||||
if _use_sys_monitoring_env_var:
|
||||
# Check if the user specified something.
|
||||
if _use_sys_monitoring_env_var in ENV_FALSE_LOWER_VALUES:
|
||||
PYDEVD_USE_SYS_MONITORING = False
|
||||
elif _use_sys_monitoring_env_var in ENV_TRUE_LOWER_VALUES:
|
||||
PYDEVD_USE_SYS_MONITORING = True
|
||||
else:
|
||||
raise RuntimeError("Unrecognized value for PYDEVD_USE_SYS_MONITORING: %s" % (_use_sys_monitoring_env_var,))
|
||||
|
||||
|
||||
def is_true_in_env(env_key):
    """Return True when the env var named *env_key* holds a truthy value.

    *env_key* may also be a tuple of names, in which case the result is True
    if any of them is truthy (checked recursively).
    """
    if isinstance(env_key, tuple):
        # Accept the first truthy entry among the given names.
        return any(is_true_in_env(name) for name in env_key)
    return os.getenv(env_key, "").lower() in ENV_TRUE_LOWER_VALUES
|
||||
|
||||
|
||||
def as_float_in_env(env_key, default):
    """Read env var *env_key* as a float; return *default* when it is unset.

    :raises RuntimeError: if the variable is set but cannot be parsed as float.
    """
    raw = os.getenv(env_key)
    if raw is None:
        return default
    try:
        return float(raw)
    except Exception:
        raise RuntimeError("Error: expected the env variable: %s to be set to a float value. Found: %s" % (env_key, raw))
|
||||
|
||||
|
||||
def as_int_in_env(env_key, default):
    """Read env var *env_key* as an int; return *default* when it is unset.

    :raises RuntimeError: if the variable is set but cannot be parsed as int.
    """
    value = os.getenv(env_key)
    if value is None:
        return default
    try:
        return int(value)
    except Exception:
        # Fix: message previously read "a int value" (grammar).
        raise RuntimeError("Error: expected the env variable: %s to be set to an int value. Found: %s" % (env_key, value))
|
||||
|
||||
|
||||
# If true in env, use gevent mode.
|
||||
SUPPORT_GEVENT = is_true_in_env("GEVENT_SUPPORT")
|
||||
|
||||
# Opt-in support to show gevent paused greenlets. False by default because if too many greenlets are
|
||||
# paused the UI can slow-down (i.e.: if 1000 greenlets are paused, each one would be shown separate
|
||||
# as a different thread, but if the UI isn't optimized for that the experience is lacking...).
|
||||
GEVENT_SHOW_PAUSED_GREENLETS = is_true_in_env("GEVENT_SHOW_PAUSED_GREENLETS")
|
||||
|
||||
DISABLE_FILE_VALIDATION = is_true_in_env("PYDEVD_DISABLE_FILE_VALIDATION")
|
||||
|
||||
GEVENT_SUPPORT_NOT_SET_MSG = os.getenv(
|
||||
"GEVENT_SUPPORT_NOT_SET_MSG",
|
||||
"It seems that the gevent monkey-patching is being used.\n"
|
||||
"Please set an environment variable with:\n"
|
||||
"GEVENT_SUPPORT=True\n"
|
||||
"to enable gevent support in the debugger.",
|
||||
)
|
||||
|
||||
USE_LIB_COPY = SUPPORT_GEVENT
|
||||
|
||||
INTERACTIVE_MODE_AVAILABLE = sys.platform in ("darwin", "win32") or os.getenv("DISPLAY") is not None
|
||||
|
||||
# If true in env, forces cython to be used (raises error if not available).
|
||||
# If false in env, disables it.
|
||||
# If not specified, uses default heuristic to determine if it should be loaded.
|
||||
USE_CYTHON_FLAG = os.getenv("PYDEVD_USE_CYTHON")
|
||||
|
||||
if USE_CYTHON_FLAG is not None:
|
||||
USE_CYTHON_FLAG = USE_CYTHON_FLAG.lower()
|
||||
if USE_CYTHON_FLAG not in ENV_TRUE_LOWER_VALUES and USE_CYTHON_FLAG not in ENV_FALSE_LOWER_VALUES:
|
||||
raise RuntimeError(
|
||||
"Unexpected value for PYDEVD_USE_CYTHON: %s (enable with one of: %s, disable with one of: %s)"
|
||||
% (USE_CYTHON_FLAG, ENV_TRUE_LOWER_VALUES, ENV_FALSE_LOWER_VALUES)
|
||||
)
|
||||
|
||||
else:
|
||||
if not CYTHON_SUPPORTED:
|
||||
USE_CYTHON_FLAG = "no"
|
||||
|
||||
# If true in env, forces frame eval to be used (raises error if not available).
|
||||
# If false in env, disables it.
|
||||
# If not specified, uses default heuristic to determine if it should be loaded.
|
||||
PYDEVD_USE_FRAME_EVAL = os.getenv("PYDEVD_USE_FRAME_EVAL", "").lower()
|
||||
|
||||
# Values used to determine how much container items will be shown.
|
||||
# PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS:
|
||||
# - Defines how many items will appear initially expanded after which a 'more...' will appear.
|
||||
#
|
||||
# PYDEVD_CONTAINER_BUCKET_SIZE
|
||||
# - Defines the size of each bucket inside the 'more...' item
|
||||
# i.e.: a bucket with size == 2 would show items such as:
|
||||
# - [2:4]
|
||||
# - [4:6]
|
||||
# ...
|
||||
#
|
||||
# PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS
|
||||
# - Defines the maximum number of items for dicts and sets.
|
||||
#
|
||||
PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS = as_int_in_env("PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS", 100)
|
||||
PYDEVD_CONTAINER_BUCKET_SIZE = as_int_in_env("PYDEVD_CONTAINER_BUCKET_SIZE", 1000)
|
||||
PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS = as_int_in_env("PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS", 500)
|
||||
PYDEVD_CONTAINER_NUMPY_MAX_ITEMS = as_int_in_env("PYDEVD_CONTAINER_NUMPY_MAX_ITEMS", 500)
|
||||
|
||||
PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING = is_true_in_env("PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING")
|
||||
|
||||
# If specified in PYDEVD_IPYTHON_CONTEXT it must be a string with the basename
|
||||
# and then the name of 2 methods in which the evaluate is done.
|
||||
PYDEVD_IPYTHON_CONTEXT = ("interactiveshell.py", "run_code", "run_ast_nodes")
|
||||
_ipython_ctx = os.getenv("PYDEVD_IPYTHON_CONTEXT")
|
||||
if _ipython_ctx:
|
||||
PYDEVD_IPYTHON_CONTEXT = tuple(x.strip() for x in _ipython_ctx.split(","))
|
||||
assert len(PYDEVD_IPYTHON_CONTEXT) == 3, "Invalid PYDEVD_IPYTHON_CONTEXT: %s" % (_ipython_ctx,)
|
||||
|
||||
# Use to disable loading the lib to set tracing to all threads (default is using heuristics based on where we're running).
|
||||
LOAD_NATIVE_LIB_FLAG = os.getenv("PYDEVD_LOAD_NATIVE_LIB", "").lower()
|
||||
|
||||
LOG_TIME = os.getenv("PYDEVD_LOG_TIME", "true").lower() in ENV_TRUE_LOWER_VALUES
|
||||
|
||||
SHOW_COMPILE_CYTHON_COMMAND_LINE = is_true_in_env("PYDEVD_SHOW_COMPILE_CYTHON_COMMAND_LINE")
|
||||
|
||||
LOAD_VALUES_ASYNC = is_true_in_env("PYDEVD_LOAD_VALUES_ASYNC")
|
||||
DEFAULT_VALUE = "__pydevd_value_async"
|
||||
ASYNC_EVAL_TIMEOUT_SEC = 60
|
||||
NEXT_VALUE_SEPARATOR = "__pydev_val__"
|
||||
BUILTINS_MODULE_NAME = "builtins"
|
||||
|
||||
# Pandas customization.
|
||||
PANDAS_MAX_ROWS = as_int_in_env("PYDEVD_PANDAS_MAX_ROWS", 60)
|
||||
PANDAS_MAX_COLS = as_int_in_env("PYDEVD_PANDAS_MAX_COLS", 10)
|
||||
PANDAS_MAX_COLWIDTH = as_int_in_env("PYDEVD_PANDAS_MAX_COLWIDTH", 50)
|
||||
|
||||
# If getting an attribute or computing some value is too slow, let the user know if the given timeout elapses.
|
||||
PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT = as_float_in_env("PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT", 0.50)
|
||||
|
||||
# This timeout is used to track the time to send a message saying that the evaluation
|
||||
# is taking too long and possible mitigations.
|
||||
PYDEVD_WARN_EVALUATION_TIMEOUT = as_float_in_env("PYDEVD_WARN_EVALUATION_TIMEOUT", 3.0)
|
||||
|
||||
# If True in env shows a thread dump when the evaluation times out.
|
||||
PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT = is_true_in_env("PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT")
|
||||
|
||||
# This timeout is used only when the mode that all threads are stopped/resumed at once is used
|
||||
# (i.e.: multi_threads_single_notification)
|
||||
#
|
||||
# In this mode, if some evaluation doesn't finish until this timeout, we notify the user
|
||||
# and then resume all threads until the evaluation finishes.
|
||||
#
|
||||
# A negative value will disable the timeout and a value of 0 will automatically run all threads
|
||||
# (without any notification) when the evaluation is started and pause all threads when the
|
||||
# evaluation is finished. A positive value will run run all threads after the timeout
|
||||
# elapses.
|
||||
PYDEVD_UNBLOCK_THREADS_TIMEOUT = as_float_in_env("PYDEVD_UNBLOCK_THREADS_TIMEOUT", -1.0)
|
||||
|
||||
# Timeout to interrupt a thread (so, if some evaluation doesn't finish until this
|
||||
# timeout, the thread doing the evaluation is interrupted).
|
||||
# A value <= 0 means this is disabled.
|
||||
# See: _pydevd_bundle.pydevd_timeout.create_interrupt_this_thread_callback for details
|
||||
# on how the thread interruption works (there are some caveats related to it).
|
||||
PYDEVD_INTERRUPT_THREAD_TIMEOUT = as_float_in_env("PYDEVD_INTERRUPT_THREAD_TIMEOUT", -1)
|
||||
|
||||
# If PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS is set to False, the patching to hide pydevd threads won't be applied.
|
||||
PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS = (
|
||||
os.getenv("PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS", "true").lower() in ENV_TRUE_LOWER_VALUES
|
||||
)
|
||||
|
||||
EXCEPTION_TYPE_UNHANDLED = "UNHANDLED"
|
||||
EXCEPTION_TYPE_USER_UNHANDLED = "USER_UNHANDLED"
|
||||
EXCEPTION_TYPE_HANDLED = "HANDLED"
|
||||
|
||||
SHOW_DEBUG_INFO_ENV = is_true_in_env(("PYCHARM_DEBUG", "PYDEV_DEBUG", "PYDEVD_DEBUG"))
|
||||
|
||||
if SHOW_DEBUG_INFO_ENV:
|
||||
# show debug info before the debugger start
|
||||
DebugInfoHolder.DEBUG_TRACE_LEVEL = 3
|
||||
|
||||
DebugInfoHolder.PYDEVD_DEBUG_FILE = os.getenv("PYDEVD_DEBUG_FILE")
|
||||
|
||||
|
||||
def protect_libraries_from_patching():
    """
    In this function we delete some modules from `sys.modules` dictionary and import them again inside
    `_pydev_saved_modules` in order to save their original copies there. After that we can use these
    saved modules within the debugger to protect them from patching by external libraries (e.g. gevent).
    """
    # Modules commonly monkey-patched by gevent (both py2 and py3 spellings are
    # listed; the ones that don't exist simply fail to import below).
    patched = [
        "threading",
        "thread",
        "_thread",
        "time",
        "socket",
        "queue",
        "select",
        "xmlrpclib",
        "SimpleXMLRPCServer",
        "BaseHTTPServer",
        "SocketServer",
        "xmlrpc.client",
        "xmlrpc.server",
        "http.server",
        "socketserver",
    ]

    # Make sure each name is imported (ignore names missing on this Python).
    for name in patched:
        try:
            __import__(name)
        except:
            pass

    # Snapshot the currently-imported (original) module objects.
    patched_modules = dict([(k, v) for k, v in sys.modules.items() if k in patched])

    # Drop them from sys.modules so the import below re-imports fresh copies...
    for name in patched_modules:
        del sys.modules[name]

    # import for side effects
    import _pydev_bundle._pydev_saved_modules

    # ...then restore the original module objects for the rest of the program.
    for name in patched_modules:
        sys.modules[name] = patched_modules[name]
|
||||
|
||||
|
||||
if USE_LIB_COPY:
|
||||
protect_libraries_from_patching()
|
||||
|
||||
from _pydev_bundle._pydev_saved_modules import thread, threading
|
||||
|
||||
_fork_safe_locks = []
|
||||
|
||||
# On Jython a plain lock is used (no fork support needed there); elsewhere a
# fork-aware wrapper is defined so locks can be reset after os.fork().
if IS_JYTHON:

    def ForkSafeLock(rlock=False):
        # Factory matching the class interface below: returns a plain lock.
        if rlock:
            return threading.RLock()
        else:
            return threading.Lock()

else:

    class ForkSafeLock(object):
        """
        A lock which is fork-safe (when a fork is done, `pydevd_constants.after_fork()`
        should be called to reset the locks in the new process to avoid deadlocks
        from a lock which was locked during the fork).

        Note:
            Unlike `threading.Lock` this class is not completely atomic, so, doing:

            lock = ForkSafeLock()
            with lock:
                ...

            is different than using `threading.Lock` directly because the tracing may
            find an additional function call on `__enter__` and on `__exit__`, so, it's
            not recommended to use this in all places, only where the forking may be important
            (so, for instance, the locks on PyDB should not be changed to this lock because
            of that -- and those should all be collected in the new process because PyDB itself
            should be completely cleared anyways).

        It's possible to overcome this limitation by using `ForkSafeLock.acquire` and
        `ForkSafeLock.release` instead of the context manager (as acquire/release are
        bound to the original implementation, whereas __enter__/__exit__ is not due to Python
        limitations).
        """

        def __init__(self, rlock=False):
            self._rlock = rlock
            self._init()
            # NOTE(review): _init() already appends a weakref to _fork_safe_locks,
            # so this second append registers each lock twice -- confirm whether
            # the duplicate registration is intentional.
            _fork_safe_locks.append(weakref.ref(self))

        def __enter__(self):
            return self._lock.__enter__()

        def __exit__(self, exc_type, exc_val, exc_tb):
            return self._lock.__exit__(exc_type, exc_val, exc_tb)

        def _init(self):
            # (Re)create the underlying lock; also called from after_fork() to
            # replace a lock that may have been held during the fork.
            if self._rlock:
                self._lock = threading.RLock()
            else:
                self._lock = thread.allocate_lock()

            # Bind acquire/release directly so they bypass this wrapper's frames.
            self.acquire = self._lock.acquire
            self.release = self._lock.release
            _fork_safe_locks.append(weakref.ref(self))
|
||||
|
||||
|
||||
def after_fork():
    """
    Must be called after a fork operation (will reset the ForkSafeLock).
    """
    global _fork_safe_locks
    # Swap out the registry first; each _init() re-registers its lock.
    pending, _fork_safe_locks = _fork_safe_locks, []
    for ref in pending:
        lock = ref()
        if lock is not None:
            lock._init()
|
||||
|
||||
|
||||
_thread_id_lock = ForkSafeLock()
|
||||
thread_get_ident = thread.get_ident
|
||||
|
||||
|
||||
def as_str(s):
    """Return *s* unchanged; asserts that it is already a ``str``."""
    assert isinstance(s, str)
    return s
|
||||
|
||||
|
||||
@contextmanager
def filter_all_warnings():
    """Context manager that suppresses all warnings while its body executes."""
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        yield
|
||||
|
||||
|
||||
def silence_warnings_decorator(func):
    """Decorator: run *func* with every warning suppressed (see filter_all_warnings)."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        with filter_all_warnings():
            return func(*args, **kwargs)

    return wrapper
|
||||
|
||||
|
||||
def sorted_dict_repr(d):
    """Deterministic repr of a dict: items ordered by ``str(key)``."""
    ordered = sorted(d.items(), key=lambda kv: str(kv[0]))
    body = ", ".join("%r: %r" % pair for pair in ordered)
    return "{" + body + "}"
|
||||
|
||||
|
||||
def iter_chars(b):
    """Iterate over single-character units of *b*.

    Python 3 iterates ``bytes`` as ints; this yields length-1 ``bytes``
    objects instead, and falls back to plain iteration for anything else.
    """
    if isinstance(b, bytes):
        # Equivalent of struct.unpack('3c', b) for a 3-byte input.
        fmt = "%dc" % len(b)
        return iter(struct.unpack(fmt, b))
    return iter(b)
|
||||
|
||||
|
||||
# Select the NO_FTRACE implementation: the trace function installed when a
# frame should no longer be traced. On some runtimes frame.f_trace cannot be
# set to None, so the capability is probed once at import time.
if IS_JYTHON or PYDEVD_USE_SYS_MONITORING:

    def NO_FTRACE(frame, event, arg):
        # sys.settrace-based tracing isn't used in these modes; nothing to clear.
        return None

else:
    # Remember whatever trace function is currently installed so it can be restored.
    _curr_trace = sys.gettrace()

    # Set a temporary trace which does nothing for us to test (otherwise setting frame.f_trace has no
    # effect).
    def _temp_trace(frame, event, arg):
        return None

    sys.settrace(_temp_trace)

    def _check_ftrace_set_none():
        """
        Will throw an error when executing a line event
        """
        sys._getframe().f_trace = None
        _line_event = 1
        _line_event = 2

    try:
        # Probe: if assigning f_trace = None works, clearing it per-frame is allowed.
        _check_ftrace_set_none()

        def NO_FTRACE(frame, event, arg):
            frame.f_trace = None
            return None

    except TypeError:

        def NO_FTRACE(frame, event, arg):
            # In Python <= 2.6 and <= 3.4, if we're tracing a method, frame.f_trace may not be set
            # to None, it must always be set to a tracing function.
            # See: tests_python.test_tracing_gotchas.test_tracing_gotchas
            #
            # Note: Python 2.7 sometimes works and sometimes it doesn't depending on the minor
            # version because of https://bugs.python.org/issue20041 (although bug reports didn't
            # include the minor version, so, mark for any Python 2.7 as I'm not completely sure
            # the fix in later 2.7 versions is the same one we're dealing with).
            return None

    # Restore the trace function that was active before the probe.
    sys.settrace(_curr_trace)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# get_pid
|
||||
# =======================================================================================================================
|
||||
def get_pid():
    """Return this process's id (with a Jython fallback).

    On Jython (no ``os.getpid``) the id is derived from the runtime MX bean
    name; if even that fails a fixed placeholder is returned.
    """
    try:
        return os.getpid()
    except AttributeError:
        try:
            # Jython does not have it!
            import java.lang.management.ManagementFactory  # @UnresolvedImport -- just for jython

            bean_name = java.lang.management.ManagementFactory.getRuntimeMXBean().getName()
            return bean_name.replace("@", "_")
        except:
            # ok, no pid available (will be unable to debug multiple processes)
            return "000001"
|
||||
|
||||
|
||||
def clear_cached_thread_id(thread):
    """Drop the cached ``__pydevd_id__`` from *thread* (no-op if absent)."""
    with _thread_id_lock:
        try:
            if thread.__pydevd_id__ != "console_main":
                # The console_main is a special thread id used in the console and its id should never be reset
                # (otherwise we may no longer be able to get its variables -- see: https://www.brainwy.com/tracker/PyDev/776).
                del thread.__pydevd_id__
        except AttributeError:
            pass
|
||||
|
||||
|
||||
# Don't let threads be collected (so that id(thread) is guaranteed to be unique).
|
||||
_thread_id_to_thread_found = {}
|
||||
|
||||
|
||||
def _get_or_compute_thread_id_with_lock(thread, is_current_thread):
    # Slow path of get_thread_id/get_current_thread_id: compute and cache the
    # pydevd id under the lock. `is_current_thread` is currently unused here --
    # presumably kept for API symmetry between the two callers.
    with _thread_id_lock:
        # We do a new check with the lock in place just to be sure that nothing changed
        tid = getattr(thread, "__pydevd_id__", None)
        if tid is not None:
            return tid

        # Keep the thread alive so that id(thread) stays unique for its lifetime.
        _thread_id_to_thread_found[id(thread)] = thread

        # Note: don't use thread.ident because a new thread may have the
        # same id from an old thread.
        pid = get_pid()
        tid = "pid_%s_id_%s" % (pid, id(thread))

        thread.__pydevd_id__ = tid

    return tid
|
||||
|
||||
|
||||
def get_current_thread_id(thread):
    """
    Note: the difference from get_current_thread_id to get_thread_id is that
    for the current thread we can get the thread id while the thread.ident
    is still not set in the Thread instance.
    """
    # Fast path: reuse the cached id without taking the lock.
    tid = getattr(thread, "__pydevd_id__", None)
    if tid is None:
        # Fix for https://www.brainwy.com/tracker/PyDev/645
        # if __pydevd_id__ is None, recalculate it... also, use an heuristic
        # that gives us always the same id for the thread (using thread.ident or id(thread)).
        tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=True)
    return tid
|
||||
|
||||
|
||||
def get_thread_id(thread):
    """Return the cached pydevd id for *thread*, computing it if needed."""
    # Fast path: reuse the cached id without taking the lock.
    tid = getattr(thread, "__pydevd_id__", None)
    if tid is None:
        # Fix for https://www.brainwy.com/tracker/PyDev/645
        # if __pydevd_id__ is None, recalculate it... also, use an heuristic
        # that gives us always the same id for the thread (using thread.ident or id(thread)).
        tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=False)
    return tid
|
||||
|
||||
|
||||
def set_thread_id(thread, thread_id):
    # Explicitly set the cached pydevd id for *thread* (under the id lock).
    with _thread_id_lock:
        thread.__pydevd_id__ = thread_id
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Null
|
||||
# =======================================================================================================================
|
||||
class Null:
    """
    Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205

    A do-nothing object: attribute access, calls, item access and context
    management all return ``self`` (or a neutral value), so it can stand in
    for a real object and silently absorb interaction.
    """

    def __init__(self, *args, **kwargs):
        return None

    def __call__(self, *args, **kwargs):
        return self

    def __enter__(self, *args, **kwargs):
        return self

    def __exit__(self, *args, **kwargs):
        return self

    def __getattr__(self, mname):
        if len(mname) > 4 and mname[:2] == "__" and mname[-2:] == "__":
            # Don't pretend to implement special method names.
            raise AttributeError(mname)
        return self

    def __setattr__(self, name, value):
        return self

    def __delattr__(self, name):
        return self

    def __repr__(self):
        return "<Null>"

    def __str__(self):
        return "Null"

    def __len__(self):
        # Also makes the object falsy on Python 3 (bool falls back to __len__).
        return 0

    def __getitem__(self):
        return self

    def __setitem__(self, *args, **kwargs):
        pass

    def write(self, *args, **kwargs):
        pass

    def __nonzero__(self):
        # Python 2 legacy; unused on Python 3 (falsiness comes from __len__).
        return 0

    def __iter__(self):
        return iter(())
|
||||
|
||||
|
||||
# Default instance
|
||||
NULL = Null()
|
||||
|
||||
|
||||
class KeyifyList(object):
    """Sequence adapter exposing ``key(inner[i])`` for each index ``i``."""

    def __init__(self, inner, key):
        self.inner = inner
        self.key = key

    def __len__(self):
        return len(self.inner)

    def __getitem__(self, idx):
        return self.key(self.inner[idx])
|
||||
|
||||
|
||||
def call_only_once(func):
    """
    To be used as a decorator

    @call_only_once
    def func():
        print 'Calling func only this time'

    Actually, in PyDev it must be called as:

    func = call_only_once(func) to support older versions of Python.
    """

    def once(*args, **kwargs):
        # First invocation runs func; later invocations return None.
        if once._called:
            return None
        once._called = True
        return func(*args, **kwargs)

    once._called = False
    return once
|
||||
|
||||
|
||||
# Protocol where each line is a new message (text is quoted to prevent new lines).
|
||||
# payload is xml
|
||||
QUOTED_LINE_PROTOCOL = "quoted-line"
|
||||
ARGUMENT_QUOTED_LINE_PROTOCOL = "protocol-quoted-line"
|
||||
|
||||
# Uses http protocol to provide a new message.
|
||||
# i.e.: Content-Length:xxx\r\n\r\npayload
|
||||
# payload is xml
|
||||
HTTP_PROTOCOL = "http"
|
||||
ARGUMENT_HTTP_PROTOCOL = "protocol-http"
|
||||
|
||||
# Message is sent without any header.
|
||||
# payload is json
|
||||
JSON_PROTOCOL = "json"
|
||||
ARGUMENT_JSON_PROTOCOL = "json-dap"
|
||||
|
||||
# Same header as the HTTP_PROTOCOL
|
||||
# payload is json
|
||||
HTTP_JSON_PROTOCOL = "http_json"
|
||||
ARGUMENT_HTTP_JSON_PROTOCOL = "json-dap-http"
|
||||
|
||||
ARGUMENT_PPID = "ppid"
|
||||
|
||||
|
||||
class _GlobalSettings:
|
||||
protocol = QUOTED_LINE_PROTOCOL
|
||||
|
||||
|
||||
def set_protocol(protocol):
    """Select the debugger wire protocol (one of the ``*_PROTOCOL`` constants)."""
    valid = (HTTP_PROTOCOL, QUOTED_LINE_PROTOCOL, JSON_PROTOCOL, HTTP_JSON_PROTOCOL)
    assert protocol in valid, "Protocol (%s) should be one of: %s" % (protocol, valid)
    _GlobalSettings.protocol = protocol
|
||||
|
||||
|
||||
def get_protocol():
    """Return the currently selected wire protocol."""
    return _GlobalSettings.protocol
|
||||
|
||||
|
||||
def is_json_protocol():
    """True when the selected protocol carries JSON payloads (plain or HTTP-framed)."""
    return _GlobalSettings.protocol in (JSON_PROTOCOL, HTTP_JSON_PROTOCOL)
|
||||
|
||||
|
||||
class GlobalDebuggerHolder:
    """
    Holder for the global debugger.
    """

    global_dbg = None  # Note: don't rename (the name is used in our attach to process)
|
||||
|
||||
|
||||
def get_global_debugger():
    """Return the globally registered debugger instance (or None if unset)."""
    return GlobalDebuggerHolder.global_dbg
|
||||
|
||||
|
||||
GetGlobalDebugger = get_global_debugger # Backward-compatibility
|
||||
|
||||
|
||||
def set_global_debugger(dbg):
    """Register *dbg* as the global debugger instance."""
    GlobalDebuggerHolder.global_dbg = dbg
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if Null():
|
||||
sys.stdout.write("here\n")
|
||||
@@ -0,0 +1,114 @@
|
||||
from _pydevd_bundle.pydevd_constants import get_current_thread_id, Null, ForkSafeLock
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
|
||||
from _pydev_bundle._pydev_saved_modules import thread, threading
|
||||
import sys
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
DEBUG = False
|
||||
|
||||
|
||||
class CustomFramesContainer:
    """Module-level holder for custom (paused) frames registered via add_custom_frame."""

    # Actual Values initialized later on (see custom_frames_container_init).
    custom_frames_lock = None  # : :type custom_frames_lock: threading.Lock

    # Maps frame-id string -> CustomFrame; only access while holding custom_frames_lock.
    custom_frames = None

    # Counter used to build unique custom frame ids (module-private).
    _next_frame_id = None

    # Placeholder for the debugger's command-thread event; starts as a Null
    # object and is replaced by the actual debugger once it is created.
    _py_db_command_thread_event = None
|
||||
|
||||
|
||||
def custom_frames_container_init():  # Note: no staticmethod on jython 2.1 (so, use free-function)
    # (Re)initialize CustomFramesContainer state; may be called again when
    # dealing with a fork (see the module-level call below).
    CustomFramesContainer.custom_frames_lock = ForkSafeLock()

    # custom_frames can only be accessed if properly locked with custom_frames_lock!
    # Key is a string identifying the frame (as well as the thread it belongs to).
    # Value is a CustomFrame.
    #
    CustomFramesContainer.custom_frames = {}

    # Only to be used in this module
    CustomFramesContainer._next_frame_id = 0

    # This is the event we must set to release an internal process events. It's later set by the actual debugger
    # when we do create the debugger.
    CustomFramesContainer._py_db_command_thread_event = Null()
|
||||
|
||||
|
||||
# Initialize it the first time (it may be reinitialized later on when dealing with a fork).
|
||||
custom_frames_container_init()
|
||||
|
||||
|
||||
class CustomFrame:
    """Value object describing one custom (paused) frame shown in the UI."""

    def __init__(self, name, frame, thread_id):
        # Identifier of the thread this frame is related to.
        self.thread_id = thread_id

        # The actual frame object to be shown paused.
        self.frame = frame

        # String representation shown for this frame.
        self.name = name

        # Incremented each time the frame is changed (starts at zero).
        self.mod_time = 0
|
||||
|
||||
|
||||
def add_custom_frame(frame, name, thread_id):
    """
    It's possible to show paused frames by adding a custom frame through this API (it's
    intended to be used for coroutines, but could potentially be used for generators too).

    :param frame:
        The topmost frame to be shown paused when a thread with thread.ident == thread_id is paused.

    :param name:
        The name to be shown for the custom thread in the UI.

    :param thread_id:
        The thread id to which this frame is related (must match thread.ident).

    :return: str
        Returns the custom thread id which will be used to show the given frame paused.
    """
    # The lock serializes access to both the registry and the id counter.
    with CustomFramesContainer.custom_frames_lock:
        curr_thread_id = get_current_thread_id(threading.current_thread())
        # Increment and read the counter in one statement (safe: we hold the lock).
        next_id = CustomFramesContainer._next_frame_id = CustomFramesContainer._next_frame_id + 1

        # Note: the frame id kept contains an id and thread information on the thread where the frame was added
        # so that later on we can check if the frame is from the current thread by doing frame_id.endswith('|'+thread_id).
        frame_custom_thread_id = "__frame__:%s|%s" % (next_id, curr_thread_id)
        if DEBUG:
            sys.stderr.write(
                "add_custom_frame: %s (%s) %s %s\n"
                % (frame_custom_thread_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name)
            )

        CustomFramesContainer.custom_frames[frame_custom_thread_id] = CustomFrame(name, frame, thread_id)
        # Wake up the debugger's internal command thread so it notices the new frame.
        CustomFramesContainer._py_db_command_thread_event.set()
        return frame_custom_thread_id
|
||||
|
||||
|
||||
def update_custom_frame(frame_custom_thread_id, frame, thread_id, name=None):
    """Update a previously added custom frame in place.

    Bumps its modification counter, re-associates it with `thread_id` and
    optionally renames it, then wakes up the internal command thread.

    NOTE(review): the `frame` argument is currently unused (the stored frame
    is not replaced) -- confirm whether that is intentional.
    """
    container = CustomFramesContainer
    with container.custom_frames_lock:
        if DEBUG:
            sys.stderr.write("update_custom_frame: %s\n" % frame_custom_thread_id)

        try:
            existing = container.custom_frames[frame_custom_thread_id]
            if name is not None:
                existing.name = name
            existing.mod_time += 1
            existing.thread_id = thread_id
        except:
            sys.stderr.write("Unable to get frame to replace: %s\n" % (frame_custom_thread_id,))
            pydev_log.exception()

        container._py_db_command_thread_event.set()
|
||||
|
||||
|
||||
def remove_custom_frame(frame_custom_thread_id):
    """Drop the custom frame registered under `frame_custom_thread_id` and
    wake up the internal command thread (a no-op if the id is unknown)."""
    container = CustomFramesContainer
    with container.custom_frames_lock:
        if DEBUG:
            sys.stderr.write("remove_custom_frame: %s\n" % frame_custom_thread_id)

        # pop() with a default so removing an unregistered id is not an error.
        container.custom_frames.pop(frame_custom_thread_id, None)
        container._py_db_command_thread_event.set()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,42 @@
|
||||
# Cython declarations for the debugger's per-thread state (companion .pxd for
# the compiled pydevd_cython module).
cdef class PyDBAdditionalThreadInfo:
    # Suspend/stepping state for one debugged thread.
    cdef public int pydev_state
    cdef public object pydev_step_stop # Actually, it's a frame or None
    cdef public int pydev_original_step_cmd
    cdef public int pydev_step_cmd
    cdef public bint pydev_notify_kill
    cdef public object pydev_smart_step_stop # Actually, it's a frame or None
    # django/jinja2 template-debugging helpers.
    cdef public bint pydev_django_resolve_frame
    cdef public object pydev_call_from_jinja2
    cdef public object pydev_call_inside_jinja2
    cdef public int is_tracing
    cdef public tuple conditional_breakpoint_exception
    cdef public str pydev_message
    cdef public int suspend_type
    cdef public int pydev_next_line
    cdef public str pydev_func_name
    cdef public bint suspended_at_unhandled
    cdef public str trace_suspend_type
    # Tracer objects installed for this thread.
    cdef public object top_level_thread_tracer_no_back_frames
    cdef public object top_level_thread_tracer_unhandled
    cdef public object thread_tracer
    cdef public object step_in_initial_location
    # Smart step-into bookkeeping.
    cdef public int pydev_smart_parent_offset
    cdef public int pydev_smart_child_offset
    cdef public tuple pydev_smart_step_into_variants
    cdef public dict target_id_to_smart_step_into_variant
    cdef public bint pydev_use_scoped_step_frame
    cdef public object weak_thread
    cdef public bint is_in_wait_loop

    cpdef get_topmost_frame(self, thread)
    cpdef update_stepping_info(self)

    # Private APIs
    cpdef object _get_related_thread(self)
    cpdef bint _is_stepping(self)


cpdef set_additional_thread_info(thread)


cpdef add_additional_info(PyDBAdditionalThreadInfo info)
cpdef remove_additional_info(PyDBAdditionalThreadInfo info)
cpdef bint any_thread_stepping()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,63 @@
|
||||
import sys

# Locate the compiled (Cython) speedups module, trying in order:
#   1. an externally distributed module (_pydevd_bundle_ext),
#   2. the in-tree compiled module (_pydevd_bundle.pydevd_cython),
#   3. a pre-compiled binary whose name encodes platform/version/bitness.
try:
    try:
        from _pydevd_bundle_ext import pydevd_cython as mod

    except ImportError:
        from _pydevd_bundle import pydevd_cython as mod

except ImportError:
    import struct

    try:
        # Pointer size decides 32 vs 64 bit.
        is_python_64bit = struct.calcsize("P") == 8
    except:
        # In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways.
        raise ImportError
    plat = "32"
    if is_python_64bit:
        plat = "64"

    # We also accept things as:
    #
    # _pydevd_bundle.pydevd_cython_win32_27_32
    # _pydevd_bundle.pydevd_cython_win32_34_64
    #
    # to have multiple pre-compiled pyds distributed along the IDE
    # (generated by build_tools/build_binaries_windows.py).
    mod_name = "pydevd_cython_%s_%s%s_%s" % (sys.platform, sys.version_info[0], sys.version_info[1], plat)
    check_name = "_pydevd_bundle.%s" % (mod_name,)
    # __import__ returns the top-level package; fetch the submodule attribute.
    mod = getattr(__import__(check_name), mod_name)

# Regardless of how it was found, make sure it's later available as the
# initial name so that the expected types from cython in frame eval
# are valid.
sys.modules["_pydevd_bundle.pydevd_cython"] = mod

# Re-export the accelerated entry points under the names the rest of the
# debugger imports from this wrapper.
trace_dispatch = mod.trace_dispatch

PyDBAdditionalThreadInfo = mod.PyDBAdditionalThreadInfo

set_additional_thread_info = mod.set_additional_thread_info

any_thread_stepping = mod.any_thread_stepping

remove_additional_info = mod.remove_additional_info

global_cache_skips = mod.global_cache_skips

global_cache_frame_skips = mod.global_cache_frame_skips

_set_additional_thread_info_lock = mod._set_additional_thread_info_lock

fix_top_level_trace_and_get_trace_func = mod.fix_top_level_trace_and_get_trace_func

handle_exception = mod.handle_exception

should_stop_on_exception = mod.should_stop_on_exception

is_unhandled_exception = mod.is_unhandled_exception

# Version of the compiled module (0 when the attribute is absent).
version = getattr(mod, "version", 0)
|
||||
@@ -0,0 +1,202 @@
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydev_bundle import _pydev_saved_modules
|
||||
from _pydevd_bundle.pydevd_utils import notify_about_gevent_if_needed
|
||||
import weakref
|
||||
from _pydevd_bundle.pydevd_constants import (
|
||||
IS_JYTHON,
|
||||
IS_IRONPYTHON,
|
||||
PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS,
|
||||
PYDEVD_USE_SYS_MONITORING,
|
||||
)
|
||||
from _pydev_bundle.pydev_log import exception as pydev_log_exception
|
||||
import sys
|
||||
from _pydev_bundle import pydev_log
|
||||
import pydevd_tracing
|
||||
from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions
|
||||
from _pydevd_sys_monitoring import pydevd_sys_monitoring
|
||||
|
||||
if IS_JYTHON:
|
||||
import org.python.core as JyCore # @UnresolvedImport
|
||||
|
||||
|
||||
class PyDBDaemonThread(threading.Thread):
    """Base class for the debugger's own helper threads.

    Instances are marked via mark_as_pydevd_daemon_thread(), which makes them
    daemon threads, disables tracing for them and (once patching is applied)
    hides them from threading.enumerate().
    """

    def __init__(self, py_db, target_and_args=None):
        """
        :param target_and_args:
            tuple(func, args, kwargs) if this should be a function and args to run.
            -- Note: use through run_as_pydevd_daemon_thread().
        """
        threading.Thread.__init__(self)
        notify_about_gevent_if_needed()
        # Weak reference so a helper thread does not keep the debugger alive.
        self._py_db = weakref.ref(py_db)
        # Set by do_kill_pydev_thread(); subclasses poll it to stop.
        self._kill_received = False
        mark_as_pydevd_daemon_thread(self)
        self._target_and_args = target_and_args

    @property
    def py_db(self):
        # May return None once the debugger has been garbage-collected.
        return self._py_db()

    def run(self):
        # Register this thread in the debugger's registry for its whole lifetime.
        created_pydb_daemon = self.py_db.created_pydb_daemon_threads
        created_pydb_daemon[self] = 1
        try:
            try:
                if IS_JYTHON and not isinstance(threading.current_thread(), threading._MainThread):
                    # we shouldn't update sys.modules for the main thread, cause it leads to the second importing 'threading'
                    # module, and the new instance of main thread is created
                    ss = JyCore.PySystemState()
                    # Note: Py.setSystemState() affects only the current thread.
                    JyCore.Py.setSystemState(ss)

                self._stop_trace()
                self._on_run()
            except:
                # Guard the module globals: during interpreter shutdown they
                # may already have been cleared to None.
                if sys is not None and pydev_log_exception is not None:
                    pydev_log_exception()
        finally:
            del created_pydb_daemon[self]

    def _on_run(self):
        """Thread body: run the given target, or require a subclass override."""
        if self._target_and_args is not None:
            target, args, kwargs = self._target_and_args
            target(*args, **kwargs)
        else:
            raise NotImplementedError("Should be reimplemented by: %s" % self.__class__)

    def do_kill_pydev_thread(self):
        """Ask this thread to stop (idempotent; just sets the kill flag)."""
        if not self._kill_received:
            pydev_log.debug("%s received kill signal", self.name)
            self._kill_received = True

    def _stop_trace(self):
        # Disable tracing/monitoring on this thread. pydev_do_not_trace is set
        # by mark_as_pydevd_daemon_thread().
        if self.pydev_do_not_trace:
            if PYDEVD_USE_SYS_MONITORING:
                pydevd_sys_monitoring.stop_monitoring(all_threads=False)
                return
            pydevd_tracing.SetTrace(None)  # no debugging on this thread
|
||||
|
||||
|
||||
def _collect_load_names(func):
    """Return the set of names `func`'s bytecode loads via LOAD_GLOBAL /
    LOAD_ATTR / LOAD_METHOD (used to verify a function looks like the
    version we expect before patching it)."""
    interesting_opnames = ("LOAD_GLOBAL", "LOAD_ATTR", "LOAD_METHOD")
    return {
        instruction.argrepr
        for instruction in iter_instructions(func.__code__)
        if instruction.opname in interesting_opnames
    }
|
||||
|
||||
|
||||
def _patch_threading_to_hide_pydevd_threads():
    """
    Patches the needed functions on the `threading` module so that the pydevd threads are hidden.

    Note that we patch the functions __code__ to avoid issues if some code had already imported those
    variables prior to the patching.
    """
    found_load_names = _collect_load_names(threading.enumerate)
    # i.e.: we'll only apply the patching if the function seems to be what we expect.

    new_threading_enumerate = None

    # Accepted name-sets loaded by threading.enumerate (the argrepr spelling
    # varies across CPython bytecode versions).
    if found_load_names in (
        {"_active_limbo_lock", "_limbo", "_active", "values", "list"},
        {"_active_limbo_lock", "_limbo", "_active", "values", "NULL + list"},
        {"NULL + list", "_active", "_active_limbo_lock", "NULL|self + values", "_limbo"},
        {"_active_limbo_lock", "values + NULL|self", "_limbo", "_active", "list + NULL"},
    ):
        pydev_log.debug("Applying patching to hide pydevd threads (Py3 version).")

        # IMPORTANT: this function's __code__ is transplanted into
        # threading.enumerate below, so the seemingly-unbound globals here
        # (_active_limbo_lock, _active, _limbo) resolve in the `threading`
        # module's own namespace -- do not rename them.
        def new_threading_enumerate():
            with _active_limbo_lock:
                ret = list(_active.values()) + list(_limbo.values())

            return [t for t in ret if not getattr(t, "is_pydev_daemon_thread", False)]

    elif found_load_names == set(("_active_limbo_lock", "_limbo", "_active", "values")):
        pydev_log.debug("Applying patching to hide pydevd threads (Py2 version).")

        def new_threading_enumerate():
            with _active_limbo_lock:
                ret = _active.values() + _limbo.values()

            return [t for t in ret if not getattr(t, "is_pydev_daemon_thread", False)]

    else:
        # Unexpected implementation: leave threading untouched.
        pydev_log.info("Unable to hide pydevd threads. Found names in threading.enumerate: %s", found_load_names)

    if new_threading_enumerate is not None:

        # Keep an unpatched enumerate (which still includes pydevd threads)
        # available for internal use.
        def pydevd_saved_threading_enumerate():
            with threading._active_limbo_lock:
                return list(threading._active.values()) + list(threading._limbo.values())

        _pydev_saved_modules.pydevd_saved_threading_enumerate = pydevd_saved_threading_enumerate

        threading.enumerate.__code__ = new_threading_enumerate.__code__

        # We also need to patch the active count (to match what we have in the enumerate).
        def new_active_count():
            # Note: as this will be executed in the `threading` module, `enumerate` will
            # actually be threading.enumerate.
            return len(enumerate())

        threading.active_count.__code__ = new_active_count.__code__

        # When shutting down, Python (on some versions) may do something as:
        #
        # def _pickSomeNonDaemonThread():
        #     for t in enumerate():
        #         if not t.daemon and t.is_alive():
        #             return t
        #     return None
        #
        # But in this particular case, we do want threads with `is_pydev_daemon_thread` to appear
        # explicitly due to the pydevd `CheckAliveThread` (because we want the shutdown to wait on it).
        # So, it can't rely on the `enumerate` for that anymore as it's patched to not return pydevd threads.
        if hasattr(threading, "_pickSomeNonDaemonThread"):

            def new_pick_some_non_daemon_thread():
                # Also transplanted into `threading`; globals resolve there.
                with _active_limbo_lock:
                    # Ok for py2 and py3.
                    threads = list(_active.values()) + list(_limbo.values())

                for t in threads:
                    if not t.daemon and t.is_alive():
                        return t
                return None

            threading._pickSomeNonDaemonThread.__code__ = new_pick_some_non_daemon_thread.__code__
|
||||
|
||||
|
||||
# Whether `threading` has already been patched to hide pydevd threads (done at
# most once per process, when the first daemon thread is marked).
_patched_threading_to_hide_pydevd_threads = False


def mark_as_pydevd_daemon_thread(thread):
    """Flag `thread` as an internal pydevd daemon thread (untraced and, once
    patching is applied, hidden from threading.enumerate)."""
    if not IS_JYTHON and not IS_IRONPYTHON and PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS:
        global _patched_threading_to_hide_pydevd_threads
        if not _patched_threading_to_hide_pydevd_threads:
            # When we mark the first thread as a pydevd daemon thread, we also change the threading
            # functions to hide pydevd threads.
            # Note: we don't just "hide" the pydevd threads from the threading module by not using it
            # (i.e.: just using the `thread.start_new_thread` instead of `threading.Thread`)
            # because there's 1 thread (the `CheckAliveThread`) which is a pydevd thread but
            # isn't really a daemon thread (so, we need CPython to wait on it for shutdown,
            # in which case it needs to be in `threading` and the patching would be needed anyways).
            _patched_threading_to_hide_pydevd_threads = True
            try:
                _patch_threading_to_hide_pydevd_threads()
            except:
                pydev_log.exception("Error applying patching to hide pydevd threads.")

    # Attributes read elsewhere: _stop_trace() checks pydev_do_not_trace and
    # the patched threading.enumerate filters on is_pydev_daemon_thread.
    thread.pydev_do_not_trace = True
    thread.is_pydev_daemon_thread = True
    thread.daemon = True
|
||||
|
||||
|
||||
def run_as_pydevd_daemon_thread(py_db, func, *args, **kwargs):
    """Run `func(*args, **kwargs)` on a new pydevd daemon thread.

    The thread runs without any tracing in place and is marked as a pydevd
    daemon thread.  Returns the already-started thread.
    """
    daemon_thread = PyDBDaemonThread(py_db, target_and_args=(func, args, kwargs))
    daemon_thread.name = f"{func.__name__} (pydevd daemon thread)"
    daemon_thread.start()
    return daemon_thread
|
||||
@@ -0,0 +1,64 @@
|
||||
"""
|
||||
This module holds the customization settings for the debugger.
|
||||
"""
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import QUOTED_LINE_PROTOCOL
|
||||
from _pydev_bundle import pydev_log
|
||||
import sys
|
||||
|
||||
|
||||
class PydevdCustomization(object):
    """Process-wide customization knobs, meant to be set before PyDB starts
    (applied by on_pydb_init)."""

    # Wire protocol used by default.
    DEFAULT_PROTOCOL: str = QUOTED_LINE_PROTOCOL

    # Debug mode may be set to 'debugpy-dap'.
    #
    # In 'debugpy-dap' mode the following settings are done to PyDB:
    #
    # py_db.skip_suspend_on_breakpoint_exception = (BaseException,)
    # py_db.skip_print_breakpoint_exception = (NameError,)
    # py_db.multi_threads_single_notification = True
    DEBUG_MODE: str = ""

    # This may be a <sys_path_entry>;<module_name> to be pre-imported
    # Something as: 'c:/temp/foo;my_module.bar'
    #
    # What's done in this case is something as:
    #
    # sys.path.insert(0, <sys_path_entry>)
    # try:
    #     import <module_name>
    # finally:
    #     del sys.path[0]
    #
    # If the pre-import fails an output message is
    # sent (but apart from that debugger execution
    # should continue).
    PREIMPORT: str = ""
|
||||
|
||||
|
||||
def on_pydb_init(py_db):
    """Apply the PydevdCustomization settings to a newly created PyDB."""
    _apply_debug_mode(py_db)
    _apply_preimport()


def _apply_debug_mode(py_db):
    # Configure py_db according to PydevdCustomization.DEBUG_MODE.
    mode = PydevdCustomization.DEBUG_MODE
    if mode == "debugpy-dap":
        pydev_log.debug("Apply debug mode: debugpy-dap")
        py_db.skip_suspend_on_breakpoint_exception = (BaseException,)
        py_db.skip_print_breakpoint_exception = (NameError,)
        py_db.multi_threads_single_notification = True
    elif not mode:
        pydev_log.debug("Apply debug mode: default")
    else:
        pydev_log.debug("WARNING: unknown debug mode: %s", mode)


def _apply_preimport():
    # Honor PydevdCustomization.PREIMPORT ("<sys_path_entry>;<module_name>"):
    # temporarily prepend the path entry to sys.path and import the module.
    # Failures are logged but never abort debugger startup.
    preimport = PydevdCustomization.PREIMPORT
    if not preimport:
        return

    pydev_log.debug("Preimport: %s", preimport)
    try:
        sys_path_entry, module_name = preimport.rsplit(";", maxsplit=1)
    except Exception:
        pydev_log.exception("Expected ';' in %s" % (preimport,))
        return

    try:
        sys.path.insert(0, sys_path_entry)
        try:
            __import__(module_name)
        finally:
            sys.path.remove(sys_path_entry)
    except Exception:
        pydev_log.exception("Error importing %s (with sys.path entry: %s)" % (module_name, sys_path_entry))
|
||||
@@ -0,0 +1,123 @@
|
||||
"""
|
||||
Support for a tag that allows skipping over functions while debugging.
|
||||
"""
|
||||
import linecache
|
||||
import re
|
||||
|
||||
# To suppress tracing a method, add the tag @DontTrace
|
||||
# to a comment either preceding or on the same line as
|
||||
# the method definition
|
||||
#
|
||||
# E.g.:
|
||||
# #@DontTrace
|
||||
# def test1():
|
||||
# pass
|
||||
#
|
||||
# ... or ...
|
||||
#
|
||||
# def test2(): #@DontTrace
|
||||
# pass
|
||||
DONT_TRACE_TAG = "@DontTrace"

# Regular expression to match a decorator (at the beginning
# of a line).
RE_DECORATOR = re.compile(r"^\s*@")

# Cache mapping absolute filename -> dict whose keys are the 0-based line
# indices tagged with @DontTrace (values are always 1; only key membership
# matters).  Cleared by clear_trace_filter_cache().
_filename_to_ignored_lines = {}


def default_should_trace_hook(code, absolute_filename):
    """
    Return True if this frame should be traced, False if tracing should be blocked.

    :param code: code object of the function under consideration (only
        `co_firstlineno` is read).
    :param absolute_filename: absolute path of the file containing it.
    """
    # First, check whether this file's ignored lines are already cached.
    ignored_lines = _filename_to_ignored_lines.get(absolute_filename)
    if ignored_lines is None:
        # Now, look up that line of code and check for a @DontTrace
        # preceding or on the same line as the method.
        # E.g.:
        # #@DontTrace
        # def test():
        #     pass
        # ... or ...
        # def test(): #@DontTrace
        #     pass
        ignored_lines = {}
        lines = linecache.getlines(absolute_filename)
        for i_line, line in enumerate(lines):
            j = line.find("#")
            if j >= 0:
                comment = line[j:]
                if DONT_TRACE_TAG in comment:
                    ignored_lines[i_line] = 1

                    # Note: when it's found in the comment, mark it up and down for the decorator lines found,
                    # so a decorated function matches regardless of where the tag sits.
                    k = i_line - 1
                    while k >= 0:
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k -= 1
                        else:
                            break

                    k = i_line + 1
                    # Bugfix: must be `k < len(lines)` -- the previous
                    # `k <= len(lines)` indexed lines[len(lines)] (IndexError)
                    # when the tag was followed only by decorator lines up to
                    # the end of the file.
                    while k < len(lines):
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k += 1
                        else:
                            break

        _filename_to_ignored_lines[absolute_filename] = ignored_lines

    func_line = code.co_firstlineno - 1  # co_firstlineno is 1-based, so -1 is needed
    return not (
        func_line - 1 in ignored_lines  # -1 to get line before method
        or func_line in ignored_lines
    )  # method line
|
||||
|
||||
|
||||
should_trace_hook = None
|
||||
|
||||
|
||||
def clear_trace_filter_cache():
    """Drop all cached @DontTrace information.  Call this after reloading.

    The hook is temporarily disabled while the caches are emptied because
    otherwise clearing `_filename_to_ignored_lines` would never complete.
    """
    global should_trace_hook
    saved_hook, should_trace_hook = should_trace_hook, None
    try:
        # Also clear the linecache: line contents may have changed.
        linecache.clearcache()
        _filename_to_ignored_lines.clear()
    finally:
        should_trace_hook = saved_hook
|
||||
|
||||
|
||||
def trace_filter(mode):
    """Enable, disable or toggle @DontTrace filtering.

    mode: Whether to enable the trace hook.
        True: Trace filtering on (skipping methods tagged @DontTrace)
        False: Trace filtering off (trace methods tagged @DontTrace)
        None/default: Toggle trace filtering.

    :return: the mode now in effect (True or False).
    """
    global should_trace_hook

    if mode is None:
        # Toggle: enable filtering iff it is currently disabled.
        mode = should_trace_hook is None

    should_trace_hook = default_should_trace_hook if mode else None
    return mode
|
||||
@@ -0,0 +1,178 @@
|
||||
# Important: Autogenerated file.
|
||||
|
||||
# fmt: off
|
||||
# DO NOT edit manually!
|
||||
# DO NOT edit manually!
|
||||
|
||||
LIB_FILE = 1
|
||||
PYDEV_FILE = 2
|
||||
|
||||
DONT_TRACE_DIRS = {
|
||||
'_pydev_bundle': PYDEV_FILE,
|
||||
'_pydev_runfiles': PYDEV_FILE,
|
||||
'_pydevd_bundle': PYDEV_FILE,
|
||||
'_pydevd_frame_eval': PYDEV_FILE,
|
||||
'_pydevd_sys_monitoring': PYDEV_FILE,
|
||||
'pydev_ipython': LIB_FILE,
|
||||
'pydev_sitecustomize': PYDEV_FILE,
|
||||
'pydevd_attach_to_process': PYDEV_FILE,
|
||||
'pydevd_concurrency_analyser': PYDEV_FILE,
|
||||
'pydevd_plugins': PYDEV_FILE,
|
||||
'test_pydevd_reload': PYDEV_FILE,
|
||||
}
|
||||
|
||||
LIB_FILES_IN_DONT_TRACE_DIRS = {
|
||||
'__init__.py',
|
||||
'inputhook.py',
|
||||
'inputhookglut.py',
|
||||
'inputhookgtk.py',
|
||||
'inputhookgtk3.py',
|
||||
'inputhookpyglet.py',
|
||||
'inputhookqt4.py',
|
||||
'inputhookqt5.py',
|
||||
'inputhookqt6.py',
|
||||
'inputhooktk.py',
|
||||
'inputhookwx.py',
|
||||
'matplotlibtools.py',
|
||||
'qt.py',
|
||||
'qt_for_kernel.py',
|
||||
'qt_loaders.py',
|
||||
'version.py',
|
||||
}
|
||||
|
||||
DONT_TRACE = {
|
||||
# commonly used things from the stdlib that we don't want to trace
|
||||
'Queue.py':LIB_FILE,
|
||||
'queue.py':LIB_FILE,
|
||||
'socket.py':LIB_FILE,
|
||||
'weakref.py':LIB_FILE,
|
||||
'_weakrefset.py':LIB_FILE,
|
||||
'linecache.py':LIB_FILE,
|
||||
'threading.py':LIB_FILE,
|
||||
'dis.py':LIB_FILE,
|
||||
|
||||
# things from pydev that we don't want to trace
|
||||
'__main__pydevd_gen_debug_adapter_protocol.py': PYDEV_FILE,
|
||||
'_pydev_calltip_util.py': PYDEV_FILE,
|
||||
'_pydev_completer.py': PYDEV_FILE,
|
||||
'_pydev_execfile.py': PYDEV_FILE,
|
||||
'_pydev_filesystem_encoding.py': PYDEV_FILE,
|
||||
'_pydev_getopt.py': PYDEV_FILE,
|
||||
'_pydev_imports_tipper.py': PYDEV_FILE,
|
||||
'_pydev_jy_imports_tipper.py': PYDEV_FILE,
|
||||
'_pydev_log.py': PYDEV_FILE,
|
||||
'_pydev_saved_modules.py': PYDEV_FILE,
|
||||
'_pydev_sys_patch.py': PYDEV_FILE,
|
||||
'_pydev_tipper_common.py': PYDEV_FILE,
|
||||
'_pydevd_sys_monitoring.py': PYDEV_FILE,
|
||||
'django_debug.py': PYDEV_FILE,
|
||||
'jinja2_debug.py': PYDEV_FILE,
|
||||
'pycompletionserver.py': PYDEV_FILE,
|
||||
'pydev_app_engine_debug_startup.py': PYDEV_FILE,
|
||||
'pydev_console_utils.py': PYDEV_FILE,
|
||||
'pydev_import_hook.py': PYDEV_FILE,
|
||||
'pydev_imports.py': PYDEV_FILE,
|
||||
'pydev_ipython_console.py': PYDEV_FILE,
|
||||
'pydev_ipython_console_011.py': PYDEV_FILE,
|
||||
'pydev_is_thread_alive.py': PYDEV_FILE,
|
||||
'pydev_localhost.py': PYDEV_FILE,
|
||||
'pydev_log.py': PYDEV_FILE,
|
||||
'pydev_monkey.py': PYDEV_FILE,
|
||||
'pydev_monkey_qt.py': PYDEV_FILE,
|
||||
'pydev_override.py': PYDEV_FILE,
|
||||
'pydev_run_in_console.py': PYDEV_FILE,
|
||||
'pydev_runfiles.py': PYDEV_FILE,
|
||||
'pydev_runfiles_coverage.py': PYDEV_FILE,
|
||||
'pydev_runfiles_nose.py': PYDEV_FILE,
|
||||
'pydev_runfiles_parallel.py': PYDEV_FILE,
|
||||
'pydev_runfiles_parallel_client.py': PYDEV_FILE,
|
||||
'pydev_runfiles_pytest2.py': PYDEV_FILE,
|
||||
'pydev_runfiles_unittest.py': PYDEV_FILE,
|
||||
'pydev_runfiles_xml_rpc.py': PYDEV_FILE,
|
||||
'pydev_umd.py': PYDEV_FILE,
|
||||
'pydev_versioncheck.py': PYDEV_FILE,
|
||||
'pydevconsole.py': PYDEV_FILE,
|
||||
'pydevconsole_code.py': PYDEV_FILE,
|
||||
'pydevd.py': PYDEV_FILE,
|
||||
'pydevd_additional_thread_info.py': PYDEV_FILE,
|
||||
'pydevd_additional_thread_info_regular.py': PYDEV_FILE,
|
||||
'pydevd_api.py': PYDEV_FILE,
|
||||
'pydevd_base_schema.py': PYDEV_FILE,
|
||||
'pydevd_breakpoints.py': PYDEV_FILE,
|
||||
'pydevd_bytecode_utils.py': PYDEV_FILE,
|
||||
'pydevd_bytecode_utils_py311.py': PYDEV_FILE,
|
||||
'pydevd_code_to_source.py': PYDEV_FILE,
|
||||
'pydevd_collect_bytecode_info.py': PYDEV_FILE,
|
||||
'pydevd_comm.py': PYDEV_FILE,
|
||||
'pydevd_comm_constants.py': PYDEV_FILE,
|
||||
'pydevd_command_line_handling.py': PYDEV_FILE,
|
||||
'pydevd_concurrency_logger.py': PYDEV_FILE,
|
||||
'pydevd_console.py': PYDEV_FILE,
|
||||
'pydevd_constants.py': PYDEV_FILE,
|
||||
'pydevd_custom_frames.py': PYDEV_FILE,
|
||||
'pydevd_cython_wrapper.py': PYDEV_FILE,
|
||||
'pydevd_daemon_thread.py': PYDEV_FILE,
|
||||
'pydevd_defaults.py': PYDEV_FILE,
|
||||
'pydevd_dont_trace.py': PYDEV_FILE,
|
||||
'pydevd_dont_trace_files.py': PYDEV_FILE,
|
||||
'pydevd_exec2.py': PYDEV_FILE,
|
||||
'pydevd_extension_api.py': PYDEV_FILE,
|
||||
'pydevd_extension_utils.py': PYDEV_FILE,
|
||||
'pydevd_file_utils.py': PYDEV_FILE,
|
||||
'pydevd_filtering.py': PYDEV_FILE,
|
||||
'pydevd_frame.py': PYDEV_FILE,
|
||||
'pydevd_frame_eval_cython_wrapper.py': PYDEV_FILE,
|
||||
'pydevd_frame_eval_main.py': PYDEV_FILE,
|
||||
'pydevd_frame_tracing.py': PYDEV_FILE,
|
||||
'pydevd_frame_utils.py': PYDEV_FILE,
|
||||
'pydevd_gevent_integration.py': PYDEV_FILE,
|
||||
'pydevd_helpers.py': PYDEV_FILE,
|
||||
'pydevd_import_class.py': PYDEV_FILE,
|
||||
'pydevd_io.py': PYDEV_FILE,
|
||||
'pydevd_json_debug_options.py': PYDEV_FILE,
|
||||
'pydevd_line_validation.py': PYDEV_FILE,
|
||||
'pydevd_modify_bytecode.py': PYDEV_FILE,
|
||||
'pydevd_net_command.py': PYDEV_FILE,
|
||||
'pydevd_net_command_factory_json.py': PYDEV_FILE,
|
||||
'pydevd_net_command_factory_xml.py': PYDEV_FILE,
|
||||
'pydevd_plugin_numpy_types.py': PYDEV_FILE,
|
||||
'pydevd_plugin_pandas_types.py': PYDEV_FILE,
|
||||
'pydevd_plugin_utils.py': PYDEV_FILE,
|
||||
'pydevd_plugins_django_form_str.py': PYDEV_FILE,
|
||||
'pydevd_process_net_command.py': PYDEV_FILE,
|
||||
'pydevd_process_net_command_json.py': PYDEV_FILE,
|
||||
'pydevd_referrers.py': PYDEV_FILE,
|
||||
'pydevd_reload.py': PYDEV_FILE,
|
||||
'pydevd_resolver.py': PYDEV_FILE,
|
||||
'pydevd_runpy.py': PYDEV_FILE,
|
||||
'pydevd_safe_repr.py': PYDEV_FILE,
|
||||
'pydevd_save_locals.py': PYDEV_FILE,
|
||||
'pydevd_schema.py': PYDEV_FILE,
|
||||
'pydevd_schema_log.py': PYDEV_FILE,
|
||||
'pydevd_signature.py': PYDEV_FILE,
|
||||
'pydevd_source_mapping.py': PYDEV_FILE,
|
||||
'pydevd_stackless.py': PYDEV_FILE,
|
||||
'pydevd_suspended_frames.py': PYDEV_FILE,
|
||||
'pydevd_sys_monitoring.py': PYDEV_FILE,
|
||||
'pydevd_thread_lifecycle.py': PYDEV_FILE,
|
||||
'pydevd_thread_wrappers.py': PYDEV_FILE,
|
||||
'pydevd_timeout.py': PYDEV_FILE,
|
||||
'pydevd_trace_dispatch.py': PYDEV_FILE,
|
||||
'pydevd_trace_dispatch_regular.py': PYDEV_FILE,
|
||||
'pydevd_traceproperty.py': PYDEV_FILE,
|
||||
'pydevd_tracing.py': PYDEV_FILE,
|
||||
'pydevd_utils.py': PYDEV_FILE,
|
||||
'pydevd_vars.py': PYDEV_FILE,
|
||||
'pydevd_vm_type.py': PYDEV_FILE,
|
||||
'pydevd_xml.py': PYDEV_FILE,
|
||||
}
|
||||
|
||||
# if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
|
||||
DONT_TRACE['io.py'] = LIB_FILE
|
||||
|
||||
# Don't trace common encodings too
|
||||
DONT_TRACE['cp1252.py'] = LIB_FILE
|
||||
DONT_TRACE['utf_8.py'] = LIB_FILE
|
||||
DONT_TRACE['codecs.py'] = LIB_FILE
|
||||
|
||||
# fmt: on
|
||||
@@ -0,0 +1,5 @@
|
||||
def Exec(exp, global_vars, local_vars=None):
    """Execute `exp` in the given namespaces.

    When `local_vars` is omitted, `global_vars` doubles as the local
    namespace (matching the builtin `exec`).
    """
    if local_vars is None:
        exec(exp, global_vars)
    else:
        exec(exp, global_vars, local_vars)
|
||||
@@ -0,0 +1,107 @@
|
||||
import abc
|
||||
from typing import Any
|
||||
|
||||
|
||||
# borrowed from from six
|
||||
def _with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
|
||||
class metaclass(meta):
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
|
||||
return type.__new__(metaclass, "temporary_class", (), {})
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# AbstractResolver
|
||||
# =======================================================================================================================
|
||||
class _AbstractResolver(_with_metaclass(abc.ABCMeta)):
    """
    This class exists only for documentation purposes to explain how to create a resolver.

    Some examples on how to resolve things:
    - list: get_dictionary could return a dict with index->item and use the index to resolve it later
    - set: get_dictionary could return a dict with id(object)->object and reiterate in that array to resolve it later
    - arbitrary instance: get_dictionary could return dict with attr_name->attr and use getattr to resolve it later
    """

    @abc.abstractmethod
    def resolve(self, var, attribute):
        """
        In this method, we'll resolve some child item given the string representation of the item in the key
        representing the previously asked dictionary.

        :param var: this is the actual variable to be resolved.
        :param attribute: this is the string representation of a key previously returned in get_dictionary.
        :return: the resolved child object.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_dictionary(self, var):
        """
        :param var: this is the variable that should have its children gotten.

        :return: a dictionary where each pair key, value should be shown to the user as children items
        in the variables view for the given var.
        """
        raise NotImplementedError
|
||||
|
||||
|
||||
class _AbstractProvider(_with_metaclass(abc.ABCMeta)):
    """Base class for extension providers: decides whether an extension
    applies to a given variable type."""

    @abc.abstractmethod
    def can_provide(self, type_object, type_name):
        # :param type_object: the type of the variable under consideration.
        # :param type_name: that type's name as a string.
        # Presumably returns a truthy value when this provider handles the
        # type -- confirm against the extension dispatch code.
        raise NotImplementedError
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# API CLASSES:
|
||||
# =======================================================================================================================
|
||||
|
||||
|
||||
class TypeResolveProvider(_AbstractResolver, _AbstractProvider):
    """
    Implement this in an extension to provide a custom resolver, see _AbstractResolver
    (combines the resolver contract with can_provide-based type selection).
    """
|
||||
|
||||
|
||||
class StrPresentationProvider(_AbstractProvider):
    """
    Implement this in an extension to provide a str presentation for a type
    """

    def get_str_in_context(self, val: Any, context: str):
        """
        :param val:
            This is the object for which we want a string representation.

        :param context:
            This is the context in which the variable is being requested. Valid values:
                "watch",
                "repl",
                "hover",
                "clipboard"

        :note: this method is not required (if it's not available, get_str is called directly,
            so, it's only needed if the string representation needs to be converted based on
            the context).
        """
        # Default: the context does not change the representation.
        return self.get_str(val)

    @abc.abstractmethod
    def get_str(self, val):
        """Return the (context-independent) string representation of `val`."""
        raise NotImplementedError
|
||||
|
||||
|
||||
class DebuggerEventHandler(_with_metaclass(abc.ABCMeta)):
    """
    Implement this to receive lifecycle events from the debugger
    """

    def on_debugger_modules_loaded(self, **kwargs):
        """
        This method invoked after all debugger modules are loaded. Useful for importing and/or patching debugger
        modules at a safe time
        :param kwargs: This is intended to be flexible dict passed from the debugger.
                       Currently passes the debugger version
        """
|
||||
@@ -0,0 +1,65 @@
|
||||
import pkgutil
|
||||
import sys
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
try:
    # Namespace package where third parties contribute extension modules.
    import pydevd_plugins.extensions as extensions
except:
    # Best-effort: a missing/broken extensions package disables extensions but
    # must never break the debugger itself (the failure is still logged).
    pydev_log.exception()
    extensions = None
|
||||
|
||||
|
||||
class ExtensionManager(object):
    """Lazily discovers, imports and instantiates pydevd extension classes.

    Extension modules are looked up under ``pydevd_plugins.extensions`` and must
    be named ``pydevd_plugin*`` to be considered.
    """

    def __init__(self):
        # None until the first request; afterwards the list of imported extension modules.
        self.loaded_extensions = None
        # Cache: extension base type -> list of instantiated handler objects.
        self.type_to_instance = {}

    def _load_modules(self):
        """Import every non-package 'pydevd_plugin*' module found under the extensions package."""
        self.loaded_extensions = []
        if extensions:
            for module_loader, name, ispkg in pkgutil.walk_packages(extensions.__path__, extensions.__name__ + "."):
                mod_name = name.split(".")[-1]
                if not ispkg and mod_name.startswith("pydevd_plugin"):
                    try:
                        __import__(name)
                        module = sys.modules[name]
                        self.loaded_extensions.append(module)
                    except ImportError:
                        # A broken extension is reported but must not prevent loading the others.
                        pydev_log.critical("Unable to load extension: %s", name)

    def _ensure_loaded(self):
        # One-shot lazy load (loaded_extensions is None only before the first call).
        if self.loaded_extensions is None:
            self._load_modules()

    def _iter_attr(self):
        """Yield (attr_name, attr) for public attributes of each loaded module, honoring __all__."""
        for extension in self.loaded_extensions:
            dunder_all = getattr(extension, "__all__", None)
            for attr_name in dir(extension):
                if not attr_name.startswith("_"):
                    if dunder_all is None or attr_name in dunder_all:
                        yield attr_name, getattr(extension, attr_name)

    def get_extension_classes(self, extension_type):
        """Return (and cache) instances of every loaded class that subclasses *extension_type*."""
        self._ensure_loaded()
        if extension_type in self.type_to_instance:
            return self.type_to_instance[extension_type]
        handlers = self.type_to_instance.setdefault(extension_type, [])
        for attr_name, attr in self._iter_attr():
            # The base type itself is skipped; only concrete subclasses are instantiated.
            if isinstance(attr, type) and issubclass(attr, extension_type) and attr is not extension_type:
                try:
                    handlers.append(attr())
                except:
                    # A failing constructor is logged and skipped so other handlers still load.
                    pydev_log.exception("Unable to load extension class: %s", attr_name)
        return handlers
|
||||
|
||||
|
||||
# Process-wide singleton: extension discovery/instantiation happens once and is cached here.
EXTENSION_MANAGER_INSTANCE = ExtensionManager()


def extensions_of_type(extension_type):
    """
    :param T extension_type: The type of the extension hook
    :rtype: list[T]
    """
    return EXTENSION_MANAGER_INSTANCE.get_extension_classes(extension_type)
|
||||
@@ -0,0 +1,338 @@
|
||||
import fnmatch
|
||||
import glob
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
import pydevd_file_utils
|
||||
import json
|
||||
from collections import namedtuple
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from pydevd_file_utils import normcase
|
||||
from _pydevd_bundle.pydevd_constants import USER_CODE_BASENAMES_STARTING_WITH, LIBRARY_CODE_BASENAMES_STARTING_WITH, IS_PYPY, IS_WINDOWS
|
||||
from _pydevd_bundle import pydevd_constants
|
||||
from _pydevd_bundle.pydevd_constants import is_true_in_env
|
||||
|
||||
# name: a glob pattern when is_path is True, otherwise a module name;
# exclude: True to exclude matches from debugging, False to explicitly include them.
ExcludeFilter = namedtuple("ExcludeFilter", "name, exclude, is_path")
|
||||
|
||||
|
||||
def _convert_to_str_and_clear_empty(roots):
|
||||
new_roots = []
|
||||
for root in roots:
|
||||
assert isinstance(root, str), "%s not str (found: %s)" % (root, type(root))
|
||||
if root:
|
||||
new_roots.append(root)
|
||||
return new_roots
|
||||
|
||||
|
||||
def _check_matches(patterns, paths):
    """Recursively match glob pattern segments against path segments.

    Besides regular fnmatch-style segments, supports "**", which matches any
    number of path segments. Both sides are case-normalized with normcase.
    """
    if not patterns and not paths:
        # Matched to the end.
        return True

    if (not patterns and paths) or (patterns and not paths):
        # One side exhausted before the other: no match.
        return False

    pattern = normcase(patterns[0])
    path = normcase(paths[0])

    if not glob.has_magic(pattern):
        # Literal segment: must match exactly.
        if pattern != path:
            return False

    elif pattern == "**":
        if len(patterns) == 1:
            return True  # if ** is the last one it matches anything to the right.

        for i in range(len(paths)):
            # Recursively check the remaining patterns as the
            # current pattern could match any number of paths.
            if _check_matches(patterns[1:], paths[i:]):
                return True

    elif not fnmatch.fnmatch(path, pattern):
        # Current part doesn't match.
        return False

    return _check_matches(patterns[1:], paths[1:])
|
||||
|
||||
|
||||
def glob_matches_path(path, pattern, sep=os.sep, altsep=os.altsep):
    """Return True if *path* matches glob *pattern* (supports "**" and Windows drive letters)."""
    if altsep:
        # Normalize the alternate separator (e.g. "/" on Windows) to the primary one.
        pattern = pattern.replace(altsep, sep)
        path = path.replace(altsep, sep)

    drive = ""
    if len(path) > 1 and path[1] == ":":
        # Split off the drive so it can be compared case-insensitively.
        drive, path = path[0], path[2:]

    if drive and len(pattern) > 1:
        if pattern[1] == ":":
            if drive.lower() != pattern[0].lower():
                return False
            pattern = pattern[2:]

    patterns = pattern.split(sep)
    paths = path.split(sep)
    # Drop the leading empty segment produced by absolute paths/patterns.
    if paths:
        if paths[0] == "":
            paths = paths[1:]
    if patterns:
        if patterns[0] == "":
            patterns = patterns[1:]

    return _check_matches(patterns, paths)
|
||||
|
||||
|
||||
class FilesFiltering(object):
    """
    Note: calls at FilesFiltering are uncached.

    The actual API used should be through PyDB.
    """

    def __init__(self):
        # ExcludeFilter list (stepping filters) -- see set_exclude_filters.
        self._exclude_filters = []
        # Normalized roots (trailing separator) of user project code.
        self._project_roots = []
        # Normalized roots (trailing separator) of library code.
        self._library_roots = []

        # Filter out libraries?
        self._use_libraries_filter = False
        self.require_module = False  # True if some exclude filter filters by the module.

        self.set_use_libraries_filter(is_true_in_env("PYDEVD_FILTER_LIBRARIES"))

        # Roots may be pre-configured through environment variables by the IDE.
        project_roots = os.getenv("IDE_PROJECT_ROOTS", None)
        if project_roots is not None:
            project_roots = project_roots.split(os.pathsep)
        else:
            project_roots = []
        self.set_project_roots(project_roots)

        library_roots = os.getenv("LIBRARY_ROOTS", None)
        if library_roots is not None:
            library_roots = library_roots.split(os.pathsep)
        else:
            library_roots = self._get_default_library_roots()
        self.set_library_roots(library_roots)

        # Stepping filters.
        pydevd_filters = os.getenv("PYDEVD_FILTERS", "")
        # To filter out it's something as: {'**/not_my_code/**': True}
        if pydevd_filters:
            pydev_log.debug("PYDEVD_FILTERS %s", (pydevd_filters,))
            if pydevd_filters.startswith("{"):
                # dict(glob_pattern (str) -> exclude(True or False))
                exclude_filters = []
                for key, val in json.loads(pydevd_filters).items():
                    exclude_filters.append(ExcludeFilter(key, val, True))
                self._exclude_filters = exclude_filters
            else:
                # A ';' separated list of strings with globs for the
                # list of excludes.
                filters = pydevd_filters.split(";")
                new_filters = []
                for new_filter in filters:
                    if new_filter.strip():
                        new_filters.append(ExcludeFilter(new_filter.strip(), True, True))
                self._exclude_filters = new_filters

    @classmethod
    def _get_default_library_roots(cls):
        """Compute default library roots from sysconfig, site, stdlib modules and sys.path."""
        pydev_log.debug("Collecting default library roots.")
        # Provide sensible defaults if not in env vars.
        import site

        roots = []

        try:
            import sysconfig  # Python 2.7 onwards only.
        except ImportError:
            pass
        else:
            for path_name in set(("stdlib", "platstdlib", "purelib", "platlib")) & set(sysconfig.get_path_names()):
                roots.append(sysconfig.get_path(path_name))

        # Make sure we always get at least the standard library location (based on the `os` and
        # `threading` modules -- it's a bit weird that it may be different on the ci, but it happens).
        roots.append(os.path.dirname(os.__file__))
        roots.append(os.path.dirname(threading.__file__))
        if IS_PYPY:
            # On PyPy 3.6 (7.3.1) it wrongly says that sysconfig.get_path('stdlib') is
            # <install>/lib-pypy when the installed version is <install>/lib_pypy.
            try:
                import _pypy_wait
            except ImportError:
                pydev_log.debug("Unable to import _pypy_wait on PyPy when collecting default library roots.")
            else:
                pypy_lib_dir = os.path.dirname(_pypy_wait.__file__)
                pydev_log.debug("Adding %s to default library roots.", pypy_lib_dir)
                roots.append(pypy_lib_dir)

        if hasattr(site, "getusersitepackages"):
            site_paths = site.getusersitepackages()
            if isinstance(site_paths, (list, tuple)):
                for site_path in site_paths:
                    roots.append(site_path)
            else:
                roots.append(site_paths)

        if hasattr(site, "getsitepackages"):
            site_paths = site.getsitepackages()
            if isinstance(site_paths, (list, tuple)):
                for site_path in site_paths:
                    roots.append(site_path)
            else:
                roots.append(site_paths)

        for path in sys.path:
            if os.path.exists(path) and os.path.basename(path) in ("site-packages", "pip-global"):
                roots.append(path)

        # On WASM some of the roots may not exist, filter those out.
        roots = [path for path in roots if path is not None]
        # Also track realpath variants so symlinked installs match either form.
        roots.extend([os.path.realpath(path) for path in roots])

        return sorted(set(roots))

    def _fix_roots(self, roots):
        """Normalize roots to absolute form with a trailing separator (for prefix matching)."""
        roots = _convert_to_str_and_clear_empty(roots)
        new_roots = []
        for root in roots:
            path = self._absolute_normalized_path(root)
            # Trailing separator ensures startswith() only matches whole directory names.
            if pydevd_constants.IS_WINDOWS:
                new_roots.append(path + "\\")
            else:
                new_roots.append(path + "/")
        return new_roots

    def _absolute_normalized_path(self, filename):
        """
        Provides a version of the filename that's absolute and normalized.
        """
        return normcase(pydevd_file_utils.absolute_path(filename))

    def set_project_roots(self, project_roots):
        self._project_roots = self._fix_roots(project_roots)
        pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % project_roots)

    def _get_project_roots(self):
        return self._project_roots

    def set_library_roots(self, roots):
        self._library_roots = self._fix_roots(roots)
        pydev_log.debug("LIBRARY_ROOTS %s\n" % roots)

    def _get_library_roots(self):
        return self._library_roots

    def in_project_roots(self, received_filename):
        """
        Note: don't call directly. Use PyDb.in_project_scope (there's no caching here and it doesn't
        handle all possibilities for knowing whether a project is actually in the scope, it
        just handles the heuristics based on the absolute_normalized_filename without the actual frame).
        """
        # Flip to True locally when debugging the filtering heuristics themselves.
        DEBUG = False

        if received_filename.startswith(USER_CODE_BASENAMES_STARTING_WITH):
            if DEBUG:
                pydev_log.debug(
                    "In in_project_roots - user basenames - starts with %s (%s)", received_filename, USER_CODE_BASENAMES_STARTING_WITH
                )
            return True

        if received_filename.startswith(LIBRARY_CODE_BASENAMES_STARTING_WITH):
            if DEBUG:
                pydev_log.debug(
                    "Not in in_project_roots - library basenames - starts with %s (%s)",
                    received_filename,
                    LIBRARY_CODE_BASENAMES_STARTING_WITH,
                )
            return False

        project_roots = self._get_project_roots()  # roots are absolute/normalized.

        absolute_normalized_filename = self._absolute_normalized_path(received_filename)
        # Also compared as a directory so that a root itself counts as being inside that root.
        absolute_normalized_filename_as_dir = absolute_normalized_filename + ("\\" if IS_WINDOWS else "/")

        found_in_project = []
        for root in project_roots:
            if root and (absolute_normalized_filename.startswith(root) or root == absolute_normalized_filename_as_dir):
                if DEBUG:
                    pydev_log.debug("In project: %s (%s)", absolute_normalized_filename, root)
                found_in_project.append(root)

        found_in_library = []
        library_roots = self._get_library_roots()
        for root in library_roots:
            if root and (absolute_normalized_filename.startswith(root) or root == absolute_normalized_filename_as_dir):
                found_in_library.append(root)
                if DEBUG:
                    pydev_log.debug("In library: %s (%s)", absolute_normalized_filename, root)
            else:
                if DEBUG:
                    pydev_log.debug("Not in library: %s (%s)", absolute_normalized_filename, root)

        if not project_roots:
            # If we have no project roots configured, consider it being in the project
            # roots if it's not found in site-packages (because we have defaults for those
            # and not the other way around).
            in_project = not found_in_library
            if DEBUG:
                pydev_log.debug("Final in project (no project roots): %s (%s)", absolute_normalized_filename, in_project)

        else:
            in_project = False
            if found_in_project:
                if not found_in_library:
                    if DEBUG:
                        pydev_log.debug("Final in project (in_project and not found_in_library): %s (True)", absolute_normalized_filename)
                    in_project = True
                else:
                    # Found in both, let's see which one has the bigger path matched.
                    if max(len(x) for x in found_in_project) > max(len(x) for x in found_in_library):
                        in_project = True
                    if DEBUG:
                        pydev_log.debug("Final in project (found in both): %s (%s)", absolute_normalized_filename, in_project)

        return in_project

    def use_libraries_filter(self):
        """
        Should we debug only what's inside project folders?
        """
        return self._use_libraries_filter

    def set_use_libraries_filter(self, use):
        pydev_log.debug("pydevd: Use libraries filter: %s\n" % use)
        self._use_libraries_filter = use

    def use_exclude_filters(self):
        # Enabled if we have any filters registered.
        return len(self._exclude_filters) > 0

    def exclude_by_filter(self, absolute_filename, module_name):
        """
        :return: True if it should be excluded, False if it should be included and None
        if no rule matched the given file.
        """
        for exclude_filter in self._exclude_filters:  # : :type exclude_filter: ExcludeFilter
            if exclude_filter.is_path:
                if glob_matches_path(absolute_filename, exclude_filter.name):
                    return exclude_filter.exclude
            else:
                # Module filter.
                if exclude_filter.name == module_name or module_name.startswith(exclude_filter.name + "."):
                    return exclude_filter.exclude
        return None

    def set_exclude_filters(self, exclude_filters):
        """
        :param list(ExcludeFilter) exclude_filters:
        """
        self._exclude_filters = exclude_filters
        self.require_module = False
        # require_module signals callers that module names must be computed for filtering.
        for exclude_filter in exclude_filters:
            if not exclude_filter.is_path:
                self.require_module = True
                break
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,445 @@
|
||||
from _pydevd_bundle.pydevd_constants import EXCEPTION_TYPE_USER_UNHANDLED, EXCEPTION_TYPE_UNHANDLED, IS_PY311_OR_GREATER, IS_PY313_0
|
||||
from _pydev_bundle import pydev_log
|
||||
import itertools
|
||||
from typing import Any, Dict
|
||||
from os.path import basename, splitext
|
||||
|
||||
|
||||
class Frame(object):
    """Synthetic stand-in exposing the frame attributes the debugger reads.

    Note: the second positional parameter keeps its historical name (f_fileno)
    although it is stored as the line number (f_lineno).
    """

    def __init__(self, f_back, f_fileno, f_code, f_locals, f_globals=None, f_trace=None):
        self.f_back = f_back
        self.f_lineno = f_fileno
        self.f_code = f_code
        self.f_locals = f_locals
        self.f_trace = f_trace
        # Real frames always have a globals mapping, so default to an empty one.
        self.f_globals = {} if f_globals is None else f_globals
|
||||
|
||||
|
||||
class FCode(object):
    """Synthetic code object carrying just the attributes the debugger reads."""

    def __init__(self, name, filename):
        self.co_name, self.co_filename = name, filename
        # Fixed defaults: synthetic code does not map to real compiled code.
        self.co_firstlineno = 1
        self.co_flags = 0

    def co_lines(self):
        # Mirrors CodeType.co_lines(); a synthetic code object has no line table.
        return ()
|
||||
|
||||
|
||||
def add_exception_to_frame(frame, exception_info):
    """Store *exception_info* in the frame's locals under the "__exception__" key."""
    frame.f_locals["__exception__"] = exception_info
|
||||
|
||||
|
||||
def remove_exception_from_frame(frame):
    """Undo add_exception_to_frame: clear the "__exception__" entry from frame.f_locals."""
    if IS_PY313_0:
        # In 3.13.0 frame.f_locals became a proxy for a dict, It does not
        # have methods to allow items to be removed, only added. So just set the item to None.
        # Should be fixed in 3.13.1 in PR: https://github.com/python/cpython/pull/125616
        frame.f_locals["__exception__"] = None
    else:
        frame.f_locals.pop("__exception__", None)
|
||||
|
||||
|
||||
# Debugger files that install import hooks; tracebacks going through these files
# should not be surfaced as user errors (see ignore_exception_trace).
FILES_WITH_IMPORT_HOOKS = ["pydev_monkey_qt.py", "pydev_import_hook.py"]
|
||||
|
||||
|
||||
def just_raised(trace):
    """Return True if *trace* is the deepest traceback entry (exception raised right here)."""
    return trace is not None and trace.tb_next is None
|
||||
|
||||
|
||||
def short_tb(exc_tb):
    """Render a traceback as a compact one-line summary (useful for logging)."""
    entries = []
    tb = exc_tb
    while tb:
        code = tb.tb_frame.f_code
        entries.append("{%r, %r, %r}" % (code.co_filename, code.co_name, tb.tb_lineno))
        tb = tb.tb_next
    return "Traceback: %s\n" % (" -> ".join(entries))
|
||||
|
||||
|
||||
def short_frame(frame):
    """Return a one-line "module::function line" description of *frame* (or "None")."""
    if frame is None:
        return "None"

    code = frame.f_code
    module = splitext(basename(code.co_filename))[0]
    # Keeps the historical `and/or` semantics: a missing OR falsy f_lineno maps to 1.
    lineno = getattr(frame, "f_lineno", None) or 1
    return "%s::%s %s" % (module, code.co_name, lineno)
|
||||
|
||||
|
||||
def short_stack(frame):
    """Render the frame chain starting at *frame* as a compact one-line summary."""
    descriptions = []
    current = frame
    while current:
        descriptions.append(short_frame(current))
        # Tolerate frame-like objects without an f_back attribute.
        current = getattr(current, "f_back", None)
    return "Stack: %s\n" % (" -> ".join(descriptions))
|
||||
|
||||
|
||||
def ignore_exception_trace(trace):
    """Return True if any traceback entry goes through importlib bootstrap files or
    the debugger's own import-hook files (such exceptions should not be surfaced)."""
    tb = trace
    while tb is not None:
        filename = tb.tb_frame.f_code.co_filename
        if filename in ("<frozen importlib._bootstrap>", "<frozen importlib._bootstrap_external>"):
            # Do not stop on inner exceptions in py3 while importing.
            return True

        # ImportError should appear in a user's code, not inside debugger.
        if any(filename.endswith(hook_file) for hook_file in FILES_WITH_IMPORT_HOOKS):
            return True

        tb = tb.tb_next

    return False
|
||||
|
||||
|
||||
def cached_call(obj, func, *args):
    """Call func(*args) once and memoize the result on *obj*.

    The cache key is the function __name__, so later calls with different args
    (or a same-named function) return the first stored value.
    """
    attr = "_cached_" + func.__name__
    try:
        return getattr(obj, attr)
    except AttributeError:
        result = func(*args)
        setattr(obj, attr, result)
        return result
|
||||
|
||||
|
||||
class _LineColInfo:
    """Start/end line and column of the instruction that raised (co_positions-style data)."""

    def __init__(self, lineno, end_lineno, colno, end_colno):
        # Note: the column offsets are utf-8 BYTE offsets (see map_columns_to_line).
        self.lineno = lineno
        self.end_lineno = end_lineno
        self.colno = colno
        self.end_colno = end_colno

    def map_columns_to_line(self, original_line: str):
        """
        The columns internally are actually based on bytes.

        Also, the position isn't always the ideal one as the start may not be
        what we want (if the user has many subscripts in the line the start
        will always be the same and only the end would change).
        For more details see:
        https://github.com/microsoft/debugpy/issues/1099#issuecomment-1303403995

        So, this function maps the start/end columns to the position to be shown in the editor.
        """
        colno = _utf8_byte_offset_to_character_offset(original_line, self.colno)
        end_colno = _utf8_byte_offset_to_character_offset(original_line, self.end_colno)

        if self.lineno == self.end_lineno:
            try:
                # Try to narrow the range to the interesting sub-expression
                # (e.g. the operator of a BinOp or the [...] of a subscript).
                ret = _extract_caret_anchors_in_bytes_from_line_segment(original_line[colno:end_colno])
                if ret is not None:
                    return (
                        _utf8_byte_offset_to_character_offset(original_line, ret[0] + self.colno),
                        _utf8_byte_offset_to_character_offset(original_line, ret[1] + self.colno),
                    )
            except Exception:
                pass  # Suppress exception

        return colno, end_colno
|
||||
|
||||
|
||||
# First code point that needs N bytes when encoded as utf-8 (used to translate
# the byte-based column offsets from code.co_positions() into character offsets).
_utf8_with_2_bytes = 0x80
_utf8_with_3_bytes = 0x800
_utf8_with_4_bytes = 0x10000
|
||||
|
||||
|
||||
def _utf8_byte_offset_to_character_offset(s: str, offset: int):
|
||||
byte_offset = 0
|
||||
char_offset = 0
|
||||
offset = offset or 0
|
||||
|
||||
for char_offset, character in enumerate(s):
|
||||
byte_offset += 1
|
||||
|
||||
codepoint = ord(character)
|
||||
|
||||
if codepoint >= _utf8_with_4_bytes:
|
||||
byte_offset += 3
|
||||
|
||||
elif codepoint >= _utf8_with_3_bytes:
|
||||
byte_offset += 2
|
||||
|
||||
elif codepoint >= _utf8_with_2_bytes:
|
||||
byte_offset += 1
|
||||
|
||||
if byte_offset > offset:
|
||||
break
|
||||
else:
|
||||
char_offset += 1
|
||||
|
||||
return char_offset
|
||||
|
||||
|
||||
# Based on traceback._extract_caret_anchors_in_bytes_from_line_segment (Python 3.11.0)
def _extract_caret_anchors_in_bytes_from_line_segment(segment: str):
    """Return finer (start, end) byte anchors inside *segment*, or None.

    Only single-expression segments whose top node is a BinOp (anchor at the
    operator) or a Subscript (anchor over the "[...]") get refined anchors.
    """
    import ast

    try:
        # Offsets below are byte offsets, so work on the utf-8 encoded segment.
        segment = segment.encode("utf-8")
    except UnicodeEncodeError:
        return None
    try:
        tree = ast.parse(segment)
    except SyntaxError:
        return None

    if len(tree.body) != 1:
        return None

    statement = tree.body[0]
    if isinstance(statement, ast.Expr):
        expr = statement.value
        if isinstance(expr, ast.BinOp):
            # Skip whitespace between the left operand and the operator.
            operator_str = segment[expr.left.end_col_offset : expr.right.col_offset]
            operator_offset = len(operator_str) - len(operator_str.lstrip())

            left_anchor = expr.left.end_col_offset + operator_offset
            right_anchor = left_anchor + 1
            # Indexing bytes yields ints, hence the comparison with ord(b" ").
            # A non-space second character means a two-char operator (e.g. "//", "**").
            if operator_offset + 1 < len(operator_str) and not operator_str[operator_offset + 1] == ord(b" "):
                right_anchor += 1
            return left_anchor, right_anchor
        if isinstance(expr, ast.Subscript):
            # Anchor from just after the subscripted value through the closing bracket.
            return expr.value.end_col_offset, expr.slice.end_col_offset + 1

    return None
|
||||
|
||||
|
||||
class FramesList(object):
    """Ordered list of frames for an exception plus the metadata needed to present it."""

    def __init__(self):
        self._frames = []

        # If available, the line number for the frame will be gotten from this dict,
        # otherwise frame.f_lineno will be used (needed for unhandled exceptions as
        # the place where we report may be different from the place where it's raised).
        self.frame_id_to_lineno = {}
        self.frame_id_to_line_col_info: Dict[Any, _LineColInfo] = {}

        # Exception metadata (type, value and traceback object).
        self.exc_type = None
        self.exc_desc = None
        self.trace_obj = None

        # This may be set to set the current frame (for the case where we have
        # an unhandled exception where we want to show the root bu we have a different
        # executing frame).
        self.current_frame = None

        # This is to know whether an exception was extracted from a __cause__ or __context__.
        self.exc_context_msg = ""

        # FramesList for the chained (__cause__/__context__) exception, if any.
        self.chained_frames_list = None

    def append(self, frame):
        self._frames.append(frame)

    def last_frame(self):
        return self._frames[-1]

    def __len__(self):
        return len(self._frames)

    def __iter__(self):
        return iter(self._frames)

    def __repr__(self):
        lst = ["FramesList("]

        lst.append("\n exc_type: ")
        lst.append(str(self.exc_type))

        lst.append("\n exc_desc: ")
        lst.append(str(self.exc_desc))

        lst.append("\n trace_obj: ")
        lst.append(str(self.trace_obj))

        lst.append("\n current_frame: ")
        lst.append(str(self.current_frame))

        for frame in self._frames:
            lst.append("\n ")
            lst.append(repr(frame))
            lst.append(",")

        if self.chained_frames_list is not None:
            lst.append("\n--- Chained ---\n")
            lst.append(str(self.chained_frames_list))

        lst.append("\n)")

        return "".join(lst)

    __str__ = __repr__
|
||||
|
||||
|
||||
class _DummyFrameWrapper(object):
    """Proxy over a real frame so the same frame can appear with distinct ids/linenos.

    Locals and globals are delegated to the wrapped frame; line number, f_back and
    code object are owned by the wrapper.
    """

    def __init__(self, frame, f_lineno, f_back):
        self._base_frame = frame
        self.f_lineno = f_lineno
        self.f_back = f_back
        self.f_trace = None
        # Snapshot the code identity into a synthetic code object.
        self.f_code = FCode(frame.f_code.co_name, frame.f_code.co_filename)

    @property
    def f_locals(self):
        return self._base_frame.f_locals

    @property
    def f_globals(self):
        return self._base_frame.f_globals

    def __str__(self):
        return "<_DummyFrameWrapper, file '%s', line %s, %s" % (self.f_code.co_filename, self.f_lineno, self.f_code.co_name)

    __repr__ = __str__
|
||||
|
||||
|
||||
# Same wording the stdlib `traceback` module uses when printing chained exceptions.
_cause_message = "\nThe above exception was the direct cause " "of the following exception:\n\n"

_context_message = "\nDuring handling of the above exception, " "another exception occurred:\n\n"
|
||||
|
||||
|
||||
def create_frames_list_from_exception_cause(trace_obj, frame, exc_type, exc_desc, memo):
    """Build a FramesList for exc_desc's __cause__ (or, failing that, __context__).

    Returns None when there is no chained exception or it was already visited
    (*memo* holds ids of already-processed exceptions to avoid cycles).
    The trace_obj/frame/exc_type parameters are currently unused; the chain is
    derived from *exc_desc* itself.
    """
    lst = []
    msg = "<Unknown context>"
    try:
        exc_cause = getattr(exc_desc, "__cause__", None)
        msg = _cause_message
    except Exception:
        exc_cause = None

    if exc_cause is None:
        try:
            exc_cause = getattr(exc_desc, "__context__", None)
            msg = _context_message
        except Exception:
            exc_cause = None

    if exc_cause is None or id(exc_cause) in memo:
        return None

    # The traceback module does this, so, let's play safe here too...
    memo.add(id(exc_cause))

    tb = exc_cause.__traceback__
    frames_list = FramesList()
    frames_list.exc_type = type(exc_cause)
    frames_list.exc_desc = exc_cause
    frames_list.trace_obj = tb
    frames_list.exc_context_msg = msg

    while tb is not None:
        # Note: we don't use the actual tb.tb_frame because if the cause of the exception
        # uses the same frame object, the id(frame) would be the same and the frame_id_to_lineno
        # would be wrong as the same frame needs to appear with 2 different lines.
        lst.append((_DummyFrameWrapper(tb.tb_frame, tb.tb_lineno, None), tb.tb_lineno, _get_line_col_info_from_tb(tb)))
        tb = tb.tb_next

    for tb_frame, tb_lineno, line_col_info in lst:
        frames_list.append(tb_frame)
        frames_list.frame_id_to_lineno[id(tb_frame)] = tb_lineno
        frames_list.frame_id_to_line_col_info[id(tb_frame)] = line_col_info

    return frames_list
|
||||
|
||||
|
||||
if IS_PY311_OR_GREATER:

    def _get_code_position(code, instruction_index):
        """Return the (lineno, end_lineno, colno, end_colno) for a bytecode offset."""
        if instruction_index < 0:
            return (None, None, None, None)
        positions_gen = code.co_positions()
        # Note: some or all of the tuple elements can be None...
        # tb_lasti is a byte offset and each instruction is 2 bytes, hence "// 2".
        return next(itertools.islice(positions_gen, instruction_index // 2, None))

    def _get_line_col_info_from_tb(tb):
        """Build a _LineColInfo for the instruction referenced by this traceback entry."""
        positions = _get_code_position(tb.tb_frame.f_code, tb.tb_lasti)
        if positions[0] is None:
            # No line in co_positions(): fall back to tb.tb_lineno, keep the column data.
            return _LineColInfo(tb.tb_lineno, *positions[1:])
        else:
            return _LineColInfo(*positions)

else:

    def _get_line_col_info_from_tb(tb):
        # Not available on older versions of Python.
        return None
|
||||
|
||||
|
||||
def create_frames_list_from_traceback(trace_obj, frame, exc_type, exc_desc, exception_type=None):
    """
    :param trace_obj:
        This is the traceback from which the list should be created.

    :param frame:
        This is the first frame to be considered (i.e.: topmost frame). If None is passed, all
        the frames from the traceback are shown (so, None should be passed for unhandled exceptions).

    :param exception_type:
        If this is an unhandled exception or user unhandled exception, we'll not trim the stack to create from the passed
        frame, rather, we'll just mark the frame in the frames list.
    """
    lst = []

    tb = trace_obj
    if tb is not None and tb.tb_frame is not None:
        # Collect the callers above the raise point first (outermost ends up first in lst).
        f = tb.tb_frame.f_back
        while f is not None:
            lst.insert(0, (f, f.f_lineno, None))
            f = f.f_back

    # Then the traceback entries themselves (with column info when available).
    while tb is not None:
        lst.append((tb.tb_frame, tb.tb_lineno, _get_line_col_info_from_tb(tb)))
        tb = tb.tb_next

    frames_list = None

    # Walk bottom-up; start collecting once the requested topmost frame is found
    # (or immediately for unhandled/user-unhandled cases).
    for tb_frame, tb_lineno, line_col_info in reversed(lst):
        if frames_list is None and ((frame is tb_frame) or (frame is None) or (exception_type == EXCEPTION_TYPE_USER_UNHANDLED)):
            frames_list = FramesList()

        if frames_list is not None:
            frames_list.append(tb_frame)
            frames_list.frame_id_to_lineno[id(tb_frame)] = tb_lineno
            frames_list.frame_id_to_line_col_info[id(tb_frame)] = line_col_info

    if frames_list is None and frame is not None:
        # Fallback (shouldn't happen in practice).
        pydev_log.info("create_frames_list_from_traceback did not find topmost frame in list.")
        frames_list = create_frames_list_from_frame(frame)

    frames_list.exc_type = exc_type
    frames_list.exc_desc = exc_desc
    frames_list.trace_obj = trace_obj

    if exception_type == EXCEPTION_TYPE_USER_UNHANDLED:
        frames_list.current_frame = frame
    elif exception_type == EXCEPTION_TYPE_UNHANDLED:
        if len(frames_list) > 0:
            frames_list.current_frame = frames_list.last_frame()

    # Follow __cause__/__context__ chains, building a linked list of FramesLists
    # (memo prevents revisiting an exception and looping on cycles).
    curr = frames_list
    memo = set()
    memo.add(id(exc_desc))

    while True:
        chained = create_frames_list_from_exception_cause(None, None, None, curr.exc_desc, memo)
        if chained is None:
            break
        else:
            curr.chained_frames_list = chained
            curr = chained

    return frames_list
|
||||
|
||||
|
||||
def create_frames_list_from_frame(frame):
    """Build a FramesList containing *frame* and every parent reachable via f_back."""
    frames_list = FramesList()
    current = frame
    while current is not None:
        frames_list.append(current)
        current = current.f_back
    return frames_list
|
||||
@@ -0,0 +1,91 @@
|
||||
import pydevd_tracing
|
||||
import greenlet
|
||||
import gevent
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydevd_bundle.pydevd_custom_frames import add_custom_frame, update_custom_frame, remove_custom_frame
|
||||
from _pydevd_bundle.pydevd_constants import GEVENT_SHOW_PAUSED_GREENLETS, get_global_debugger, thread_get_ident
|
||||
from _pydev_bundle import pydev_log
|
||||
from pydevd_file_utils import basename
|
||||
|
||||
# Maps a paused greenlet -> the thread id of the custom (virtual) frame created for it.
_saved_greenlets_to_custom_frame_thread_id = {}
|
||||
|
||||
if GEVENT_SHOW_PAUSED_GREENLETS:

    def _get_paused_name(py_db, g):
        """Describe a paused greenlet using the last user-code frame in its stack."""
        frame = g.gr_frame
        use_frame = frame

        # i.e.: Show in the description of the greenlet the last user-code found.
        while use_frame is not None:
            if py_db.apply_files_filter(use_frame, use_frame.f_code.co_filename, True):
                frame = use_frame
                use_frame = use_frame.f_back
            else:
                break

        if use_frame is None:
            use_frame = frame

        return "%s: %s - %s" % (type(g).__name__, use_frame.f_code.co_name, basename(use_frame.f_code.co_filename))

    def greenlet_events(event, args):
        """greenlet.settrace callback: mirror paused greenlets as custom frames in the debugger."""
        if event in ("switch", "throw"):
            py_db = get_global_debugger()
            origin, target = args

            if not origin.dead and origin.gr_frame is not None:
                # The origin is being paused: add (or refresh) its custom frame.
                frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.get(origin)
                if frame_custom_thread_id is None:
                    _saved_greenlets_to_custom_frame_thread_id[origin] = add_custom_frame(
                        origin.gr_frame, _get_paused_name(py_db, origin), thread_get_ident()
                    )
                else:
                    update_custom_frame(frame_custom_thread_id, origin.gr_frame, _get_paused_name(py_db, origin), thread_get_ident())
            else:
                # Origin finished (or has no frame): drop its custom frame, if any.
                frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.pop(origin, None)
                if frame_custom_thread_id is not None:
                    remove_custom_frame(frame_custom_thread_id)

            # This one will be resumed, so, remove custom frame from it.
            frame_custom_thread_id = _saved_greenlets_to_custom_frame_thread_id.pop(target, None)
            if frame_custom_thread_id is not None:
                remove_custom_frame(frame_custom_thread_id)

        # The tracing needs to be reapplied for each greenlet as gevent
        # clears the tracing set through sys.settrace for each greenlet.
        # NOTE(review): original indentation was lost in extraction; placed at function
        # level (matching the non-GEVENT_SHOW_PAUSED_GREENLETS variant) -- confirm nesting.
        pydevd_tracing.reapply_settrace()

else:
    # i.e.: no logic related to showing paused greenlets is needed.
    def greenlet_events(event, args):
        pydevd_tracing.reapply_settrace()
|
||||
|
||||
|
||||
def enable_gevent_integration():
    """Install `greenlet_events` as the greenlet trace function.

    Needed so the debugger tracing is reapplied whenever gevent switches
    greenlets (and, when enabled, so paused greenlets are shown).
    """
    # References:
    # https://greenlet.readthedocs.io/en/latest/api.html#greenlet.settrace
    # https://greenlet.readthedocs.io/en/latest/tracing.html

    # Note: gevent.version_info is WRONG (gevent.__version__ must be used).
    try:
        if tuple(int(x) for x in gevent.__version__.split(".")[:2]) <= (20, 0):
            if not GEVENT_SHOW_PAUSED_GREENLETS:
                # Old gevent with the paused-greenlets feature off: nothing to do.
                return

            if not hasattr(greenlet, "settrace"):
                # In older versions it was optional.
                # We still try to use if available though.
                pydev_log.debug("greenlet.settrace not available. GEVENT_SHOW_PAUSED_GREENLETS will have no effect.")
                return
        try:
            greenlet.settrace(greenlet_events)
        except:
            pydev_log.exception("Error with greenlet.settrace.")
    except:
        pydev_log.exception("Error setting up gevent %s.", gevent.__version__)
|
||||
|
||||
|
||||
def log_gevent_debug_info():
    """Log the greenlet/gevent versions and the gevent install location."""
    for fmt, value in (
        ("Greenlet version: %s", greenlet.__version__),
        ("Gevent version: %s", gevent.__version__),
        ("Gevent install location: %s", gevent.__file__),
    ):
        pydev_log.debug(fmt, value)
|
||||
@@ -0,0 +1,70 @@
|
||||
# Note: code gotten from _pydev_imports_tipper.
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def _imp(name, log=None):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if "." in name:
|
||||
sub = name[0 : name.rfind(".")]
|
||||
|
||||
if log is not None:
|
||||
log.add_content("Unable to import", name, "trying with", sub)
|
||||
log.add_exception()
|
||||
|
||||
return _imp(sub, log)
|
||||
else:
|
||||
s = "Unable to import module: %s - sys.path: %s" % (str(name), sys.path)
|
||||
if log is not None:
|
||||
log.add_content(s)
|
||||
log.add_exception()
|
||||
|
||||
raise ImportError(s)
|
||||
|
||||
|
||||
# IronPython support: on .NET ("cli") the import must first add a clr
# assembly reference; wrap _imp to do that before delegating.
IS_IPY = False
if sys.platform == "cli":
    IS_IPY = True
    _old_imp = _imp

    def _imp(name, log=None):
        # We must add a reference in clr for .Net
        import clr  # @UnresolvedImport

        initial_name = name
        while "." in name:
            try:
                clr.AddReference(name)
                break  # If it worked, that's OK.
            except:
                # Strip the last dotted component and retry with the prefix.
                name = name[0 : name.rfind(".")]
        else:
            try:
                clr.AddReference(name)
            except:
                pass  # That's OK (not dot net module).

        return _old_imp(initial_name, log)
|
||||
|
||||
|
||||
def import_name(name, log=None):
    """Import dotted *name* and return the innermost module/attribute.

    ``__import__`` returns the top-level package, so each remaining dotted
    component is resolved with ``getattr``.
    """
    module = _imp(name, log)

    previous = None
    for part in name.split(".")[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            module = getattr(module, part)
        except AttributeError:
            # Tolerate a repeated component (the shadowing case above).
            if previous != part:
                raise
        previous = part

    return module
|
||||
@@ -0,0 +1,256 @@
|
||||
from _pydevd_bundle.pydevd_constants import ForkSafeLock, get_global_debugger
|
||||
import os
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
||||
class IORedirector:
    """
    This class works to wrap a stream (stdout/stderr) with an additional redirect.
    """

    def __init__(self, original, new_redirect, wrap_buffer=False):
        """
        :param stream original:
            The stream to be wrapped (usually stdout/stderr, but could be None).

        :param stream new_redirect:
            Usually IOBuf (below).

        :param bool wrap_buffer:
            Whether to create a buffer attribute (needed to mimic the python 3
            stdout/stderr which has a buffer to write binary data).
        """
        self._lock = ForkSafeLock(rlock=True)
        # Re-entrancy guard: writing to one target may itself trigger a write
        # back into this redirector; such nested writes are dropped.
        self._writing = False
        self._redirect_to = (original, new_redirect)
        if wrap_buffer and hasattr(original, "buffer"):
            self.buffer = IORedirector(original.buffer, new_redirect.buffer, False)

    def write(self, s):
        # Note that writing to the original stream may fail for some reasons
        # (such as trying to write something that's not a string or having it closed).
        with self._lock:
            if self._writing:
                return
            self._writing = True
            try:
                for r in self._redirect_to:
                    if hasattr(r, "write"):
                        r.write(s)
            finally:
                self._writing = False

    def isatty(self):
        # Delegate to the first wrapped stream that provides isatty().
        for r in self._redirect_to:
            if hasattr(r, "isatty"):
                return r.isatty()
        return False

    def flush(self):
        # Flush every wrapped stream that supports it.
        for r in self._redirect_to:
            if hasattr(r, "flush"):
                r.flush()

    def __getattr__(self, name):
        # Any other attribute is looked up on the wrapped streams, in order.
        for r in self._redirect_to:
            if hasattr(r, name):
                return getattr(r, name)
        raise AttributeError(name)
|
||||
|
||||
|
||||
class RedirectToPyDBIoMessages(object):
    """Stream-like object that forwards each write to the debugger as an io message."""

    def __init__(self, out_ctx, wrap_stream, wrap_buffer, on_write=None):
        """
        :param out_ctx:
            1=stdout and 2=stderr

        :param wrap_stream:
            Either sys.stdout or sys.stderr.

        :param bool wrap_buffer:
            If True the buffer attribute (which wraps writing bytes) should be
            wrapped.

        :param callable(str) on_write:
            May be a custom callable to be called when to write something.
            If not passed the default implementation will create an io message
            and send it through the debugger.
        """
        # Fall back to PYTHONIOENCODING (or utf-8) when the wrapped stream
        # has no (or an empty) encoding.
        self.encoding = getattr(wrap_stream, "encoding", None) or os.environ.get("PYTHONIOENCODING", "utf-8")
        self._out_ctx = out_ctx
        if wrap_buffer:
            self.buffer = RedirectToPyDBIoMessages(out_ctx, wrap_stream, wrap_buffer=False, on_write=on_write)
        self._on_write = on_write

    def get_pydb(self):
        # Note: separate method for mocking on tests.
        return get_global_debugger()

    def flush(self):
        pass  # no-op here

    def write(self, s):
        if self._on_write is not None:
            self._on_write(s)
            return

        if not s:
            return

        # Need s in str
        if isinstance(s, bytes):
            s = s.decode(self.encoding, errors="replace")

        py_db = self.get_pydb()
        if py_db is None:
            return

        # Note that the actual message contents will be a xml with utf-8, although
        # the entry is str on py3 and bytes on py2.
        cmd = py_db.cmd_factory.make_io_message(s, self._out_ctx)
        if py_db.writer is not None:
            py_db.writer.add_command(cmd)
|
||||
|
||||
class IOBuf:
    """This class works as a replacement for stdio and stderr.
    It is a buffer and when its contents are requested, it will erase what
    it has so far so that the next return will not return the same contents again.
    """

    def __init__(self):
        import os

        self.buflist = []
        self.encoding = os.environ.get("PYTHONIOENCODING", "utf-8")

    def getvalue(self):
        # Drain: return everything accumulated so far and reset the buffer.
        contents, self.buflist = self.buflist, []
        return "".join(contents)

    def write(self, s):
        if isinstance(s, bytes):
            s = s.decode(self.encoding, errors="replace")
        self.buflist.append(s)

    def isatty(self):
        return False

    def flush(self):
        pass

    def empty(self):
        return not self.buflist
|
||||
|
||||
|
||||
class _RedirectInfo(object):
|
||||
def __init__(self, original, redirect_to):
|
||||
self.original = original
|
||||
self.redirect_to = redirect_to
|
||||
|
||||
|
||||
class _RedirectionsHolder:
    # Guards every redirection stack/attribute below.
    _lock = ForkSafeLock(rlock=True)
    # Stacks of _RedirectInfo, one per redirected stream name.
    _stack_stdout = []
    _stack_stderr = []

    # _RedirectInfo currently forwarding output to debugger io messages
    # (None while that redirection is not active).
    _pydevd_stdout_redirect_ = None
    _pydevd_stderr_redirect_ = None
|
||||
|
||||
|
||||
def start_redirect(keep_original_redirection=False, std="stdout", redirect_to=None):
    """
    @param std: 'stdout', 'stderr', or 'both'
    @param keep_original_redirection: if True the original stream keeps
        receiving output (through an IORedirector wrapping both streams).
    @param redirect_to: stream to receive the output (an IOBuf is created
        when not given).
    @return: the stream the output was redirected to.
    """
    with _RedirectionsHolder._lock:
        if redirect_to is None:
            redirect_to = IOBuf()

        if std == "both":
            config_stds = ["stdout", "stderr"]
        else:
            config_stds = [std]

        for std in config_stds:
            original = getattr(sys, std)
            stack = getattr(_RedirectionsHolder, "_stack_%s" % std)

            if keep_original_redirection:
                wrap_buffer = True if hasattr(redirect_to, "buffer") else False
                new_std_instance = IORedirector(getattr(sys, std), redirect_to, wrap_buffer=wrap_buffer)
                setattr(sys, std, new_std_instance)
            else:
                new_std_instance = redirect_to
                setattr(sys, std, redirect_to)

            # Record the previous stream so end_redirect() can restore it.
            stack.append(_RedirectInfo(original, new_std_instance))

        return redirect_to
|
||||
|
||||
|
||||
def end_redirect(std="stdout"):
    """Undo the most recent start_redirect() for *std* ('stdout', 'stderr' or 'both')."""
    with _RedirectionsHolder._lock:
        stream_names = ["stdout", "stderr"] if std == "both" else [std]
        for stream_name in stream_names:
            stack = getattr(_RedirectionsHolder, "_stack_%s" % stream_name)
            entry = stack.pop()
            setattr(sys, stream_name, entry.original)
|
||||
|
||||
|
||||
def redirect_stream_to_pydb_io_messages(std):
    """
    :param std:
        'stdout' or 'stderr'

    :return bool:
        True when the redirection was set up now, False when it was
        already active.
    """
    with _RedirectionsHolder._lock:
        redirect_to_name = "_pydevd_%s_redirect_" % (std,)
        if getattr(_RedirectionsHolder, redirect_to_name) is None:
            wrap_buffer = True
            original = getattr(sys, std)

            redirect_to = RedirectToPyDBIoMessages(1 if std == "stdout" else 2, original, wrap_buffer)
            start_redirect(keep_original_redirection=True, std=std, redirect_to=redirect_to)

            # Remember this exact stack entry so the stop function can later
            # undo this specific redirection.
            stack = getattr(_RedirectionsHolder, "_stack_%s" % std)
            setattr(_RedirectionsHolder, redirect_to_name, stack[-1])
            return True

        return False
|
||||
|
||||
|
||||
def stop_redirect_stream_to_pydb_io_messages(std):
    """
    :param std:
        'stdout' or 'stderr'
    """
    with _RedirectionsHolder._lock:
        redirect_to_name = "_pydevd_%s_redirect_" % (std,)
        redirect_info = getattr(_RedirectionsHolder, redirect_to_name)
        if redirect_info is not None:  # :type redirect_info: _RedirectInfo
            setattr(_RedirectionsHolder, redirect_to_name, None)

            stack = getattr(_RedirectionsHolder, "_stack_%s" % std)
            prev_info = stack.pop()

            curr = getattr(sys, std)
            # Only restore the original if nothing else redirected the
            # stream in the meantime.
            if curr is redirect_info.redirect_to:
                setattr(sys, std, redirect_info.original)
|
||||
|
||||
|
||||
@contextmanager
def redirect_stream_to_pydb_io_messages_context():
    """Redirect stdout/stderr to debugger io messages while the context is active."""
    # Note: the (reentrant) lock is held for the whole context duration.
    with _RedirectionsHolder._lock:
        started = [stream for stream in ("stdout", "stderr") if redirect_stream_to_pydb_io_messages(stream)]

        try:
            yield
        finally:
            # Only undo the redirections this context actually started.
            for stream in started:
                stop_redirect_stream_to_pydb_io_messages(stream)
|
||||
@@ -0,0 +1,200 @@
|
||||
import json
|
||||
import urllib.parse as urllib_parse
|
||||
|
||||
|
||||
class DebugOptions(object):
    """Holds the debugger settings gathered from launch/attach requests."""

    __slots__ = [
        "just_my_code",
        "redirect_output",
        "show_return_value",
        "break_system_exit_zero",
        "django_debug",
        "flask_debug",
        "stop_on_entry",
        "max_exception_stack_frames",
        "gui_event_loop",
        "client_os",
    ]

    def __init__(self):
        # Defaults used when the client doesn't provide a setting.
        self.just_my_code = True
        self.redirect_output = False
        self.show_return_value = False
        self.break_system_exit_zero = False
        self.django_debug = False
        self.flask_debug = False
        self.stop_on_entry = False
        self.max_exception_stack_frames = 0
        self.gui_event_loop = "matplotlib"
        self.client_os = None

    def to_json(self):
        """Serialize every setting as a json object string."""
        return json.dumps({slot: getattr(self, slot) for slot in self.__slots__})

    def update_fom_debug_options(self, debug_options):
        """Update from the old-style debug options dict (already-parsed values)."""
        if "DEBUG_STDLIB" in debug_options:
            # DEBUG_STDLIB is the inverse of just_my_code.
            self.just_my_code = not debug_options.get("DEBUG_STDLIB")

        # Options copied verbatim into the matching attribute.
        direct_mappings = (
            ("REDIRECT_OUTPUT", "redirect_output"),
            ("SHOW_RETURN_VALUE", "show_return_value"),
            ("BREAK_SYSTEMEXIT_ZERO", "break_system_exit_zero"),
            ("DJANGO_DEBUG", "django_debug"),
            ("FLASK_DEBUG", "flask_debug"),
            ("STOP_ON_ENTRY", "stop_on_entry"),
            ("CLIENT_OS_TYPE", "client_os"),
        )
        for option_name, attr in direct_mappings:
            if option_name in debug_options:
                setattr(self, attr, debug_options.get(option_name))

        # Note: _max_exception_stack_frames cannot be set by debug options.

    def update_from_args(self, args):
        """Update from the DAP launch/attach arguments dict (raw values)."""
        if "justMyCode" in args:
            self.just_my_code = bool_parser(args["justMyCode"])
        elif "debugStdLib" in args:
            # i.e.: if justMyCode is provided, don't check the deprecated value
            self.just_my_code = not bool_parser(args["debugStdLib"])

        # Boolean arguments parsed with bool_parser ("jinja" intentionally
        # maps to the flask support too; order matters so jinja wins).
        bool_mappings = (
            ("redirectOutput", "redirect_output"),
            ("showReturnValue", "show_return_value"),
            ("breakOnSystemExitZero", "break_system_exit_zero"),
            ("django", "django_debug"),
            ("flask", "flask_debug"),
            ("jinja", "flask_debug"),
            ("stopOnEntry", "stop_on_entry"),
        )
        for arg_name, attr in bool_mappings:
            if arg_name in args:
                setattr(self, attr, bool_parser(args[arg_name]))

        self.max_exception_stack_frames = int_parser(args.get("maxExceptionStackFrames", 0))

        if "guiEventLoop" in args:
            self.gui_event_loop = str(args["guiEventLoop"])

        if "clientOS" in args:
            self.client_os = str(args["clientOS"]).upper()
|
||||
|
||||
|
||||
def int_parser(s, default_value=0):
    """Convert *s* to int, returning *default_value* on any conversion error."""
    try:
        value = int(s)
    except Exception:
        return default_value
    return value
|
||||
|
||||
|
||||
def bool_parser(s):
    """Return True only for the values treated as truthy in debug options."""
    # Tuple membership (==) on purpose: a set would raise on unhashable input.
    accepted = ("True", "true", "1", True, 1)
    return s in accepted
|
||||
|
||||
|
||||
def unquote(s):
    """URL-unquote *s*, passing None through unchanged."""
    if s is None:
        return None
    return urllib_parse.unquote(s)
|
||||
|
||||
|
||||
# Maps an old-style debug option key to the callable used to parse its
# raw string value (see _parse_debug_options).
DEBUG_OPTIONS_PARSER = {
    "WAIT_ON_ABNORMAL_EXIT": bool_parser,
    "WAIT_ON_NORMAL_EXIT": bool_parser,
    "BREAK_SYSTEMEXIT_ZERO": bool_parser,
    "REDIRECT_OUTPUT": bool_parser,
    "DJANGO_DEBUG": bool_parser,
    "FLASK_DEBUG": bool_parser,
    "FIX_FILE_PATH_CASE": bool_parser,
    "CLIENT_OS_TYPE": unquote,
    "DEBUG_STDLIB": bool_parser,
    "STOP_ON_ENTRY": bool_parser,
    "SHOW_RETURN_VALUE": bool_parser,
    "MULTIPROCESS": bool_parser,
}

# Maps a launch-config "debugOptions" flag name to its "KEY=value"
# representation (see _build_debug_options).
DEBUG_OPTIONS_BY_FLAG = {
    "RedirectOutput": "REDIRECT_OUTPUT=True",
    "WaitOnNormalExit": "WAIT_ON_NORMAL_EXIT=True",
    "WaitOnAbnormalExit": "WAIT_ON_ABNORMAL_EXIT=True",
    "BreakOnSystemExitZero": "BREAK_SYSTEMEXIT_ZERO=True",
    "Django": "DJANGO_DEBUG=True",
    "Flask": "FLASK_DEBUG=True",
    "Jinja": "FLASK_DEBUG=True",
    "FixFilePathCase": "FIX_FILE_PATH_CASE=True",
    "DebugStdLib": "DEBUG_STDLIB=True",
    "WindowsClient": "CLIENT_OS_TYPE=WINDOWS",
    "UnixClient": "CLIENT_OS_TYPE=UNIX",
    "StopOnEntry": "STOP_ON_ENTRY=True",
    "ShowReturnValue": "SHOW_RETURN_VALUE=True",
    "Multiprocess": "MULTIPROCESS=True",
}
|
||||
|
||||
|
||||
def _build_debug_options(flags):
    """Build string representation of debug options from the launch config."""
    known = [DEBUG_OPTIONS_BY_FLAG[flag] for flag in (flags or []) if flag in DEBUG_OPTIONS_BY_FLAG]
    return ";".join(known)
|
||||
|
||||
|
||||
def _parse_debug_options(opts):
    """Debug options are semicolon separated key=value pairs"""
    options = {}
    if not opts:
        return options

    for pair in opts.split(";"):
        # Entries without exactly one '=' are silently skipped.
        parts = pair.split("=")
        if len(parts) != 2:
            continue
        key, value = parts
        parser = DEBUG_OPTIONS_PARSER.get(key)
        if parser is None:
            # Unknown keys are ignored.
            continue
        options[key] = parser(value)

    return options
|
||||
|
||||
|
||||
def _extract_debug_options(opts, flags=None):
    """Return the debug options encoded in the given value.

    "opts" is a semicolon-separated string of "key=value" pairs.
    "flags" is a list of strings.

    If flags is provided then it is used as a fallback.

    The values come from the launch config:

        {
            type:'python',
            request:'launch'|'attach',
            name:'friendly name for debug config',
            debugOptions:[
                'RedirectOutput', 'Django'
            ],
            options:'REDIRECT_OUTPUT=True;DJANGO_DEBUG=True'
        }

    Further information can be found here:

        https://code.visualstudio.com/docs/editor/debugging#_launchjson-attributes
    """
    return _parse_debug_options(opts or _build_debug_options(flags))
|
||||
@@ -0,0 +1,150 @@
|
||||
from _pydevd_bundle.pydevd_constants import (
|
||||
DebugInfoHolder,
|
||||
get_global_debugger,
|
||||
GetGlobalDebugger,
|
||||
set_global_debugger,
|
||||
) # Keep for backward compatibility @UnusedImport
|
||||
from _pydevd_bundle.pydevd_utils import quote_smart as quote, to_string
|
||||
from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXIT
|
||||
from _pydevd_bundle.pydevd_constants import HTTP_PROTOCOL, HTTP_JSON_PROTOCOL, get_protocol, IS_JYTHON, ForkSafeLock
|
||||
import json
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
class _BaseNetCommand(object):
    """Base class for commands sent over the wire: everything is a no-op by default."""

    # Command id. Should be set in instance.
    id = -1

    # Dict representation of the command to be set in instance. Only set for json commands.
    as_dict = None

    def send(self, *args, **kwargs):
        # No-op: actual sending is implemented in NetCommand.
        pass

    def call_after_send(self, callback):
        # No-op: null commands are never actually sent, so callbacks are dropped.
        pass
|
||||
|
||||
|
||||
class _NullNetCommand(_BaseNetCommand):
    # Placeholder command: inherits the no-op send/call_after_send behavior.
    pass
|
||||
|
||||
|
||||
class _NullExitCommand(_NullNetCommand):
    # Null command tagged with CMD_EXIT so the writer can still recognize exit.
    id = CMD_EXIT
|
||||
|
||||
|
||||
# Constant meant to be passed to the writer when the command is meant to be ignored.
NULL_NET_COMMAND = _NullNetCommand()

# Exit command -- only internal (we don't want/need to send this to the IDE).
NULL_EXIT_COMMAND = _NullExitCommand()
|
||||
|
||||
|
||||
class NetCommand(_BaseNetCommand):
    """
    Commands received/sent over the network.

    Command can represent command received from the debugger,
    or one to be sent by daemon.
    """

    next_seq = 0  # sequence numbers

    # Recursion guard for _show_debug_info (see that method).
    _showing_debug_info = 0
    _show_debug_info_lock = ForkSafeLock(rlock=True)

    # Callbacks invoked right after this command is sent (None until
    # call_after_send() is used).
    _after_send = None

    def __init__(self, cmd_id, seq, text, is_json=False):
        """
        If sequence is 0, new sequence will be generated (otherwise, this was the response
        to a command from the client).
        """
        protocol = get_protocol()
        self.id = cmd_id
        if seq == 0:
            # Daemon-initiated commands use even sequence numbers.
            NetCommand.next_seq += 2
            seq = NetCommand.next_seq

        self.seq = seq

        if is_json:
            if hasattr(text, "to_dict"):
                as_dict = text.to_dict(update_ids_to_dap=True)
            else:
                assert isinstance(text, dict)
                as_dict = text
            as_dict["pydevd_cmd_id"] = cmd_id
            as_dict["seq"] = seq
            self.as_dict = as_dict
            try:
                text = json.dumps(as_dict)
            except TypeError:
                # Fallback: stringify values json can't serialize natively.
                text = json.dumps(as_dict, default=str)

        assert isinstance(text, str)

        if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1:
            self._show_debug_info(cmd_id, seq, text)

        if is_json:
            msg = text
        else:
            if protocol not in (HTTP_PROTOCOL, HTTP_JSON_PROTOCOL):
                # Plain wire protocol: quote the payload and terminate with \n.
                encoded = quote(to_string(text), '/<>_=" \t')
                msg = "%s\t%s\t%s\n" % (cmd_id, seq, encoded)

            else:
                msg = "%s\t%s\t%s" % (cmd_id, seq, text)

        if isinstance(msg, str):
            msg = msg.encode("utf-8")

        assert isinstance(msg, bytes)
        as_bytes = msg
        self._as_bytes = as_bytes

    def send(self, sock):
        # Write the serialized command to *sock*, adding a Content-Length
        # header for the http-based protocols.
        as_bytes = self._as_bytes
        try:
            if get_protocol() in (HTTP_PROTOCOL, HTTP_JSON_PROTOCOL):
                sock.sendall(("Content-Length: %s\r\n\r\n" % len(as_bytes)).encode("ascii"))
            sock.sendall(as_bytes)
            if self._after_send:
                for method in self._after_send:
                    method(sock)
        except:
            if IS_JYTHON:
                # Ignore errors in sock.sendall in Jython (seems to be common for Jython to
                # give spurious exceptions at interpreter shutdown here).
                pass
            else:
                raise

    def call_after_send(self, callback):
        # Register *callback* to be invoked (with the socket) right after send().
        if not self._after_send:
            self._after_send = [callback]
        else:
            self._after_send.append(callback)

    @classmethod
    def _show_debug_info(cls, cmd_id, seq, text):
        # Log the outgoing command for debugging purposes.
        with cls._show_debug_info_lock:
            # Only one thread each time (rlock).
            if cls._showing_debug_info:
                # avoid recursing in the same thread (just printing could create
                # a new command when redirecting output).
                return

            cls._showing_debug_info += 1
            try:
                out_message = "sending cmd (%s) --> " % (get_protocol(),)
                out_message += "%20s" % ID_TO_MEANING.get(str(cmd_id), "UNKNOWN")
                out_message += " "
                out_message += text.replace("\n", " ")
                try:
                    pydev_log.critical("%s\n", out_message)
                except:
                    pass
            finally:
                cls._showing_debug_info -= 1
|
||||
+586
@@ -0,0 +1,586 @@
|
||||
from functools import partial
|
||||
import itertools
|
||||
import os
|
||||
import sys
|
||||
import socket as socket_module
|
||||
|
||||
from _pydev_bundle._pydev_imports_tipper import TYPE_IMPORT, TYPE_CLASS, TYPE_FUNCTION, TYPE_ATTR, TYPE_BUILTIN, TYPE_PARAM
|
||||
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
|
||||
from _pydev_bundle.pydev_override import overrides
|
||||
from _pydevd_bundle._debug_adapter import pydevd_schema
|
||||
from _pydevd_bundle._debug_adapter.pydevd_schema import (
|
||||
ModuleEvent,
|
||||
ModuleEventBody,
|
||||
Module,
|
||||
OutputEventBody,
|
||||
OutputEvent,
|
||||
ContinuedEventBody,
|
||||
ExitedEventBody,
|
||||
ExitedEvent,
|
||||
)
|
||||
from _pydevd_bundle.pydevd_comm_constants import (
|
||||
CMD_THREAD_CREATE,
|
||||
CMD_RETURN,
|
||||
CMD_MODULE_EVENT,
|
||||
CMD_WRITE_TO_CONSOLE,
|
||||
CMD_STEP_INTO,
|
||||
CMD_STEP_INTO_MY_CODE,
|
||||
CMD_STEP_OVER,
|
||||
CMD_STEP_OVER_MY_CODE,
|
||||
CMD_STEP_RETURN,
|
||||
CMD_STEP_CAUGHT_EXCEPTION,
|
||||
CMD_ADD_EXCEPTION_BREAK,
|
||||
CMD_SET_BREAK,
|
||||
CMD_SET_NEXT_STATEMENT,
|
||||
CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION,
|
||||
CMD_THREAD_RESUME_SINGLE_NOTIFICATION,
|
||||
CMD_THREAD_KILL,
|
||||
CMD_STOP_ON_START,
|
||||
CMD_INPUT_REQUESTED,
|
||||
CMD_EXIT,
|
||||
CMD_STEP_INTO_COROUTINE,
|
||||
CMD_STEP_RETURN_MY_CODE,
|
||||
CMD_SMART_STEP_INTO,
|
||||
CMD_SET_FUNCTION_BREAK,
|
||||
CMD_THREAD_RUN,
|
||||
)
|
||||
from _pydevd_bundle.pydevd_constants import get_thread_id, ForkSafeLock, DebugInfoHolder
|
||||
from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND
|
||||
from _pydevd_bundle.pydevd_net_command_factory_xml import NetCommandFactory
|
||||
from _pydevd_bundle.pydevd_utils import get_non_pydevd_threads
|
||||
import pydevd_file_utils
|
||||
from _pydevd_bundle.pydevd_comm import build_exception_info_response
|
||||
from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info
|
||||
from _pydevd_bundle import pydevd_frame_utils, pydevd_constants, pydevd_utils
|
||||
import linecache
|
||||
from io import StringIO
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
class ModulesManager(object):
    """Tracks which module files were already reported to the client as Module events."""

    def __init__(self):
        self._lock = ForkSafeLock()
        # Maps filename (utf-8 str) -> dict version of the Module already sent.
        self._modules = {}
        # Generates sequential module ids (0, 1, 2, ...).
        self._next_id = partial(next, itertools.count(0))

    def track_module(self, filename_in_utf8, module_name, frame):
        """
        :return list(NetCommand):
            Returns a list with the module events to be sent (empty when the
            module was already tracked).
        """
        # Fast path without the lock (dict membership check is atomic enough here).
        if filename_in_utf8 in self._modules:
            return []

        module_events = []
        with self._lock:
            # Must check again after getting the lock.
            if filename_in_utf8 in self._modules:
                # Bugfix: must return a list here too -- callers extend() the
                # return value, so returning None (the previous behavior in
                # this race) would raise a TypeError.
                return []

            try:
                version = str(frame.f_globals.get("__version__", ""))
            except:
                version = "<unknown>"

            try:
                package_name = str(frame.f_globals.get("__package__", ""))
            except:
                package_name = "<unknown>"

            module_id = self._next_id()

            module = Module(module_id, module_name, filename_in_utf8)
            if version:
                module.version = version

            if package_name:
                # Note: package doesn't appear in the docs but seems to be expected?
                module.kwargs["package"] = package_name

            module_event = ModuleEvent(ModuleEventBody("new", module))

            module_events.append(NetCommand(CMD_MODULE_EVENT, 0, module_event, is_json=True))

            self._modules[filename_in_utf8] = module.to_dict()
        return module_events

    def get_modules_info(self):
        """
        :return list(Module)
        """
        with self._lock:
            return list(self._modules.values())
|
||||
|
||||
|
||||
class NetCommandFactoryJson(NetCommandFactory):
|
||||
"""
|
||||
Factory for commands which will provide messages as json (they should be
|
||||
similar to the debug adapter where possible, although some differences
|
||||
are currently Ok).
|
||||
|
||||
Note that it currently overrides the xml version so that messages
|
||||
can be done one at a time (any message not overridden will currently
|
||||
use the xml version) -- after having all messages handled, it should
|
||||
no longer use NetCommandFactory as the base class.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
NetCommandFactory.__init__(self)
|
||||
self.modules_manager = ModulesManager()
|
||||
|
||||
@overrides(NetCommandFactory.make_version_message)
|
||||
def make_version_message(self, seq):
|
||||
return NULL_NET_COMMAND # Not a part of the debug adapter protocol
|
||||
|
||||
@overrides(NetCommandFactory.make_protocol_set_message)
|
||||
def make_protocol_set_message(self, seq):
|
||||
return NULL_NET_COMMAND # Not a part of the debug adapter protocol
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_created_message)
|
||||
def make_thread_created_message(self, thread):
|
||||
# Note: the thread id for the debug adapter must be an int
|
||||
# (make the actual id from get_thread_id respect that later on).
|
||||
msg = pydevd_schema.ThreadEvent(
|
||||
pydevd_schema.ThreadEventBody("started", get_thread_id(thread)),
|
||||
)
|
||||
|
||||
return NetCommand(CMD_THREAD_CREATE, 0, msg, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_custom_frame_created_message)
|
||||
def make_custom_frame_created_message(self, frame_id, frame_description):
|
||||
self._additional_thread_id_to_thread_name[frame_id] = frame_description
|
||||
msg = pydevd_schema.ThreadEvent(
|
||||
pydevd_schema.ThreadEventBody("started", frame_id),
|
||||
)
|
||||
|
||||
return NetCommand(CMD_THREAD_CREATE, 0, msg, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_killed_message)
|
||||
def make_thread_killed_message(self, tid):
|
||||
self._additional_thread_id_to_thread_name.pop(tid, None)
|
||||
msg = pydevd_schema.ThreadEvent(
|
||||
pydevd_schema.ThreadEventBody("exited", tid),
|
||||
)
|
||||
|
||||
return NetCommand(CMD_THREAD_KILL, 0, msg, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_list_threads_message)
|
||||
def make_list_threads_message(self, py_db, seq):
|
||||
threads = []
|
||||
for thread in get_non_pydevd_threads():
|
||||
if is_thread_alive(thread):
|
||||
thread_id = get_thread_id(thread)
|
||||
|
||||
# Notify that it's created (no-op if we already notified before).
|
||||
py_db.notify_thread_created(thread_id, thread)
|
||||
|
||||
thread_schema = pydevd_schema.Thread(id=thread_id, name=thread.name)
|
||||
threads.append(thread_schema.to_dict())
|
||||
|
||||
for thread_id, thread_name in list(self._additional_thread_id_to_thread_name.items()):
|
||||
thread_schema = pydevd_schema.Thread(id=thread_id, name=thread_name)
|
||||
threads.append(thread_schema.to_dict())
|
||||
|
||||
body = pydevd_schema.ThreadsResponseBody(threads)
|
||||
response = pydevd_schema.ThreadsResponse(request_seq=seq, success=True, command="threads", body=body)
|
||||
|
||||
return NetCommand(CMD_RETURN, 0, response, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_get_completions_message)
|
||||
def make_get_completions_message(self, seq, completions, qualifier, start):
|
||||
COMPLETION_TYPE_LOOK_UP = {
|
||||
TYPE_IMPORT: pydevd_schema.CompletionItemType.MODULE,
|
||||
TYPE_CLASS: pydevd_schema.CompletionItemType.CLASS,
|
||||
TYPE_FUNCTION: pydevd_schema.CompletionItemType.FUNCTION,
|
||||
TYPE_ATTR: pydevd_schema.CompletionItemType.FIELD,
|
||||
TYPE_BUILTIN: pydevd_schema.CompletionItemType.KEYWORD,
|
||||
TYPE_PARAM: pydevd_schema.CompletionItemType.VARIABLE,
|
||||
}
|
||||
|
||||
qualifier = qualifier.lower()
|
||||
qualifier_len = len(qualifier)
|
||||
targets = []
|
||||
for completion in completions:
|
||||
label = completion[0]
|
||||
if label.lower().startswith(qualifier):
|
||||
completion = pydevd_schema.CompletionItem(
|
||||
label=label, type=COMPLETION_TYPE_LOOK_UP[completion[3]], start=start, length=qualifier_len
|
||||
)
|
||||
targets.append(completion.to_dict())
|
||||
|
||||
body = pydevd_schema.CompletionsResponseBody(targets)
|
||||
response = pydevd_schema.CompletionsResponse(request_seq=seq, success=True, command="completions", body=body)
|
||||
return NetCommand(CMD_RETURN, 0, response, is_json=True)
|
||||
|
||||
def _format_frame_name(self, fmt, initial_name, module_name, line, path):
|
||||
if fmt is None:
|
||||
return initial_name
|
||||
frame_name = initial_name
|
||||
if fmt.get("module", False):
|
||||
if module_name:
|
||||
if initial_name == "<module>":
|
||||
frame_name = module_name
|
||||
else:
|
||||
frame_name = "%s.%s" % (module_name, initial_name)
|
||||
else:
|
||||
basename = os.path.basename(path)
|
||||
basename = basename[0:-3] if basename.lower().endswith(".py") else basename
|
||||
if initial_name == "<module>":
|
||||
frame_name = "%s in %s" % (initial_name, basename)
|
||||
else:
|
||||
frame_name = "%s.%s" % (basename, initial_name)
|
||||
|
||||
if fmt.get("line", False):
|
||||
frame_name = "%s : %d" % (frame_name, line)
|
||||
|
||||
return frame_name
|
||||
|
||||
    @overrides(NetCommandFactory.make_get_thread_stack_message)
    def make_get_thread_stack_message(self, py_db, seq, thread_id, topmost_frame, fmt, must_be_suspended=False, start_frame=0, levels=0):
        """Build the DAP 'stackTrace' response for *thread_id*.

        Walks the visible frames (file-filtered, path-mapped), formats each
        frame's name/location, resolves a source reference when the file is
        not directly available to the client, and paginates with
        *start_frame*/*levels*.

        :param must_be_suspended: when True and no suspended frames are
            registered for the thread, returns None instead of a response.
        :return: a NetCommand wrapping a StackTraceResponse, or None (above).
        """
        frames = []
        module_events = []

        try:
            # : :type suspended_frames_manager: SuspendedFramesManager
            suspended_frames_manager = py_db.suspended_frames_manager
            frames_list = suspended_frames_manager.get_frames_list(thread_id)
            if frames_list is None:
                # Could not find stack of suspended frame...
                if must_be_suspended:
                    return None
                else:
                    frames_list = pydevd_frame_utils.create_frames_list_from_frame(topmost_frame)

            for (
                frame_id,
                frame,
                method_name,
                original_filename,
                filename_in_utf8,
                lineno,
                applied_mapping,
                show_as_current_frame,
                line_col_info,
            ) in self._iter_visible_frames_info(py_db, frames_list, flatten_chained=True):
                try:
                    module_name = str(frame.f_globals.get("__name__", ""))
                except:
                    module_name = "<unknown>"

                # Collect module events now; they are only written out after
                # the full walk (see the loop over module_events below).
                module_events.extend(self.modules_manager.track_module(filename_in_utf8, module_name, frame))

                presentation_hint = None
                if not getattr(frame, "IS_PLUGIN_FRAME", False):  # Never filter out plugin frames!
                    if py_db.is_files_filter_enabled and py_db.apply_files_filter(frame, original_filename, False):
                        continue

                    if not py_db.in_project_scope(frame):
                        # Out-of-project frames are rendered de-emphasized.
                        presentation_hint = "subtle"

                formatted_name = self._format_frame_name(fmt, method_name, module_name, lineno, filename_in_utf8)
                if show_as_current_frame:
                    formatted_name += " (Current frame)"
                source_reference = pydevd_file_utils.get_client_filename_source_reference(filename_in_utf8)

                if not source_reference and not applied_mapping and not os.path.exists(original_filename):
                    if getattr(frame.f_code, "co_lines", None) or getattr(frame.f_code, "co_lnotab", None):
                        # Create a source-reference to be used where we provide the source by decompiling the code.
                        # Note: When the time comes to retrieve the source reference in this case, we'll
                        # check the linecache first (see: get_decompiled_source_from_frame_id).
                        source_reference = pydevd_file_utils.create_source_reference_for_frame_id(frame_id, original_filename)
                    else:
                        # Check if someone added a source reference to the linecache (Python attrs does this).
                        if linecache.getline(original_filename, 1):
                            source_reference = pydevd_file_utils.create_source_reference_for_linecache(original_filename)

                # Map 0-based column info (when present) to DAP's 1-based columns.
                column = 1
                endcol = None
                if line_col_info is not None:
                    try:
                        line_text = linecache.getline(original_filename, lineno)
                    except:
                        if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                            pydev_log.exception("Unable to get line from linecache for file: %s", original_filename)
                    else:
                        if line_text:
                            colno, endcolno = line_col_info.map_columns_to_line(line_text)
                            column = colno + 1
                            if line_col_info.lineno == line_col_info.end_lineno:
                                endcol = endcolno + 1

                frames.append(
                    pydevd_schema.StackFrame(
                        frame_id,
                        formatted_name,
                        lineno,
                        column=column,
                        endColumn=endcol,
                        source={
                            "path": filename_in_utf8,
                            "sourceReference": source_reference,
                        },
                        presentationHint=presentation_hint,
                    ).to_dict()
                )
        finally:
            # Drop the frame reference promptly (frames can keep many objects alive).
            topmost_frame = None

        for module_event in module_events:
            py_db.writer.add_command(module_event)

        total_frames = len(frames)
        stack_frames = frames
        if bool(levels):
            # Client requested a page of the stack rather than the whole thing.
            start = start_frame
            end = min(start + levels, total_frames)
            stack_frames = frames[start:end]

        response = pydevd_schema.StackTraceResponse(
            request_seq=seq,
            success=True,
            command="stackTrace",
            body=pydevd_schema.StackTraceResponseBody(stackFrames=stack_frames, totalFrames=total_frames),
        )
        return NetCommand(CMD_RETURN, 0, response, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_warning_message)
|
||||
def make_warning_message(self, msg):
|
||||
category = "important"
|
||||
body = OutputEventBody(msg, category)
|
||||
event = OutputEvent(body)
|
||||
return NetCommand(CMD_WRITE_TO_CONSOLE, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_io_message)
|
||||
def make_io_message(self, msg, ctx):
|
||||
category = "stdout" if int(ctx) == 1 else "stderr"
|
||||
body = OutputEventBody(msg, category)
|
||||
event = OutputEvent(body)
|
||||
return NetCommand(CMD_WRITE_TO_CONSOLE, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_console_message)
|
||||
def make_console_message(self, msg):
|
||||
category = "console"
|
||||
body = OutputEventBody(msg, category)
|
||||
event = OutputEvent(body)
|
||||
return NetCommand(CMD_WRITE_TO_CONSOLE, 0, event, is_json=True)
|
||||
|
||||
_STEP_REASONS = set(
|
||||
[
|
||||
CMD_STEP_INTO,
|
||||
CMD_STEP_INTO_MY_CODE,
|
||||
CMD_STEP_OVER,
|
||||
CMD_STEP_OVER_MY_CODE,
|
||||
CMD_STEP_RETURN,
|
||||
CMD_STEP_RETURN_MY_CODE,
|
||||
CMD_STEP_INTO_MY_CODE,
|
||||
CMD_STOP_ON_START,
|
||||
CMD_STEP_INTO_COROUTINE,
|
||||
CMD_SMART_STEP_INTO,
|
||||
]
|
||||
)
|
||||
_EXCEPTION_REASONS = set(
|
||||
[
|
||||
CMD_STEP_CAUGHT_EXCEPTION,
|
||||
CMD_ADD_EXCEPTION_BREAK,
|
||||
]
|
||||
)
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_suspend_single_notification)
|
||||
def make_thread_suspend_single_notification(self, py_db, thread_id, thread, stop_reason):
|
||||
exc_desc = None
|
||||
exc_name = None
|
||||
info = set_additional_thread_info(thread)
|
||||
|
||||
preserve_focus_hint = False
|
||||
if stop_reason in self._STEP_REASONS:
|
||||
if info.pydev_original_step_cmd == CMD_STOP_ON_START:
|
||||
# Just to make sure that's not set as the original reason anymore.
|
||||
info.pydev_original_step_cmd = -1
|
||||
stop_reason = "entry"
|
||||
else:
|
||||
stop_reason = "step"
|
||||
elif stop_reason in self._EXCEPTION_REASONS:
|
||||
stop_reason = "exception"
|
||||
elif stop_reason == CMD_SET_BREAK:
|
||||
stop_reason = "breakpoint"
|
||||
elif stop_reason == CMD_SET_FUNCTION_BREAK:
|
||||
stop_reason = "function breakpoint"
|
||||
elif stop_reason == CMD_SET_NEXT_STATEMENT:
|
||||
stop_reason = "goto"
|
||||
else:
|
||||
stop_reason = "pause"
|
||||
preserve_focus_hint = True
|
||||
|
||||
if stop_reason == "exception":
|
||||
exception_info_response = build_exception_info_response(
|
||||
py_db, thread_id, thread, -1, set_additional_thread_info, self._iter_visible_frames_info, max_frames=-1
|
||||
)
|
||||
exception_info_response
|
||||
|
||||
exc_name = exception_info_response.body.exceptionId
|
||||
exc_desc = exception_info_response.body.description
|
||||
|
||||
body = pydevd_schema.StoppedEventBody(
|
||||
reason=stop_reason,
|
||||
description=exc_desc,
|
||||
threadId=thread_id,
|
||||
text=exc_name,
|
||||
allThreadsStopped=True,
|
||||
preserveFocusHint=preserve_focus_hint,
|
||||
)
|
||||
event = pydevd_schema.StoppedEvent(body)
|
||||
return NetCommand(CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_resume_single_notification)
|
||||
def make_thread_resume_single_notification(self, thread_id):
|
||||
body = ContinuedEventBody(threadId=thread_id, allThreadsContinued=True)
|
||||
event = pydevd_schema.ContinuedEvent(body)
|
||||
return NetCommand(CMD_THREAD_RESUME_SINGLE_NOTIFICATION, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_set_next_stmnt_status_message)
|
||||
def make_set_next_stmnt_status_message(self, seq, is_success, exception_msg):
|
||||
response = pydevd_schema.GotoResponse(
|
||||
request_seq=int(seq), success=is_success, command="goto", body={}, message=(None if is_success else exception_msg)
|
||||
)
|
||||
return NetCommand(CMD_RETURN, 0, response, is_json=True)
|
||||
|
||||
    @overrides(NetCommandFactory.make_send_curr_exception_trace_message)
    def make_send_curr_exception_trace_message(self, *args, **kwargs):
        """No-op override: this command has no DAP equivalent."""
        return NULL_NET_COMMAND  # Not a part of the debug adapter protocol
|
||||
|
||||
    @overrides(NetCommandFactory.make_send_curr_exception_trace_proceeded_message)
    def make_send_curr_exception_trace_proceeded_message(self, *args, **kwargs):
        """No-op override: this command has no DAP equivalent."""
        return NULL_NET_COMMAND  # Not a part of the debug adapter protocol
|
||||
|
||||
    @overrides(NetCommandFactory.make_send_breakpoint_exception_message)
    def make_send_breakpoint_exception_message(self, *args, **kwargs):
        """No-op override: this command has no DAP equivalent."""
        return NULL_NET_COMMAND  # Not a part of the debug adapter protocol
|
||||
|
||||
    @overrides(NetCommandFactory.make_process_created_message)
    def make_process_created_message(self, *args, **kwargs):
        """No-op override: this command has no DAP equivalent."""
        return NULL_NET_COMMAND  # Not a part of the debug adapter protocol
|
||||
|
||||
    @overrides(NetCommandFactory.make_process_about_to_be_replaced_message)
    def make_process_about_to_be_replaced_message(self):
        """Signal (via ExitedEvent) that this process is about to be replaced.

        exitCode -1 plus pydevdReason="processReplaced" distinguishes this
        from a real process exit.
        """
        event = ExitedEvent(ExitedEventBody(-1, pydevdReason="processReplaced"))

        cmd = NetCommand(CMD_RETURN, 0, event, is_json=True)

        def after_send(socket):
            # Set TCP_NODELAY right after sending — presumably so the event is
            # pushed out before the process is replaced; TODO confirm intent.
            socket.setsockopt(socket_module.IPPROTO_TCP, socket_module.TCP_NODELAY, 1)

        cmd.call_after_send(after_send)
        return cmd
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_suspend_message)
|
||||
def make_thread_suspend_message(self, py_db, thread_id, frames_list, stop_reason, message, trace_suspend_type, thread, info):
|
||||
from _pydevd_bundle.pydevd_comm_constants import CMD_THREAD_SUSPEND
|
||||
|
||||
if py_db.multi_threads_single_notification:
|
||||
pydev_log.debug("Skipping per-thread thread suspend notification.")
|
||||
return NULL_NET_COMMAND # Don't send per-thread, send a single one.
|
||||
pydev_log.debug("Sending per-thread thread suspend notification (stop_reason: %s)", stop_reason)
|
||||
|
||||
exc_desc = None
|
||||
exc_name = None
|
||||
preserve_focus_hint = False
|
||||
if stop_reason in self._STEP_REASONS:
|
||||
if info.pydev_original_step_cmd == CMD_STOP_ON_START:
|
||||
# Just to make sure that's not set as the original reason anymore.
|
||||
info.pydev_original_step_cmd = -1
|
||||
stop_reason = "entry"
|
||||
else:
|
||||
stop_reason = "step"
|
||||
elif stop_reason in self._EXCEPTION_REASONS:
|
||||
stop_reason = "exception"
|
||||
elif stop_reason == CMD_SET_BREAK:
|
||||
stop_reason = "breakpoint"
|
||||
elif stop_reason == CMD_SET_FUNCTION_BREAK:
|
||||
stop_reason = "function breakpoint"
|
||||
elif stop_reason == CMD_SET_NEXT_STATEMENT:
|
||||
stop_reason = "goto"
|
||||
else:
|
||||
stop_reason = "pause"
|
||||
preserve_focus_hint = True
|
||||
|
||||
if stop_reason == "exception":
|
||||
exception_info_response = build_exception_info_response(
|
||||
py_db, thread_id, thread, -1, set_additional_thread_info, self._iter_visible_frames_info, max_frames=-1
|
||||
)
|
||||
exception_info_response
|
||||
|
||||
exc_name = exception_info_response.body.exceptionId
|
||||
exc_desc = exception_info_response.body.description
|
||||
|
||||
body = pydevd_schema.StoppedEventBody(
|
||||
reason=stop_reason,
|
||||
description=exc_desc,
|
||||
threadId=thread_id,
|
||||
text=exc_name,
|
||||
allThreadsStopped=False,
|
||||
preserveFocusHint=preserve_focus_hint,
|
||||
)
|
||||
event = pydevd_schema.StoppedEvent(body)
|
||||
return NetCommand(CMD_THREAD_SUSPEND, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_thread_run_message)
|
||||
def make_thread_run_message(self, py_db, thread_id, reason):
|
||||
if py_db.multi_threads_single_notification:
|
||||
return NULL_NET_COMMAND # Don't send per-thread, send a single one.
|
||||
body = ContinuedEventBody(threadId=thread_id, allThreadsContinued=False)
|
||||
event = pydevd_schema.ContinuedEvent(body)
|
||||
return NetCommand(CMD_THREAD_RUN, 0, event, is_json=True)
|
||||
|
||||
    @overrides(NetCommandFactory.make_reloaded_code_message)
    def make_reloaded_code_message(self, *args, **kwargs):
        """No-op override: this command has no DAP equivalent."""
        return NULL_NET_COMMAND  # Not a part of the debug adapter protocol
|
||||
|
||||
@overrides(NetCommandFactory.make_input_requested_message)
|
||||
def make_input_requested_message(self, started):
|
||||
event = pydevd_schema.PydevdInputRequestedEvent(body={})
|
||||
return NetCommand(CMD_INPUT_REQUESTED, 0, event, is_json=True)
|
||||
|
||||
@overrides(NetCommandFactory.make_skipped_step_in_because_of_filters)
|
||||
def make_skipped_step_in_because_of_filters(self, py_db, frame):
|
||||
msg = "Frame skipped from debugging during step-in."
|
||||
if py_db.get_use_libraries_filter():
|
||||
msg += (
|
||||
'\nNote: may have been skipped because of "justMyCode" option (default == true). '
|
||||
'Try setting "justMyCode": false in the debug configuration (e.g., launch.json).\n'
|
||||
)
|
||||
return self.make_warning_message(msg)
|
||||
|
||||
    @overrides(NetCommandFactory.make_evaluation_timeout_msg)
    def make_evaluation_timeout_msg(self, py_db, expression, curr_thread):
        """Warn that evaluating *expression* exceeded the configured timeout.

        Optionally appends a full thread dump when
        PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT is enabled.
        """
        msg = """Evaluating: %s did not finish after %.2f seconds.
This may mean a number of things:
- This evaluation is really slow and this is expected.
    In this case it's possible to silence this error by raising the timeout, setting the
    PYDEVD_WARN_EVALUATION_TIMEOUT environment variable to a bigger value.

- The evaluation may need other threads running while it's running:
    In this case, it's possible to set the PYDEVD_UNBLOCK_THREADS_TIMEOUT
    environment variable so that if after a given timeout an evaluation doesn't finish,
    other threads are unblocked or you can manually resume all threads.

    Alternatively, it's also possible to skip breaking on a particular thread by setting a
    `pydev_do_not_trace = True` attribute in the related threading.Thread instance
    (if some thread should always be running and no breakpoints are expected to be hit in it).

- The evaluation is deadlocked:
    In this case you may set the PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT
    environment variable to true so that a thread dump is shown along with this message and
    optionally, set the PYDEVD_INTERRUPT_THREAD_TIMEOUT to some value so that the debugger
    tries to interrupt the evaluation (if possible) when this happens.
""" % (expression, pydevd_constants.PYDEVD_WARN_EVALUATION_TIMEOUT)

        if pydevd_constants.PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT:
            stream = StringIO()
            pydevd_utils.dump_threads(stream, show_pydevd_threads=False)
            msg += "\n\n%s\n" % stream.getvalue()
        return self.make_warning_message(msg)
|
||||
|
||||
@overrides(NetCommandFactory.make_exit_command)
|
||||
def make_exit_command(self, py_db):
|
||||
event = pydevd_schema.TerminatedEvent(pydevd_schema.TerminatedEventBody())
|
||||
return NetCommand(CMD_EXIT, 0, event, is_json=True)
|
||||
+558
@@ -0,0 +1,558 @@
|
||||
import json
|
||||
|
||||
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
|
||||
from _pydev_bundle._pydev_saved_modules import thread
|
||||
from _pydevd_bundle import pydevd_xml, pydevd_frame_utils, pydevd_constants, pydevd_utils
|
||||
from _pydevd_bundle.pydevd_comm_constants import (
|
||||
CMD_THREAD_CREATE,
|
||||
CMD_THREAD_KILL,
|
||||
CMD_THREAD_SUSPEND,
|
||||
CMD_THREAD_RUN,
|
||||
CMD_GET_VARIABLE,
|
||||
CMD_EVALUATE_EXPRESSION,
|
||||
CMD_GET_FRAME,
|
||||
CMD_WRITE_TO_CONSOLE,
|
||||
CMD_GET_COMPLETIONS,
|
||||
CMD_LOAD_SOURCE,
|
||||
CMD_SET_NEXT_STATEMENT,
|
||||
CMD_EXIT,
|
||||
CMD_GET_FILE_CONTENTS,
|
||||
CMD_EVALUATE_CONSOLE_EXPRESSION,
|
||||
CMD_RUN_CUSTOM_OPERATION,
|
||||
CMD_GET_BREAKPOINT_EXCEPTION,
|
||||
CMD_SEND_CURR_EXCEPTION_TRACE,
|
||||
CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED,
|
||||
CMD_SHOW_CONSOLE,
|
||||
CMD_GET_ARRAY,
|
||||
CMD_INPUT_REQUESTED,
|
||||
CMD_GET_DESCRIPTION,
|
||||
CMD_PROCESS_CREATED,
|
||||
CMD_SHOW_CYTHON_WARNING,
|
||||
CMD_LOAD_FULL_VALUE,
|
||||
CMD_GET_THREAD_STACK,
|
||||
CMD_GET_EXCEPTION_DETAILS,
|
||||
CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION,
|
||||
CMD_THREAD_RESUME_SINGLE_NOTIFICATION,
|
||||
CMD_GET_NEXT_STATEMENT_TARGETS,
|
||||
CMD_VERSION,
|
||||
CMD_RETURN,
|
||||
CMD_SET_PROTOCOL,
|
||||
CMD_ERROR,
|
||||
MAX_IO_MSG_SIZE,
|
||||
VERSION_STRING,
|
||||
CMD_RELOAD_CODE,
|
||||
CMD_LOAD_SOURCE_FROM_FRAME_ID,
|
||||
)
|
||||
from _pydevd_bundle.pydevd_constants import (
|
||||
DebugInfoHolder,
|
||||
get_thread_id,
|
||||
get_global_debugger,
|
||||
GetGlobalDebugger,
|
||||
set_global_debugger,
|
||||
) # Keep for backward compatibility @UnusedImport
|
||||
from _pydevd_bundle.pydevd_net_command import NetCommand, NULL_NET_COMMAND, NULL_EXIT_COMMAND
|
||||
from _pydevd_bundle.pydevd_utils import quote_smart as quote, get_non_pydevd_threads
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
|
||||
import pydevd_file_utils
|
||||
from pydevd_tracing import get_exception_traceback_str
|
||||
from _pydev_bundle._pydev_completer import completions_to_xml
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle.pydevd_frame_utils import FramesList
|
||||
from io import StringIO
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# NetCommandFactory
|
||||
# =======================================================================================================================
|
||||
class NetCommandFactory(object):
|
||||
    def __init__(self):
        # Maps custom-frame ids (presented to clients as pseudo-threads) to
        # their display names; entries are added by
        # make_custom_frame_created_message and removed by
        # make_thread_killed_message.
        self._additional_thread_id_to_thread_name = {}
|
||||
|
||||
def _thread_to_xml(self, thread):
|
||||
"""thread information as XML"""
|
||||
name = pydevd_xml.make_valid_xml_value(thread.name)
|
||||
cmd_text = '<thread name="%s" id="%s" />' % (quote(name), get_thread_id(thread))
|
||||
return cmd_text
|
||||
|
||||
def make_error_message(self, seq, text):
|
||||
cmd = NetCommand(CMD_ERROR, seq, text)
|
||||
if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2:
|
||||
pydev_log.error("Error: %s" % (text,))
|
||||
return cmd
|
||||
|
||||
    def make_protocol_set_message(self, seq):
        """Acknowledge a protocol-change request (empty CMD_SET_PROTOCOL)."""
        return NetCommand(CMD_SET_PROTOCOL, seq, "")
|
||||
|
||||
    def make_thread_created_message(self, thread):
        """Notify the client that *thread* was created (XML payload)."""
        cmdText = "<xml>" + self._thread_to_xml(thread) + "</xml>"
        return NetCommand(CMD_THREAD_CREATE, 0, cmdText)
|
||||
|
||||
def make_process_created_message(self):
|
||||
cmdText = "<process/>"
|
||||
return NetCommand(CMD_PROCESS_CREATED, 0, cmdText)
|
||||
|
||||
    def make_process_about_to_be_replaced_message(self):
        # Not supported by the XML protocol; subclasses may override.
        return NULL_NET_COMMAND
|
||||
|
||||
    def make_show_cython_warning_message(self):
        """Ask the IDE to show the 'cython speedups unavailable' warning."""
        try:
            return NetCommand(CMD_SHOW_CYTHON_WARNING, 0, "")
        except:
            return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_custom_frame_created_message(self, frame_id, frame_description):
|
||||
self._additional_thread_id_to_thread_name[frame_id] = frame_description
|
||||
frame_description = pydevd_xml.make_valid_xml_value(frame_description)
|
||||
return NetCommand(CMD_THREAD_CREATE, 0, '<xml><thread name="%s" id="%s"/></xml>' % (frame_description, frame_id))
|
||||
|
||||
def make_list_threads_message(self, py_db, seq):
|
||||
"""returns thread listing as XML"""
|
||||
try:
|
||||
threads = get_non_pydevd_threads()
|
||||
cmd_text = ["<xml>"]
|
||||
append = cmd_text.append
|
||||
for thread in threads:
|
||||
if is_thread_alive(thread):
|
||||
append(self._thread_to_xml(thread))
|
||||
|
||||
for thread_id, thread_name in list(self._additional_thread_id_to_thread_name.items()):
|
||||
name = pydevd_xml.make_valid_xml_value(thread_name)
|
||||
append('<thread name="%s" id="%s" />' % (quote(name), thread_id))
|
||||
|
||||
append("</xml>")
|
||||
return NetCommand(CMD_RETURN, seq, "".join(cmd_text))
|
||||
except:
|
||||
return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_thread_stack_message(self, py_db, seq, thread_id, topmost_frame, fmt, must_be_suspended=False, start_frame=0, levels=0):
        """
        Returns thread stack as XML.

        :param must_be_suspended: If True and the thread is not suspended, returns None.

        Note: fmt, start_frame and levels are accepted for interface
        compatibility with the JSON factory but are unused here.
        """
        try:
            # If frame is None, the return is an empty frame list.
            cmd_text = ['<xml><thread id="%s">' % (thread_id,)]

            if topmost_frame is not None:
                try:
                    # : :type suspended_frames_manager: SuspendedFramesManager
                    suspended_frames_manager = py_db.suspended_frames_manager
                    frames_list = suspended_frames_manager.get_frames_list(thread_id)
                    if frames_list is None:
                        # Could not find stack of suspended frame...
                        if must_be_suspended:
                            return None
                        else:
                            frames_list = pydevd_frame_utils.create_frames_list_from_frame(topmost_frame)

                    cmd_text.append(self.make_thread_stack_str(py_db, frames_list))
                finally:
                    # Drop the frame reference promptly (frames can keep many objects alive).
                    topmost_frame = None
            cmd_text.append("</thread></xml>")
            return NetCommand(CMD_GET_THREAD_STACK, seq, "".join(cmd_text))
        except:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_variable_changed_message(self, seq, payload):
        # Notify the debugger front-end that the value was changed successfully.
        return NetCommand(CMD_RETURN, seq, payload)
|
||||
|
||||
    def make_warning_message(self, msg):
        # In the XML protocol warnings are routed to stderr (ctx == 2).
        return self.make_io_message(msg, 2)
|
||||
|
||||
    def make_console_message(self, msg):
        # In the XML protocol console output is routed to stderr (ctx == 2).
        return self.make_io_message(msg, 2)
|
||||
|
||||
    def make_io_message(self, msg, ctx):
        """
        @param msg: the message to pass to the debug server
        @param ctx: 1 for stdio 2 for stderr
        """
        try:
            msg = pydevd_constants.as_str(msg)

            # Truncate very large payloads so a single message cannot flood
            # the wire; the ellipsis marks the cut.
            if len(msg) > MAX_IO_MSG_SIZE:
                msg = msg[0:MAX_IO_MSG_SIZE]
                msg += "..."

            msg = pydevd_xml.make_valid_xml_value(quote(msg, "/>_= "))
            # NOTE(review): the command id is wrapped in str() here, unlike
            # every other NetCommand call in this class — looks intentional
            # legacy behavior; confirm before normalizing.
            return NetCommand(str(CMD_WRITE_TO_CONSOLE), 0, '<xml><io s="%s" ctx="%s"/></xml>' % (msg, ctx))
        except:
            return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
    def make_version_message(self, seq):
        """Report the debugger backend version string (CMD_VERSION)."""
        try:
            return NetCommand(CMD_VERSION, seq, VERSION_STRING)
        except:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_thread_killed_message(self, tid):
        """Notify that thread *tid* finished; also drops any custom-frame
        pseudo-thread alias registered under the same id."""
        self._additional_thread_id_to_thread_name.pop(tid, None)
        try:
            return NetCommand(CMD_THREAD_KILL, 0, str(tid))
        except:
            return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
    def _iter_visible_frames_info(self, py_db, frames_list, flatten_chained=False):
        """Yield display info for each client-visible frame in *frames_list*.

        Skips frames without f_code/co_name and pydevd-internal files, applies
        source mappings, and — when *flatten_chained* is True — continues into
        chained exception frame lists, prefixing their method names with
        "[Chained Exc: ...]".

        Yields tuples of:
            (frame_id, frame, method_name, original_filename,
             filename_in_utf8, lineno, applied_mapping,
             show_as_current_frame, line_col_info)
        """
        assert frames_list.__class__ == FramesList
        is_chained = False
        while True:
            for frame in frames_list:
                show_as_current_frame = frame is frames_list.current_frame
                if frame.f_code is None:
                    pydev_log.info("Frame without f_code: %s", frame)
                    continue  # IronPython sometimes does not have it!

                method_name = frame.f_code.co_name  # method name (if in method) or ? if global
                if method_name is None:
                    pydev_log.info("Frame without co_name: %s", frame)
                    continue  # IronPython sometimes does not have it!

                if is_chained:
                    method_name = "[Chained Exc: %s] %s" % (frames_list.exc_desc, method_name)

                abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame)
                if py_db.get_file_type(frame, abs_path_real_path_and_base) == py_db.PYDEV_FILE:
                    # Skip pydevd files.
                    frame = frame.f_back
                    continue

                frame_id = id(frame)
                # Prefer the line recorded at suspend time over the live f_lineno.
                lineno = frames_list.frame_id_to_lineno.get(frame_id, frame.f_lineno)
                line_col_info = frames_list.frame_id_to_line_col_info.get(frame_id)

                # Two mapping layers: user source mappings first, then the
                # client/server path translation.
                filename_in_utf8, lineno, changed = py_db.source_mapping.map_to_client(abs_path_real_path_and_base[0], lineno)
                new_filename_in_utf8, applied_mapping = pydevd_file_utils.map_file_to_client(filename_in_utf8)
                applied_mapping = applied_mapping or changed

                yield (
                    frame_id,
                    frame,
                    method_name,
                    abs_path_real_path_and_base[0],
                    new_filename_in_utf8,
                    lineno,
                    applied_mapping,
                    show_as_current_frame,
                    line_col_info,
                )

            if not flatten_chained:
                break

            frames_list = frames_list.chained_frames_list
            if frames_list is None or len(frames_list) == 0:
                break
            is_chained = True
|
||||
|
||||
    def make_thread_stack_str(self, py_db, frames_list):
        """Render *frames_list* as a string of <frame .../> XML elements.

        Any error while walking the frames is logged and the frames rendered
        so far are returned (best-effort, never raises).
        """
        assert frames_list.__class__ == FramesList
        make_valid_xml_value = pydevd_xml.make_valid_xml_value
        cmd_text_list = []
        append = cmd_text_list.append

        try:
            for (
                frame_id,
                frame,
                method_name,
                _original_filename,
                filename_in_utf8,
                lineno,
                _applied_mapping,
                _show_as_current_frame,
                line_col_info,
            ) in self._iter_visible_frames_info(py_db, frames_list, flatten_chained=True):
                # Note: variables are all gotten 'on-demand'.
                append('<frame id="%s" name="%s" ' % (frame_id, make_valid_xml_value(method_name)))
                append('file="%s" line="%s">' % (quote(make_valid_xml_value(filename_in_utf8), "/>_= \t"), lineno))
                append("</frame>")
        except:
            pydev_log.exception()

        return "".join(cmd_text_list)
|
||||
|
||||
    def make_thread_suspend_str(
        self,
        py_db,
        thread_id,
        frames_list,
        stop_reason=None,
        message=None,
        trace_suspend_type="trace",
    ):
        """
        :return tuple(str,str):
            Returns tuple(thread_suspended_str, thread_stack_str).

            i.e.:
            (
            '''
                <xml>
                    <thread id="id" stop_reason="reason">
                        <frame id="id" name="functionName " file="file" line="line">
                        </frame>
                    </thread>
                </xml>
            '''
            ,
            '''
            <frame id="id" name="functionName " file="file" line="line">
            </frame>
            '''
            )
        """
        assert frames_list.__class__ == FramesList
        make_valid_xml_value = pydevd_xml.make_valid_xml_value
        cmd_text_list = []
        append = cmd_text_list.append

        cmd_text_list.append("<xml>")
        if message:
            message = make_valid_xml_value(message)

        # stop_reason / message / suspend_type attributes are emitted only
        # when provided.
        append('<thread id="%s"' % (thread_id,))
        if stop_reason is not None:
            append(' stop_reason="%s"' % (stop_reason,))
        if message is not None:
            append(' message="%s"' % (message,))
        if trace_suspend_type is not None:
            append(' suspend_type="%s"' % (trace_suspend_type,))
        append(">")
        thread_stack_str = self.make_thread_stack_str(py_db, frames_list)
        append(thread_stack_str)
        append("</thread></xml>")

        return "".join(cmd_text_list), thread_stack_str
|
||||
|
||||
def make_thread_suspend_message(self, py_db, thread_id, frames_list, stop_reason, message, trace_suspend_type, thread, additional_info):
|
||||
try:
|
||||
thread_suspend_str, thread_stack_str = self.make_thread_suspend_str(
|
||||
py_db, thread_id, frames_list, stop_reason, message, trace_suspend_type
|
||||
)
|
||||
cmd = NetCommand(CMD_THREAD_SUSPEND, 0, thread_suspend_str)
|
||||
cmd.thread_stack_str = thread_stack_str
|
||||
cmd.thread_suspend_str = thread_suspend_str
|
||||
return cmd
|
||||
except:
|
||||
return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_thread_suspend_single_notification(self, py_db, thread_id, thread, stop_reason):
|
||||
try:
|
||||
return NetCommand(CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION, 0, json.dumps({"thread_id": thread_id, "stop_reason": stop_reason}))
|
||||
except:
|
||||
return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_thread_resume_single_notification(self, thread_id):
|
||||
try:
|
||||
return NetCommand(CMD_THREAD_RESUME_SINGLE_NOTIFICATION, 0, json.dumps({"thread_id": thread_id}))
|
||||
except:
|
||||
return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
    def make_thread_run_message(self, py_db, thread_id, reason):
        """Notify that *thread_id* resumed; payload is 'thread_id<TAB>reason'."""
        try:
            return NetCommand(CMD_THREAD_RUN, 0, "%s\t%s" % (thread_id, reason))
        except:
            return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
    def make_get_variable_message(self, seq, payload):
        """Wrap a pre-built variables *payload* in a CMD_GET_VARIABLE response."""
        try:
            return NetCommand(CMD_GET_VARIABLE, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_array_message(self, seq, payload):
        """Wrap a pre-built array *payload* in a CMD_GET_ARRAY response."""
        try:
            return NetCommand(CMD_GET_ARRAY, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_description_message(self, seq, payload):
        """Wrap a pre-built description *payload* in a CMD_GET_DESCRIPTION response."""
        try:
            return NetCommand(CMD_GET_DESCRIPTION, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_frame_message(self, seq, payload):
        """Wrap a pre-built frame *payload* in a CMD_GET_FRAME response."""
        try:
            return NetCommand(CMD_GET_FRAME, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_evaluate_expression_message(self, seq, payload):
        """Wrap an evaluation result *payload* in a CMD_EVALUATE_EXPRESSION response."""
        try:
            return NetCommand(CMD_EVALUATE_EXPRESSION, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_completions_message(self, seq, completions, qualifier, start):
        """Render *completions* as XML for a CMD_GET_COMPLETIONS response.

        qualifier and start are accepted for interface compatibility (the
        JSON factory uses them) but unused in the XML rendering.
        """
        try:
            payload = completions_to_xml(completions)
            return NetCommand(CMD_GET_COMPLETIONS, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_get_file_contents(self, seq, payload):
        """Wrap file contents *payload* in a CMD_GET_FILE_CONTENTS response."""
        try:
            return NetCommand(CMD_GET_FILE_CONTENTS, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_reloaded_code_message(self, seq, reloaded_ok):
        """Report whether a code reload succeeded (CMD_RELOAD_CODE)."""
        try:
            return NetCommand(CMD_RELOAD_CODE, seq, '<xml><reloaded ok="%s"></reloaded></xml>' % reloaded_ok)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
    def make_send_breakpoint_exception_message(self, seq, payload):
        """Wrap a breakpoint-condition-error *payload* in a CMD_GET_BREAKPOINT_EXCEPTION response."""
        try:
            return NetCommand(CMD_GET_BREAKPOINT_EXCEPTION, seq, payload)
        except Exception:
            return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
def _make_send_curr_exception_trace_str(self, py_db, thread_id, exc_type, exc_desc, trace_obj):
    """Build the pieces of a "current exception trace" message.

    Returns a tuple (exc_type, exc_desc, thread_suspend_str,
    thread_stack_str) where exc_type/exc_desc have been converted to
    xml-safe strings (tabs replaced, fallback text when empty).
    """
    frames_list = pydevd_frame_utils.create_frames_list_from_traceback(trace_obj, None, exc_type, exc_desc)

    # NOTE: the parameters are deliberately rebound here — from this point
    # on exc_type/exc_desc are xml-safe *strings*, not the original objects.
    # Tabs must be stripped because the wire payload is tab-separated.
    exc_type = pydevd_xml.make_valid_xml_value(str(exc_type)).replace("\t", "  ") or "exception: type unknown"
    exc_desc = pydevd_xml.make_valid_xml_value(str(exc_desc)).replace("\t", "  ") or "exception: no description"

    thread_suspend_str, thread_stack_str = self.make_thread_suspend_str(
        py_db, thread_id, frames_list, CMD_SEND_CURR_EXCEPTION_TRACE, ""
    )
    return exc_type, exc_desc, thread_suspend_str, thread_stack_str
|
||||
|
||||
def make_send_curr_exception_trace_message(self, py_db, seq, thread_id, curr_frame_id, exc_type, exc_desc, trace_obj):
    """Build a CMD_SEND_CURR_EXCEPTION_TRACE command for the given exception."""
    try:
        exc_type, exc_desc, thread_suspend_str, _thread_stack_str = self._make_send_curr_exception_trace_str(
            py_db, thread_id, exc_type, exc_desc, trace_obj
        )
        # Wire format: frame id, type, description and suspend info, tab-separated.
        payload = "\t".join((str(curr_frame_id), exc_type, exc_desc, thread_suspend_str))
        return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE, seq, payload)
    except Exception:
        return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
def make_get_exception_details_message(self, py_db, seq, thread_id, topmost_frame):
    """Returns exception details as XML.

    Walks the stack from *topmost_frame* looking for the debugger's own
    ``do_wait_suspend`` frame; its local ``arg`` holds the
    (exc_type, exc_desc, traceback) for the exception the thread is
    suspended on.
    """
    try:
        # If the debugger is not suspended, just return the thread and its id.
        cmd_text = ['<xml><thread id="%s" ' % (thread_id,)]

        if topmost_frame is not None:
            try:
                frame = topmost_frame
                topmost_frame = None
                while frame is not None:
                    if frame.f_code.co_name == "do_wait_suspend" and frame.f_code.co_filename.endswith("pydevd.py"):
                        arg = frame.f_locals.get("arg", None)
                        if arg is not None:
                            exc_type, exc_desc, _thread_suspend_str, thread_stack_str = self._make_send_curr_exception_trace_str(
                                py_db, thread_id, *arg
                            )
                            cmd_text.append('exc_type="%s" ' % (exc_type,))
                            cmd_text.append('exc_desc="%s" ' % (exc_desc,))
                            cmd_text.append(">")
                            cmd_text.append(thread_stack_str)
                            break
                    frame = frame.f_back
                else:
                    # Loop ended without finding the suspend frame: close the
                    # <thread> tag without exception attributes.
                    cmd_text.append(">")
            finally:
                # Drop the frame reference to avoid keeping the stack alive
                # (frames participate in reference cycles).
                frame = None
        cmd_text.append("</thread></xml>")
        return NetCommand(CMD_GET_EXCEPTION_DETAILS, seq, "".join(cmd_text))
    except:
        return self.make_error_message(seq, get_exception_traceback_str())
|
||||
|
||||
def make_send_curr_exception_trace_proceeded_message(self, seq, thread_id):
    """Build a CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED notification.

    NOTE(review): *seq* is accepted but the command is sent with sequence 0
    (it is a server-initiated notification) — kept as-is for compatibility.
    """
    try:
        return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED, 0, str(thread_id))
    # Was a bare `except:`, which would also swallow SystemExit /
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_send_console_message(self, seq, payload):
    """Build a CMD_EVALUATE_CONSOLE_EXPRESSION response carrying *payload*."""
    try:
        cmd = NetCommand(CMD_EVALUATE_CONSOLE_EXPRESSION, seq, payload)
    except Exception:
        cmd = self.make_error_message(seq, get_exception_traceback_str())
    return cmd
|
||||
|
||||
def make_custom_operation_message(self, seq, payload):
    """Build a CMD_RUN_CUSTOM_OPERATION response carrying *payload*."""
    try:
        cmd = NetCommand(CMD_RUN_CUSTOM_OPERATION, seq, payload)
    except Exception:
        cmd = self.make_error_message(seq, get_exception_traceback_str())
    return cmd
|
||||
|
||||
def make_load_source_message(self, seq, source):
    """Build a CMD_LOAD_SOURCE response carrying the source text."""
    return NetCommand(CMD_LOAD_SOURCE, seq, source)
|
||||
|
||||
def make_load_source_from_frame_id_message(self, seq, source):
    """Build a CMD_LOAD_SOURCE_FROM_FRAME_ID response carrying the source text."""
    return NetCommand(CMD_LOAD_SOURCE_FROM_FRAME_ID, seq, source)
|
||||
|
||||
def make_show_console_message(self, py_db, thread_id, frame):
    """Build a CMD_SHOW_CONSOLE notification (sequence 0) for *frame*."""
    try:
        frames_list = pydevd_frame_utils.create_frames_list_from_frame(frame)
        thread_suspended_str, _thread_stack_str = self.make_thread_suspend_str(py_db, thread_id, frames_list, CMD_SHOW_CONSOLE, "")
        return NetCommand(CMD_SHOW_CONSOLE, 0, thread_suspended_str)
    # Was a bare `except:`; narrowed to Exception so SystemExit /
    # KeyboardInterrupt are not converted into error messages.
    except Exception:
        return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_input_requested_message(self, started):
    """Build a CMD_INPUT_REQUESTED notification; payload is str(started)."""
    try:
        return NetCommand(CMD_INPUT_REQUESTED, 0, str(started))
    # Was a bare `except:`; narrowed to Exception.
    except Exception:
        return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_set_next_stmnt_status_message(self, seq, is_success, exception_msg):
    """Build the CMD_SET_NEXT_STATEMENT reply: "<is_success>\\t<exception_msg>"."""
    try:
        message = str(is_success) + "\t" + exception_msg
        return NetCommand(CMD_SET_NEXT_STATEMENT, int(seq), message)
    # Was a bare `except:`; narrowed to Exception.
    # NOTE(review): the error path replies with sequence 0 rather than *seq*
    # — preserved as-is, but looks inconsistent; verify against the client.
    except Exception:
        return self.make_error_message(0, get_exception_traceback_str())
|
||||
|
||||
def make_load_full_value_message(self, seq, payload):
    """Build a CMD_LOAD_FULL_VALUE response carrying *payload*."""
    try:
        cmd = NetCommand(CMD_LOAD_FULL_VALUE, seq, payload)
    except Exception:
        cmd = self.make_error_message(seq, get_exception_traceback_str())
    return cmd
|
||||
|
||||
def make_get_next_statement_targets_message(self, seq, payload):
    """Build a CMD_GET_NEXT_STATEMENT_TARGETS response carrying *payload*."""
    try:
        cmd = NetCommand(CMD_GET_NEXT_STATEMENT_TARGETS, seq, payload)
    except Exception:
        cmd = self.make_error_message(seq, get_exception_traceback_str())
    return cmd
|
||||
|
||||
def make_skipped_step_in_because_of_filters(self, py_db, frame):
    """No-op for the xml backend: this notification has no xml representation."""
    return NULL_NET_COMMAND  # Not a part of the xml protocol
|
||||
|
||||
def make_evaluation_timeout_msg(self, py_db, expression, thread):
    """Build a warning command telling the user an evaluation timed out.

    Optionally appends a dump of all (non-pydevd) threads when the
    PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT setting is enabled.
    """
    msg = """pydevd: Evaluating: %s did not finish after %.2f seconds.
This may mean a number of things:
- This evaluation is really slow and this is expected.
In this case it's possible to silence this error by raising the timeout, setting the
PYDEVD_WARN_EVALUATION_TIMEOUT environment variable to a bigger value.

- The evaluation may need other threads running while it's running:
In this case, you may need to manually let other paused threads continue.

Alternatively, it's also possible to skip breaking on a particular thread by setting a
`pydev_do_not_trace = True` attribute in the related threading.Thread instance
(if some thread should always be running and no breakpoints are expected to be hit in it).

- The evaluation is deadlocked:
In this case you may set the PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT
environment variable to true so that a thread dump is shown along with this message and
optionally, set the PYDEVD_INTERRUPT_THREAD_TIMEOUT to some value so that the debugger
tries to interrupt the evaluation (if possible) when this happens.
""" % (expression, pydevd_constants.PYDEVD_WARN_EVALUATION_TIMEOUT)

    if pydevd_constants.PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT:
        stream = StringIO()
        pydevd_utils.dump_threads(stream, show_pydevd_threads=False)
        msg += "\n\n%s\n" % stream.getvalue()
    return self.make_warning_message(msg)
|
||||
|
||||
def make_exit_command(self, py_db):
    """The xml backend has no explicit exit message; return a null exit command."""
    return NULL_EXIT_COMMAND
|
||||
@@ -0,0 +1,208 @@
|
||||
import types
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from typing import Tuple, Literal
|
||||
|
||||
# The template-debugging plugins are optional: when an import fails
# (plugin not shipped/installed) the module reference is set to None and
# load_plugins() simply skips it.
try:
    from pydevd_plugins import django_debug
except:
    django_debug = None
    pydev_log.debug("Unable to load django_debug plugin")

try:
    from pydevd_plugins import jinja2_debug
except:
    jinja2_debug = None
    pydev_log.debug("Unable to load jinja2_debug plugin")
|
||||
|
||||
|
||||
def load_plugins():
    """Return the plugin modules that imported successfully (django first)."""
    return [plugin for plugin in (django_debug, jinja2_debug) if plugin is not None]
|
||||
|
||||
|
||||
def bind_func_to_method(func, obj, method_name):
    """Bind *func* as an instance method of *obj* under *method_name*.

    The bound method is stored as an attribute on *obj* and returned.
    """
    bound = types.MethodType(func, obj)
    setattr(obj, method_name, bound)
    return bound
|
||||
|
||||
|
||||
class PluginManager(object):
    """Dispatches debugger hooks to the optional template-debug plugins.

    Plugins are loaded eagerly (see load_plugins) but only become *active*
    once a breakpoint for them is added (add_breakpoint -> activate); every
    other hook iterates only over the active plugins.
    """

    # Sentinel returned by change_variable when no plugin handled the change
    # (distinguishes "not handled" from a legitimate None result).
    EMPTY_SENTINEL = object()

    def __init__(self, main_debugger):
        self.plugins = load_plugins()

        # When some breakpoint is added for a given plugin it becomes active.
        self.active_plugins = []

        self.main_debugger = main_debugger

    def add_breakpoint(self, func_name, *args, **kwargs):
        # add breakpoint for plugin: the first plugin that provides
        # *func_name* and returns a truthy result wins and is activated.
        for plugin in self.plugins:
            if hasattr(plugin, func_name):
                func = getattr(plugin, func_name)
                result = func(*args, **kwargs)
                if result:
                    self.activate(plugin)
                    return result
        return None

    def activate(self, plugin):
        # Idempotent: a plugin is only registered once.
        if plugin not in self.active_plugins:
            self.active_plugins.append(plugin)

    # These are not a part of the API, rather, `add_breakpoint` should be used with `add_line_breakpoint` or `add_exception_breakpoint`
    # which will call it for all plugins and then if it's valid it'll be activated.
    #
    # def add_line_breakpoint(self, py_db, type, canonical_normalized_filename, breakpoint_id, line, condition, expression, func_name, hit_condition=None, is_logpoint=False, add_breakpoint_result=None, on_changed_breakpoint_state=None):
    # def add_exception_breakpoint(plugin, py_db, type, exception):

    def after_breakpoints_consolidated(self, py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints):
        # Notify every active plugin that breakpoints for a file changed.
        for plugin in self.active_plugins:
            plugin.after_breakpoints_consolidated(py_db, canonical_normalized_filename, id_to_pybreakpoint, file_to_line_to_breakpoints)

    def remove_exception_breakpoint(self, py_db, exception_type, exception):
        """
        :param exception_type: 'django', 'jinja2' (can be extended)

        Returns the first truthy plugin result, or None if no plugin
        handled the removal.
        """
        for plugin in self.active_plugins:
            ret = plugin.remove_exception_breakpoint(py_db, exception_type, exception)
            if ret:
                return ret

        return None

    def remove_all_exception_breakpoints(self, py_db):
        for plugin in self.active_plugins:
            plugin.remove_all_exception_breakpoints(py_db)

    def get_breakpoints(self, py_db, breakpoint_type):
        """
        :param breakpoint_type: 'django-line', 'jinja2-line'

        Returns the first truthy plugin result (implicitly None otherwise).
        """
        for plugin in self.active_plugins:
            ret = plugin.get_breakpoints(py_db, breakpoint_type)
            if ret:
                return ret

    def can_skip(self, py_db, frame):
        # A frame may only be skipped if *every* active plugin agrees.
        for plugin in self.active_plugins:
            if not plugin.can_skip(py_db, frame):
                return False
        return True

    def required_events_breakpoint(self) -> Tuple[Literal["line", "call"], ...]:
        # Union (concatenation) of the events required by all active plugins.
        ret = ()
        for plugin in self.active_plugins:
            new = plugin.required_events_breakpoint()
            if new:
                ret += new

        return ret

    def required_events_stepping(self) -> Tuple[Literal["line", "call", "return"], ...]:
        # Union (concatenation) of the events required by all active plugins.
        ret = ()
        for plugin in self.active_plugins:
            new = plugin.required_events_stepping()
            if new:
                ret += new

        return ret

    def is_tracked_frame(self, frame) -> bool:
        # True if any active plugin claims this frame (e.g. a template frame).
        for plugin in self.active_plugins:
            if plugin.is_tracked_frame(frame):
                return True
        return False

    def has_exception_breaks(self, py_db) -> bool:
        for plugin in self.active_plugins:
            if plugin.has_exception_breaks(py_db):
                return True
        return False

    def has_line_breaks(self, py_db) -> bool:
        for plugin in self.active_plugins:
            if plugin.has_line_breaks(py_db):
                return True
        return False

    def cmd_step_into(self, py_db, frame, event, info, thread, stop_info, stop: bool):
        """
        :param stop_info: in/out information. If it should stop then it'll be
            filled by the plugin.
        :param stop: whether the stop has already been flagged for this frame.
        :returns:
            tuple(stop, plugin_stop)
        """
        plugin_stop = False
        for plugin in self.active_plugins:
            stop, plugin_stop = plugin.cmd_step_into(py_db, frame, event, info, thread, stop_info, stop)
            if plugin_stop:
                return stop, plugin_stop
        return stop, plugin_stop

    def cmd_step_over(self, py_db, frame, event, info, thread, stop_info, stop):
        # Same contract as cmd_step_into.
        plugin_stop = False
        for plugin in self.active_plugins:
            stop, plugin_stop = plugin.cmd_step_over(py_db, frame, event, info, thread, stop_info, stop)
            if plugin_stop:
                return stop, plugin_stop
        return stop, plugin_stop

    def stop(self, py_db, frame, event, thread, stop_info, arg, step_cmd):
        """
        The way this works is that the `cmd_step_into` or `cmd_step_over`
        is called which then fills the `stop_info` and then this method
        is called to do the actual stop.
        """
        for plugin in self.active_plugins:
            stopped = plugin.stop(py_db, frame, event, thread, stop_info, arg, step_cmd)
            if stopped:
                return stopped
        return False

    def get_breakpoint(self, py_db, frame, event, info):
        # First truthy plugin result wins.
        for plugin in self.active_plugins:
            ret = plugin.get_breakpoint(py_db, frame, event, info)
            if ret:
                return ret
        return None

    def suspend(self, py_db, thread, frame, bp_type):
        """
        :param bp_type: 'django' or 'jinja2'

        :return:
            The frame for the suspend or None if it should not be suspended.
        """
        for plugin in self.active_plugins:
            ret = plugin.suspend(py_db, thread, frame, bp_type)
            if ret is not None:
                return ret

        return None

    def exception_break(self, py_db, frame, thread, arg, is_unwind=False):
        # First non-None plugin result wins.
        for plugin in self.active_plugins:
            ret = plugin.exception_break(py_db, frame, thread, arg, is_unwind)
            if ret is not None:
                return ret

        return None

    def change_variable(self, frame, attr, expression):
        # EMPTY_SENTINEL means "no plugin handled this change".
        for plugin in self.active_plugins:
            ret = plugin.change_variable(frame, attr, expression, self.EMPTY_SENTINEL)
            if ret is not self.EMPTY_SENTINEL:
                return ret

        return self.EMPTY_SENTINEL
|
||||
+805
@@ -0,0 +1,805 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydev_bundle.pydev_log import exception as pydev_log_exception
|
||||
from _pydevd_bundle import pydevd_traceproperty, pydevd_dont_trace, pydevd_utils
|
||||
from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info
|
||||
from _pydevd_bundle.pydevd_breakpoints import get_exception_class
|
||||
from _pydevd_bundle.pydevd_comm import (
|
||||
InternalEvaluateConsoleExpression,
|
||||
InternalConsoleGetCompletions,
|
||||
InternalRunCustomOperation,
|
||||
internal_get_next_statement_targets,
|
||||
internal_get_smart_step_into_variants,
|
||||
)
|
||||
from _pydevd_bundle.pydevd_constants import NEXT_VALUE_SEPARATOR, IS_WINDOWS, NULL
|
||||
from _pydevd_bundle.pydevd_comm_constants import ID_TO_MEANING, CMD_EXEC_EXPRESSION, CMD_AUTHENTICATE
|
||||
from _pydevd_bundle.pydevd_api import PyDevdAPI
|
||||
from io import StringIO
|
||||
from _pydevd_bundle.pydevd_net_command import NetCommand
|
||||
from _pydevd_bundle.pydevd_thread_lifecycle import pydevd_find_thread_by_id
|
||||
import pydevd_file_utils
|
||||
|
||||
|
||||
class _PyDevCommandProcessor(object):
|
||||
def __init__(self):
    # Single PyDevdAPI instance shared by all command handlers.
    self.api = PyDevdAPI()
|
||||
|
||||
def process_net_command(self, py_db, cmd_id, seq, text):
    """Processes a command received from the Java side

    @param cmd_id: the id of the command
    @param seq: the sequence of the command
    @param text: the text received in the command

    Dispatches to a ``cmd_<meaning>`` method on this class based on the
    command id; any reply command the handler returns is queued on the
    writer.
    """

    # We can only proceed if the client is already authenticated or if it's the
    # command to authenticate.
    if cmd_id != CMD_AUTHENTICATE and not py_db.authentication.is_authenticated():
        cmd = py_db.cmd_factory.make_error_message(seq, "Client not authenticated.")
        py_db.writer.add_command(cmd)
        return

    meaning = ID_TO_MEANING[str(cmd_id)]

    # print('Handling %s (%s)' % (meaning, text))

    method_name = meaning.lower()

    # NOTE(review): the extra .lower() is redundant (method_name is already
    # lowercase) — harmless.
    on_command = getattr(self, method_name.lower(), None)
    if on_command is None:
        # I have no idea what this is all about
        cmd = py_db.cmd_factory.make_error_message(seq, "unexpected command " + str(cmd_id))
        py_db.writer.add_command(cmd)
        return

    lock = py_db._main_lock
    if method_name == "cmd_thread_dump_to_stderr":
        # We can skip the main debugger locks for cases where we know it's not needed.
        lock = NULL

    with lock:
        try:
            cmd = on_command(py_db, cmd_id, seq, text)
            if cmd is not None:
                py_db.writer.add_command(cmd)
        except:
            # During interpreter shutdown module globals may already have
            # been set to None, so guard before reporting the failure.
            if traceback is not None and sys is not None and pydev_log_exception is not None:
                pydev_log_exception()

                stream = StringIO()
                traceback.print_exc(file=stream)
                cmd = py_db.cmd_factory.make_error_message(
                    seq,
                    "Unexpected exception in process_net_command.\nInitial params: %s. Exception: %s"
                    % (((cmd_id, seq, text), stream.getvalue())),
                )
                if cmd is not None:
                    py_db.writer.add_command(cmd)
|
||||
|
||||
def cmd_authenticate(self, py_db, cmd_id, seq, text):
    """Handle CMD_AUTHENTICATE: *text* is the client's access token."""
    py_db.authentication.login(text)
    if not py_db.authentication.is_authenticated():
        return py_db.cmd_factory.make_error_message(seq, "Client not authenticated.")
    # Echo back the server's client access token on success.
    return NetCommand(cmd_id, seq, py_db.authentication.client_access_token)
|
||||
|
||||
def cmd_run(self, py_db, cmd_id, seq, text):
    """Handle CMD_RUN: resume the program."""
    return self.api.run(py_db)
|
||||
|
||||
def cmd_list_threads(self, py_db, cmd_id, seq, text):
    """Handle CMD_LIST_THREADS: reply with the known threads."""
    return self.api.list_threads(py_db, seq)
|
||||
|
||||
def cmd_get_completions(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_COMPLETIONS.

    *text* is tab-separated: thread_id, frame_id, scope (ignored) and the
    activation token to complete.
    """
    thread_id, frame_id, _scope, act_tok = text.split("\t", 3)
    return self.api.request_completions(py_db, seq, thread_id, frame_id, act_tok)
|
||||
|
||||
def cmd_get_thread_stack(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_THREAD_STACK.

    *text* is either "thread_id" or "thread_id<TAB>timeout", where the
    timeout is how long to wait for the thread to suspend before replying.
    """
    if "\t" in text:
        thread_id, timeout_text = text.split("\t")
        timeout = float(timeout_text)
    else:
        thread_id, timeout = text, 0.5  # Default timeout is .5 seconds.
    return self.api.request_stack(py_db, seq, thread_id, fmt={}, timeout=timeout)
|
||||
|
||||
def cmd_set_protocol(self, py_db, cmd_id, seq, text):
    """Handle CMD_SET_PROTOCOL: *text* is the protocol name."""
    return self.api.set_protocol(py_db, seq, text.strip())
|
||||
|
||||
def cmd_thread_suspend(self, py_db, cmd_id, seq, text):
    """Handle CMD_THREAD_SUSPEND: *text* is the thread id to suspend."""
    return self.api.request_suspend_thread(py_db, text.strip())
|
||||
|
||||
def cmd_version(self, py_db, cmd_id, seq, text):
    """Handle CMD_VERSION.

    *text* is tab-separated: version, optionally the IDE OS
    ('WINDOWS'/'UNIX') and optionally the breakpoint grouping
    ('LINE'/'ID'). Replies with the version message and enables thread
    notifications.
    """
    # Default based on server process (although ideally the IDE should
    # provide it).
    if IS_WINDOWS:
        ide_os = "WINDOWS"
    else:
        ide_os = "UNIX"

    # Breakpoints can be grouped by 'LINE' or by 'ID'.
    breakpoints_by = "LINE"

    splitted = text.split("\t")
    if len(splitted) == 1:
        # NOTE(review): leaves _local_version as a one-element list (not
        # unpacked); harmless since it is unused.
        _local_version = splitted

    elif len(splitted) == 2:
        _local_version, ide_os = splitted

    elif len(splitted) == 3:
        _local_version, ide_os, breakpoints_by = splitted

    version_msg = self.api.set_ide_os_and_breakpoints_by(py_db, seq, ide_os, breakpoints_by)

    # Enable thread notifications after the version command is completed.
    self.api.set_enable_thread_notifications(py_db, True)

    return version_msg
|
||||
|
||||
def cmd_thread_run(self, py_db, cmd_id, seq, text):
    """Handle CMD_THREAD_RUN: *text* is the thread id to resume."""
    return self.api.request_resume_thread(text.strip())
|
||||
|
||||
def _cmd_step(self, py_db, cmd_id, seq, text):
    """Shared handler for all stepping commands; *text* is the thread id.

    The step kind is derived from *cmd_id* itself, so a single
    implementation serves all the aliases below.
    """
    return self.api.request_step(py_db, text.strip(), cmd_id)

cmd_step_into = _cmd_step
cmd_step_into_my_code = _cmd_step
cmd_step_over = _cmd_step
cmd_step_over_my_code = _cmd_step
cmd_step_return = _cmd_step
cmd_step_return_my_code = _cmd_step
|
||||
|
||||
def _cmd_set_next(self, py_db, cmd_id, seq, text):
    """Shared handler for run-to-line / set-next-statement.

    *text* is tab-separated: thread_id, line, func_name; the command kind
    (run-to-line vs set-next) is carried by *cmd_id*.
    """
    thread_id, line, func_name = text.split("\t", 2)
    return self.api.request_set_next(py_db, seq, thread_id, cmd_id, None, line, func_name)

cmd_run_to_line = _cmd_set_next
cmd_set_next_statement = _cmd_set_next
|
||||
|
||||
def cmd_smart_step_into(self, py_db, cmd_id, seq, text):
    """Handle CMD_SMART_STEP_INTO.

    *text* is tab-separated: thread_id, then either "offset=<n>[;<child>]"
    (bytecode-offset based target) or a line number, then func_name.
    """
    thread_id, line_or_bytecode_offset, func_name = text.split("\t", 2)
    if line_or_bytecode_offset.startswith("offset="):
        # In this case we request the smart step into to stop given the parent frame
        # and the location of the parent frame bytecode offset and not just the func_name
        # (this implies that `CMD_GET_SMART_STEP_INTO_VARIANTS` was previously used
        # to know what are the valid stop points).

        temp = line_or_bytecode_offset[len("offset=") :]
        if ";" in temp:
            # "parent_offset;child_offset" form.
            offset, child_offset = temp.split(";")
            offset = int(offset)
            child_offset = int(child_offset)
        else:
            child_offset = -1
            offset = int(temp)
        return self.api.request_smart_step_into(py_db, seq, thread_id, offset, child_offset)
    else:
        # If the offset wasn't passed, just use the line/func_name to do the stop.
        return self.api.request_smart_step_into_by_func_name(py_db, seq, thread_id, line_or_bytecode_offset, func_name)
|
||||
|
||||
def cmd_reload_code(self, py_db, cmd_id, seq, text):
    """Handle CMD_RELOAD_CODE.

    *text* is "module_name" or "module_name<TAB>filename".
    """
    text = text.strip()
    if "\t" in text:
        module_name, filename = text.split("\t", 1)
    else:
        module_name, filename = text, None
    self.api.request_reload_code(py_db, seq, module_name, filename)
|
||||
|
||||
def cmd_change_variable(self, py_db, cmd_id, seq, text):
    """Handle CMD_CHANGE_VARIABLE.

    The attribute path may itself contain tabs (nested access); the value
    is everything after the *last* tab.
    """
    # the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change
    thread_id, frame_id, scope, attr_and_value = text.split("\t", 3)

    # Split on the last tab: left side is the (possibly nested) attribute
    # path, right side is the new value.
    tab_index = attr_and_value.rindex("\t")
    attr = attr_and_value[0:tab_index].replace("\t", ".")
    value = attr_and_value[tab_index + 1 :]
    self.api.request_change_variable(py_db, seq, thread_id, frame_id, scope, attr, value)
|
||||
|
||||
def cmd_get_variable(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_VARIABLE.

    *text* is tab-separated: thread_id, frame_id, scope (FRAME|GLOBAL) and
    optionally further attribute tokens.
    """
    thread_id, frame_id, scopeattrs = text.split("\t", 2)

    if "\t" in scopeattrs:  # there are attributes beyond scope
        scope, attrs = scopeattrs.split("\t", 1)
    else:
        scope, attrs = scopeattrs, None

    self.api.request_get_variable(py_db, seq, thread_id, frame_id, scope, attrs)
|
||||
|
||||
def cmd_get_array(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_ARRAY (slice of an array-like variable)."""
    # Note: untested and unused in pydev
    # we received some command to get an array variable
    # the text is (tab-separated, in this order):
    # roffs\tcoffs\trows\tcols\tformat\tthread_id\tframe_id\tscope[\tattrs]
    roffset, coffset, rows, cols, format, thread_id, frame_id, scopeattrs = text.split("\t", 7)

    if scopeattrs.find("\t") != -1:  # there are attributes beyond scope
        scope, attrs = scopeattrs.split("\t", 1)
    else:
        scope, attrs = (scopeattrs, None)

    self.api.request_get_array(py_db, seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs)
|
||||
|
||||
def cmd_show_return_values(self, py_db, cmd_id, seq, text):
    """Handle CMD_SHOW_RETURN_VALUES: second tab-separated field is '1' to enable."""
    show_return_values = text.split("\t")[1]
    self.api.set_show_return_values(py_db, int(show_return_values) == 1)
|
||||
|
||||
def cmd_load_full_value(self, py_db, cmd_id, seq, text):
    """Handle CMD_LOAD_FULL_VALUE (load untruncated variable values)."""
    # Note: untested and unused in pydev
    thread_id, frame_id, scopeattrs = text.split("\t", 2)
    # The variables to load are joined with NEXT_VALUE_SEPARATOR.
    vars = scopeattrs.split(NEXT_VALUE_SEPARATOR)

    self.api.request_load_full_value(py_db, seq, thread_id, frame_id, vars)
|
||||
|
||||
def cmd_get_description(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_DESCRIPTION: describe *expression* in a frame's context."""
    # Note: untested and unused in pydev
    thread_id, frame_id, expression = text.split("\t", 2)
    self.api.request_get_description(py_db, seq, thread_id, frame_id, expression)
|
||||
|
||||
def cmd_get_frame(self, py_db, cmd_id, seq, text):
    """Handle CMD_GET_FRAME (the scope field is parsed but not forwarded)."""
    thread_id, frame_id, scope = text.split("\t", 2)
    self.api.request_get_frame(py_db, seq, thread_id, frame_id)
|
||||
|
||||
def cmd_set_break(self, py_db, cmd_id, seq, text):
    """Handle CMD_SET_BREAK: parse the wire format and register a breakpoint.

    Two wire formats exist: the id-based one (when
    py_db._set_breakpoints_with_id) with up to 10 tab-separated fields, and
    the legacy line-based one with 7 fields. Newlines/tabs inside condition
    and expression arrive escaped and are restored here. Validation results
    are reported asynchronously through on_changed_breakpoint_state.
    """
    # func name: 'None': match anything. Empty: match global, specified: only method context.
    # command to add some breakpoint.
    # text is filename\tline. Add to breakpoints dictionary
    suspend_policy = "NONE"  # Can be 'NONE' or 'ALL'
    is_logpoint = False
    hit_condition = None
    if py_db._set_breakpoints_with_id:
        # Progressively fall back to shorter formats sent by older clients.
        try:
            try:
                (
                    breakpoint_id,
                    btype,
                    filename,
                    line,
                    func_name,
                    condition,
                    expression,
                    hit_condition,
                    is_logpoint,
                    suspend_policy,
                ) = text.split("\t", 9)
            except ValueError:  # not enough values to unpack
                # No suspend_policy passed (use default).
                breakpoint_id, btype, filename, line, func_name, condition, expression, hit_condition, is_logpoint = text.split("\t", 8)
            is_logpoint = is_logpoint == "True"
        except ValueError:  # not enough values to unpack
            breakpoint_id, btype, filename, line, func_name, condition, expression = text.split("\t", 6)

        breakpoint_id = int(breakpoint_id)
        line = int(line)

        # We must restore new lines and tabs as done in
        # AbstractDebugTarget.breakpointAdded
        condition = condition.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()

        expression = expression.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()
    else:
        # Note: this else should be removed after PyCharm migrates to setting
        # breakpoints by id (and ideally also provides func_name).
        btype, filename, line, func_name, suspend_policy, condition, expression = text.split("\t", 6)
        # If we don't have an id given for each breakpoint, consider
        # the id to be the line.
        breakpoint_id = line = int(line)

        condition = condition.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()

        expression = expression.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()

    # Normalize empty / "None" strings to actual None.
    if condition is not None and (len(condition) <= 0 or condition == "None"):
        condition = None

    if expression is not None and (len(expression) <= 0 or expression == "None"):
        expression = None

    if hit_condition is not None and (len(hit_condition) <= 0 or hit_condition == "None"):
        hit_condition = None

    def on_changed_breakpoint_state(breakpoint_id, add_breakpoint_result):
        # Translate the validation result into a user-visible warning (if any).
        error_code = add_breakpoint_result.error_code

        translated_line = add_breakpoint_result.translated_line
        translated_filename = add_breakpoint_result.translated_filename
        msg = ""
        if error_code:
            if error_code == self.api.ADD_BREAKPOINT_FILE_NOT_FOUND:
                msg = "pydev debugger: Trying to add breakpoint to file that does not exist: %s (will have no effect).\n" % (
                    translated_filename,
                )

            elif error_code == self.api.ADD_BREAKPOINT_FILE_EXCLUDED_BY_FILTERS:
                msg = "pydev debugger: Trying to add breakpoint to file that is excluded by filters: %s (will have no effect).\n" % (
                    translated_filename,
                )

            elif error_code == self.api.ADD_BREAKPOINT_LAZY_VALIDATION:
                msg = ""  # Ignore this here (if/when loaded, it'll call on_changed_breakpoint_state again accordingly).

            elif error_code == self.api.ADD_BREAKPOINT_INVALID_LINE:
                msg = "pydev debugger: Trying to add breakpoint to line (%s) that is not valid in: %s.\n" % (
                    translated_line,
                    translated_filename,
                )

            else:
                # Shouldn't get here.
                msg = "pydev debugger: Breakpoint not validated (reason unknown -- please report as error): %s (%s).\n" % (
                    translated_filename,
                    translated_line,
                )

        else:
            if add_breakpoint_result.original_line != translated_line:
                msg = "pydev debugger (info): Breakpoint in line: %s moved to line: %s (in %s).\n" % (
                    add_breakpoint_result.original_line,
                    translated_line,
                    translated_filename,
                )

        if msg:
            py_db.writer.add_command(py_db.cmd_factory.make_warning_message(msg))

    result = self.api.add_breakpoint(
        py_db,
        self.api.filename_to_str(filename),
        btype,
        breakpoint_id,
        line,
        condition,
        func_name,
        expression,
        suspend_policy,
        hit_condition,
        is_logpoint,
        on_changed_breakpoint_state=on_changed_breakpoint_state,
    )

    # Report the initial validation state immediately.
    on_changed_breakpoint_state(breakpoint_id, result)
|
||||
|
||||
def cmd_remove_break(self, py_db, cmd_id, seq, text):
    """Handle CMD_REMOVE_BREAK.

    Non-integer breakpoint ids are logged and ignored (no reply is sent).
    """
    # command to remove some breakpoint
    # text is type\file\tid. Remove from breakpoints dictionary
    breakpoint_type, filename, breakpoint_id = text.split("\t", 2)

    filename = self.api.filename_to_str(filename)

    try:
        breakpoint_id = int(breakpoint_id)
    except ValueError:
        pydev_log.critical("Error removing breakpoint. Expected breakpoint_id to be an int. Found: %s", breakpoint_id)

    else:
        self.api.remove_breakpoint(py_db, filename, breakpoint_type, breakpoint_id)
|
||||
|
||||
def _cmd_exec_or_evaluate_expression(self, py_db, cmd_id, seq, text):
    """Shared handler for CMD_EVALUATE_EXPRESSION / CMD_EXEC_EXPRESSION.

    Whether to exec or evaluate is decided from *cmd_id*.
    """
    # command to evaluate the given expression
    # text is: thread\tstackframe\tLOCAL\texpression
    attr_to_set_result = ""
    try:
        thread_id, frame_id, scope, expression, trim, attr_to_set_result = text.split("\t", 5)
    except ValueError:
        # Older clients don't send attr_to_set_result; keep the default "".
        thread_id, frame_id, scope, expression, trim = text.split("\t", 4)
    is_exec = cmd_id == CMD_EXEC_EXPRESSION
    trim_if_too_big = int(trim) == 1

    self.api.request_exec_or_evaluate(py_db, seq, thread_id, frame_id, expression, is_exec, trim_if_too_big, attr_to_set_result)

cmd_evaluate_expression = _cmd_exec_or_evaluate_expression
cmd_exec_expression = _cmd_exec_or_evaluate_expression
|
||||
|
||||
def cmd_console_exec(self, py_db, cmd_id, seq, text):
    """Handle CMD_CONSOLE_EXEC."""
    # command to exec expression in console, in case expression is only partially valid 'False' is returned
    # text is: thread\tstackframe\tLOCAL\texpression

    thread_id, frame_id, scope, expression = text.split("\t", 3)
    self.api.request_console_exec(py_db, seq, thread_id, frame_id, expression)
|
||||
|
||||
def cmd_set_path_mapping_json(self, py_db, cmd_id, seq, text):
    """
    :param text:
        Json text. Something as:

        {
            "pathMappings": [
                {
                    "localRoot": "c:/temp",
                    "remoteRoot": "/usr/temp"
                }
            ],
            "debug": true,
            "force": false
        }

    With "force" set, mappings/debug flags are applied even when empty,
    clearing any previous configuration.
    """
    as_json = json.loads(text)
    force = as_json.get("force", False)

    # Keep only mappings where both ends are non-empty.
    path_mappings = []
    for pathMapping in as_json.get("pathMappings", []):
        localRoot = pathMapping.get("localRoot", "")
        remoteRoot = pathMapping.get("remoteRoot", "")
        if (localRoot != "") and (remoteRoot != ""):
            path_mappings.append((localRoot, remoteRoot))

    if bool(path_mappings) or force:
        pydevd_file_utils.setup_client_server_paths(path_mappings)

    debug = as_json.get("debug", False)
    if debug or force:
        pydevd_file_utils.DEBUG_CLIENT_SERVER_TRANSLATION = debug
|
||||
|
||||
def cmd_set_py_exception_json(self, py_db, cmd_id, seq, text):
    """Set all python exception breakpoints 'in bulk' from a json payload.

    Clears the existing exception-breakpoint registries on py_db before
    applying the new configuration, then notifies that breakpoints changed.
    """
    # This API is optional and works 'in bulk' -- it's possible
    # to get finer-grained control with CMD_ADD_EXCEPTION_BREAK/CMD_REMOVE_EXCEPTION_BREAK
    # which allows setting caught/uncaught per exception, although global settings such as:
    # - skip_on_exceptions_thrown_in_same_context
    # - ignore_exceptions_thrown_in_lines_with_ignore_exception
    # must still be set through this API (before anything else as this clears all existing
    # exception breakpoints).
    try:
        # Reset all registries before applying the new configuration.
        py_db.break_on_uncaught_exceptions = {}
        py_db.break_on_caught_exceptions = {}
        py_db.break_on_user_uncaught_exceptions = {}

        as_json = json.loads(text)
        break_on_uncaught = as_json.get("break_on_uncaught", False)
        break_on_caught = as_json.get("break_on_caught", False)
        break_on_user_caught = as_json.get("break_on_user_caught", False)
        py_db.skip_on_exceptions_thrown_in_same_context = as_json.get("skip_on_exceptions_thrown_in_same_context", False)
        py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = as_json.get(
            "ignore_exceptions_thrown_in_lines_with_ignore_exception", False
        )
        ignore_libraries = as_json.get("ignore_libraries", False)
        exception_types = as_json.get("exception_types", [])

        # The same global flags apply to every exception type in the list.
        for exception_type in exception_types:
            if not exception_type:
                continue

            py_db.add_break_on_exception(
                exception_type,
                condition=None,
                expression=None,
                notify_on_handled_exceptions=break_on_caught,
                notify_on_unhandled_exceptions=break_on_uncaught,
                notify_on_user_unhandled_exceptions=break_on_user_caught,
                notify_on_first_raise_only=True,
                ignore_libraries=ignore_libraries,
            )

        py_db.on_breakpoints_changed()
    except:
        # Best-effort API: a malformed payload is logged, not propagated.
        pydev_log.exception("Error when setting exception list. Received: %s", text)
|
||||
|
||||
def cmd_set_py_exception(self, py_db, cmd_id, seq, text):
    """DEPRECATED: use cmd_set_py_exception_json instead.

    Payload is ';'-separated: four 'true'/'false' flags followed by the
    exception names to break on. Clears the existing exception registries
    before applying the new settings.
    """
    try:
        parts = text.split(";")
        py_db.break_on_uncaught_exceptions = {}
        py_db.break_on_caught_exceptions = {}
        py_db.break_on_user_uncaught_exceptions = {}
        if len(parts) >= 5:
            # Flags come over the wire as the literal strings 'true'/'false'.
            break_on_uncaught = parts[0] == "true"
            break_on_caught = parts[1] == "true"
            py_db.skip_on_exceptions_thrown_in_same_context = parts[2] == "true"
            py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = parts[3] == "true"
            ignore_libraries = parts[4] == "true"

            # Everything after the flags is an exception name to break on.
            for exception_type in parts[5:]:
                exception_type = exception_type.strip()
                if not exception_type:
                    continue

                py_db.add_break_on_exception(
                    exception_type,
                    condition=None,
                    expression=None,
                    notify_on_handled_exceptions=break_on_caught,
                    notify_on_unhandled_exceptions=break_on_uncaught,
                    notify_on_user_unhandled_exceptions=False,  # TODO (not currently supported in this API).
                    notify_on_first_raise_only=True,
                    ignore_libraries=ignore_libraries,
                )
        else:
            pydev_log.exception("Expected to have at least 5 ';' separated items. Received: %s", text)

    except:
        pydev_log.exception("Error when setting exception list. Received: %s", text)
|
||||
|
||||
def _load_source(self, py_db, cmd_id, seq, text):
    # Shared handler: 'text' is the filename whose source should be loaded.
    filename = self.api.filename_to_str(text)
    self.api.request_load_source(py_db, seq, filename)

# Both wire commands share the same implementation.
cmd_load_source = _load_source
cmd_get_file_contents = _load_source
|
||||
|
||||
def cmd_load_source_from_frame_id(self, py_db, cmd_id, seq, text):
    # 'text' carries the frame id whose source should be loaded.
    self.api.request_load_source_from_frame_id(py_db, seq, text)
|
||||
|
||||
def cmd_set_property_trace(self, py_db, cmd_id, seq, text):
    """Enable/disable tracing of property getters/setters/deleters.

    'text' format: feature_state(true/false);disable_getter;disable_setter;disable_deleter
    (each field is the literal string 'true' or 'false').
    """
    if text:
        splitted = text.split(";")
        # Bug fix: the payload has 4 fields and splitted[3] is accessed below,
        # so at least 4 entries are required (the previous '>= 3' guard allowed
        # an IndexError on a 3-field payload).
        if len(splitted) >= 4:
            if not py_db.disable_property_trace and splitted[0] == "true":
                # Replacing property by custom property only when the debugger starts
                pydevd_traceproperty.replace_builtin_property()
                py_db.disable_property_trace = True
            # Enable/Disable tracing of the property getter
            py_db.disable_property_getter_trace = splitted[1] == "true"
            # Enable/Disable tracing of the property setter
            py_db.disable_property_setter_trace = splitted[2] == "true"
            # Enable/Disable tracing of the property deleter
            py_db.disable_property_deleter_trace = splitted[3] == "true"
|
||||
|
||||
def cmd_add_exception_break(self, py_db, cmd_id, seq, text):
    """Add one exception breakpoint (python or plugin) from a wire payload.

    Payload (tab-separated, long form):
        exception\tcondition\texpression\tnotify_on_handled\tnotify_on_unhandled\tignore_libraries
    Short form: exception\tnotify_on_handled\tnotify_on_unhandled\tignore_libraries
    Bare form: just the exception name (all flags default to 0).
    """
    # Note that this message has some idiosyncrasies...
    #
    # notify_on_handled_exceptions can be 0, 1 or 2
    # 0 means we should not stop on handled exceptions.
    # 1 means we should stop on handled exceptions showing it on all frames where the exception passes.
    # 2 means we should stop on handled exceptions but we should only notify about it once.
    #
    # To ignore_libraries properly, besides setting ignore_libraries to 1, the IDE_PROJECT_ROOTS environment
    # variable must be set (so, we'll ignore anything not below IDE_PROJECT_ROOTS) -- this is not ideal as
    # the environment variable may not be properly set if it didn't start from the debugger (we should
    # create a custom message for that).
    #
    # There are 2 global settings which can only be set in CMD_SET_PY_EXCEPTION. Namely:
    #
    # py_db.skip_on_exceptions_thrown_in_same_context
    # - If True, we should only show the exception in a caller, not where it was first raised.
    #
    # py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception
    # - If True exceptions thrown in lines with '@IgnoreException' will not be shown.

    condition = ""
    expression = ""
    if text.find("\t") != -1:
        try:
            # Long form: 6 fields (condition/expression included).
            (
                exception,
                condition,
                expression,
                notify_on_handled_exceptions,
                notify_on_unhandled_exceptions,
                ignore_libraries,
            ) = text.split("\t", 5)
        except:
            # Short form: 4 fields (condition/expression stay "").
            exception, notify_on_handled_exceptions, notify_on_unhandled_exceptions, ignore_libraries = text.split("\t", 3)
    else:
        # Bare exception name: all flags default to 0.
        exception, notify_on_handled_exceptions, notify_on_unhandled_exceptions, ignore_libraries = text, 0, 0, 0

    # Newlines/tabs are escaped on the wire; undo that and normalize empties to None.
    condition = condition.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()

    if condition is not None and (len(condition) == 0 or condition == "None"):
        condition = None

    expression = expression.replace("@_@NEW_LINE_CHAR@_@", "\n").replace("@_@TAB_CHAR@_@", "\t").strip()

    if expression is not None and (len(expression) == 0 or expression == "None"):
        expression = None

    # A "type-Name" prefix selects a plugin breakpoint (e.g. "django-...").
    if exception.find("-") != -1:
        breakpoint_type, exception = exception.split("-")
    else:
        breakpoint_type = "python"

    if breakpoint_type == "python":
        self.api.add_python_exception_breakpoint(
            py_db,
            exception,
            condition,
            expression,
            notify_on_handled_exceptions=int(notify_on_handled_exceptions) > 0,
            notify_on_unhandled_exceptions=int(notify_on_unhandled_exceptions) == 1,
            notify_on_user_unhandled_exceptions=0,  # TODO (not currently supported in this API).
            notify_on_first_raise_only=int(notify_on_handled_exceptions) == 2,
            ignore_libraries=int(ignore_libraries) > 0,
        )
    else:
        self.api.add_plugins_exception_breakpoint(py_db, breakpoint_type, exception)
|
||||
|
||||
def cmd_remove_exception_break(self, py_db, cmd_id, seq, text):
    """Remove an exception breakpoint; 'text' is 'type-Exception' or a bare python exception name."""
    if "-" in text:
        exception_type, exception = text.split("-")
    else:
        exception_type, exception = "python", text

    if exception_type == "python":
        self.api.remove_python_exception_breakpoint(py_db, exception)
    else:
        self.api.remove_plugins_exception_breakpoint(py_db, exception_type, exception)
|
||||
|
||||
def cmd_add_django_exception_break(self, py_db, cmd_id, seq, text):
    # 'text' is the name of the django exception to break on.
    self.api.add_plugins_exception_breakpoint(py_db, breakpoint_type="django", exception=text)
|
||||
|
||||
def cmd_remove_django_exception_break(self, py_db, cmd_id, seq, text):
    # 'text' is the name of the django exception to stop breaking on.
    self.api.remove_plugins_exception_breakpoint(py_db, exception_type="django", exception=text)
|
||||
|
||||
def cmd_evaluate_console_expression(self, py_db, cmd_id, seq, text):
    """Handle debug-console communication.

    Payload: thread_id\tframe_id\tCOMMAND\tline, where COMMAND is one of
    EVALUATE, EVALUATE_UNBUFFERED or GET_COMPLETIONS.
    """
    if text == "":
        return

    thread_id, frame_id, remainder = text.split("\t", 2)
    console_command, line = remainder.split("\t")

    if console_command == "GET_COMPLETIONS":
        int_cmd = InternalConsoleGetCompletions(seq, thread_id, frame_id, line)
    elif console_command in ("EVALUATE", "EVALUATE_UNBUFFERED"):
        # EVALUATE buffers the console output; EVALUATE_UNBUFFERED streams it.
        int_cmd = InternalEvaluateConsoleExpression(
            seq, thread_id, frame_id, line, buffer_output=console_command == "EVALUATE"
        )
    else:
        raise ValueError("Unrecognized command: %s" % (console_command,))

    py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
def cmd_run_custom_operation(self, py_db, cmd_id, seq, text):
    # Command which runs a custom operation.
    # Payload: '<thread_id>\t<frame_id>\t<scope>[\t<attrs>]||<style>\t<code_or_file>\t<fnname>'
    if text != "":
        try:
            location, custom = text.split("||", 1)
        except:
            sys.stderr.write("Custom operation now needs a || separator. Found: %s\n" % (text,))
            raise

        thread_id, frame_id, scopeattrs = location.split("\t", 2)

        if scopeattrs.find("\t") != -1:  # there are attributes beyond scope
            scope, attrs = scopeattrs.split("\t", 1)
        else:
            scope, attrs = (scopeattrs, None)

        # : style: EXECFILE or EXEC
        # : encoded_code_or_file: file to execute or code
        # : fname: name of function to be executed in the resulting namespace
        # NOTE(review): maxsplit=3 can yield up to 4 parts but only 3 are
        # unpacked, so a tab inside fnname would raise ValueError -- presumably
        # the client never sends one; confirm before changing.
        style, encoded_code_or_file, fnname = custom.split("\t", 3)
        int_cmd = InternalRunCustomOperation(seq, thread_id, frame_id, scope, attrs, style, encoded_code_or_file, fnname)
        py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
def cmd_ignore_thrown_exception_at(self, py_db, cmd_id, seq, text):
    """Register file:line locations whose thrown exceptions should be ignored.

    Payload: optional 'REPLACE:' prefix (clears the current registry), then
    '||'-separated entries of the form '<filename>|<line_number>'.
    """
    if text:
        replace = "REPLACE:"  # Not all 3.x versions support u'REPLACE:', so, doing workaround.
        if text.startswith(replace):
            text = text[8:]  # len("REPLACE:") == 8
            py_db.filename_to_lines_where_exceptions_are_ignored.clear()

        if text:
            for line in text.split("||"):  # Can be bulk-created (one in each line)
                original_filename, line_number = line.split("|")
                original_filename = self.api.filename_to_server(original_filename)

                canonical_normalized_filename = pydevd_file_utils.canonical_normalized_path(original_filename)
                absolute_filename = pydevd_file_utils.absolute_path(original_filename)

                if os.path.exists(absolute_filename):
                    # Registry maps canonical filename -> {line_number: 1} (dict used as a set).
                    lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored.get(canonical_normalized_filename)
                    if lines_ignored is None:
                        lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored[canonical_normalized_filename] = {}
                    lines_ignored[int(line_number)] = 1
                else:
                    # Warn (don't fail): an entry for a missing file has no effect.
                    sys.stderr.write(
                        "pydev debugger: warning: trying to ignore exception thrown"
                        " on file that does not exist: %s (will have no effect)\n" % (absolute_filename,)
                    )
|
||||
|
||||
def cmd_enable_dont_trace(self, py_db, cmd_id, seq, text):
    # Toggle the "don't trace" filter; payload is 'true' to enable it.
    if text:
        enable = text.strip() == "true"  # Not all 3.x versions support u'str', so, doing workaround.
        pydevd_dont_trace.trace_filter(enable)
|
||||
|
||||
def cmd_redirect_output(self, py_db, cmd_id, seq, text):
    # Payload contains the substrings 'STDOUT' and/or 'STDERR' naming the
    # streams to redirect to the client.
    if not text:
        return
    redirect_stdout = "STDOUT" in text
    redirect_stderr = "STDERR" in text
    py_db.enable_output_redirection(redirect_stdout, redirect_stderr)
|
||||
|
||||
def cmd_get_next_statement_targets(self, py_db, cmd_id, seq, text):
    # Payload: thread_id\tframe_id. Computes valid "set next statement"
    # targets on the target thread's internal-command queue.
    thread_id, frame_id = text.split("\t", 1)

    py_db.post_method_as_internal_command(thread_id, internal_get_next_statement_targets, seq, thread_id, frame_id)
|
||||
|
||||
def cmd_get_smart_step_into_variants(self, py_db, cmd_id, seq, text):
    # Payload: thread_id\tframe_id\tstart_line\tend_line. Computes the smart
    # step-into variants for that frame range on the thread's own queue.
    thread_id, frame_id, start_line, end_line = text.split("\t", 3)

    py_db.post_method_as_internal_command(
        thread_id,
        internal_get_smart_step_into_variants,
        seq,
        thread_id,
        frame_id,
        start_line,
        end_line,
        set_additional_thread_info=set_additional_thread_info,
    )
|
||||
|
||||
def cmd_set_project_roots(self, py_db, cmd_id, seq, text):
    # Payload: tab-separated list of project root paths.
    self.api.set_project_roots(py_db, text.split("\t"))
|
||||
|
||||
def cmd_thread_dump_to_stderr(self, py_db, cmd_id, seq, text):
    # Debug aid: dump the stacks of all threads to stderr (payload ignored).
    pydevd_utils.dump_threads()
|
||||
|
||||
def cmd_stop_on_start(self, py_db, cmd_id, seq, text):
    # Accept several spellings of a true value from the wire.
    normalized = text.strip()
    if normalized in ("True", "true", "1"):
        self.api.stop_on_entry()
|
||||
|
||||
def cmd_pydevd_json_config(self, py_db, cmd_id, seq, text):
    """Apply a json configuration payload.

    Expected to receive a json string as:
    {
        'skip_suspend_on_breakpoint_exception': [<exception names where we should suspend>],
        'skip_print_breakpoint_exception': [<exception names where we should print>],
        'multi_threads_single_notification': bool,
    }
    Only the keys actually present in the payload are applied.
    """
    config = json.loads(text.strip())

    key = "skip_suspend_on_breakpoint_exception"
    if key in config:
        py_db.skip_suspend_on_breakpoint_exception = tuple(get_exception_class(x) for x in config[key])

    key = "skip_print_breakpoint_exception"
    if key in config:
        py_db.skip_print_breakpoint_exception = tuple(get_exception_class(x) for x in config[key])

    if "multi_threads_single_notification" in config:
        py_db.multi_threads_single_notification = config["multi_threads_single_notification"]
|
||||
|
||||
def cmd_get_exception_details(self, py_db, cmd_id, seq, text):
    # 'text' is the thread id; builds (and returns) the exception-details
    # message for the topmost frame of that thread.
    thread_id = text
    t = pydevd_find_thread_by_id(thread_id)
    frame = None
    if t is not None and not getattr(t, "pydev_do_not_trace", None):
        additional_info = set_additional_thread_info(t)
        frame = additional_info.get_topmost_frame(t)
    try:
        # Note: provide the return even if the thread is empty.
        return py_db.cmd_factory.make_get_exception_details_message(py_db, seq, thread_id, frame)
    finally:
        # Clear the locals so this handler doesn't keep the thread/frame alive.
        frame = None
        t = None
|
||||
|
||||
|
||||
# Module-level entry point: a singleton processor's bound method, used to
# dispatch incoming wire commands.
process_net_command = _PyDevCommandProcessor().process_net_command
|
||||
+1354
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,252 @@
|
||||
import sys
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from os.path import basename
|
||||
from _pydev_bundle import pydev_log
|
||||
from urllib.parse import unquote_plus
|
||||
from _pydevd_bundle.pydevd_constants import IS_PY311_OR_GREATER
|
||||
|
||||
|
||||
# ===================================================================================================
# print_var_node
# ===================================================================================================
def print_var_node(xml_node, stream):
    """Write a one-line, human-readable description of a <var> xml node to *stream*.

    Attribute values are url-unquoted; 'Found as' is only printed when the
    'found_as' attribute is non-empty.
    """
    parts = (
        ("Name: ", xml_node.getAttribute("name")),
        (", Value: ", xml_node.getAttribute("value")),
        (", Type: ", xml_node.getAttribute("type")),
    )
    for label, raw in parts:
        stream.write(label)
        stream.write(unquote_plus(raw))

    found_as = xml_node.getAttribute("found_as")
    if found_as:
        stream.write(", Found as: %s" % (unquote_plus(found_as),))
    stream.write("\n")
|
||||
|
||||
|
||||
# ===================================================================================================
# print_referrers
# ===================================================================================================
def print_referrers(obj, stream=None):
    """Find the referrers of *obj*, pretty-print them to *stream* (default stdout)
    and return the raw xml string produced by get_referrer_info."""
    out = sys.stdout if stream is None else stream
    result = get_referrer_info(obj)
    from xml.dom.minidom import parseString

    document = parseString(result)

    root = document.getElementsByTagName("xml")[0]
    for node in root.childNodes:
        if node.nodeType == node.TEXT_NODE:
            continue

        tag = node.localName
        if tag == "for":
            out.write("Searching references for: ")
            for child in node.childNodes:
                if child.nodeType != node.TEXT_NODE:
                    print_var_node(child, out)
        elif tag == "var":
            out.write("Referrer found: ")
            print_var_node(node, out)
        else:
            sys.stderr.write("Unhandled node: %s\n" % (node,))

    return result
|
||||
|
||||
|
||||
# ===================================================================================================
# get_referrer_info
# ===================================================================================================
def get_referrer_info(searched_obj):
    """Return an xml string describing the objects that refer to *searched_obj*.

    The result is '<xml><for>...</for><var .../>...</xml>' where <for> echoes the
    searched object and each <var> is one referrer found via gc.get_referrers.
    Never raises: errors are logged and reported inside the returned xml.
    """
    DEBUG = 0
    if DEBUG:
        sys.stderr.write("Getting referrers info.\n")
    try:
        try:
            if searched_obj is None:
                # Nothing sensible to search for: report and bail out early.
                ret = ["<xml>\n"]

                ret.append("<for>\n")
                ret.append(
                    pydevd_xml.var_to_xml(
                        searched_obj, "Skipping getting referrers for None", additional_in_xml=' id="%s"' % (id(searched_obj),)
                    )
                )
                ret.append("</for>\n")
                ret.append("</xml>")
                ret = "".join(ret)
                return ret

            obj_id = id(searched_obj)

            try:
                if DEBUG:
                    sys.stderr.write("Getting referrers...\n")
                import gc

                referrers = gc.get_referrers(searched_obj)
            except:
                # gc may fail on exotic objects; report the failure in the xml.
                pydev_log.exception()
                ret = ["<xml>\n"]

                ret.append("<for>\n")
                ret.append(
                    pydevd_xml.var_to_xml(
                        searched_obj, "Exception raised while trying to get_referrers.", additional_in_xml=' id="%s"' % (id(searched_obj),)
                    )
                )
                ret.append("</for>\n")
                ret.append("</xml>")
                ret = "".join(ret)
                return ret

            if DEBUG:
                sys.stderr.write("Found %s referrers.\n" % (len(referrers),))

            curr_frame = sys._getframe()
            frame_type = type(curr_frame)

            # Ignore this frame and any caller frame of this frame

            ignore_frames = {}  # Should be a set, but it's not available on all python versions.
            while curr_frame is not None:
                if basename(curr_frame.f_code.co_filename).startswith("pydev"):
                    ignore_frames[curr_frame] = 1
                curr_frame = curr_frame.f_back

            ret = ["<xml>\n"]

            ret.append("<for>\n")
            if DEBUG:
                sys.stderr.write('Searching Referrers of obj with id="%s"\n' % (obj_id,))

            ret.append(pydevd_xml.var_to_xml(searched_obj, 'Referrers of obj with id="%s"' % (obj_id,)))
            ret.append("</for>\n")

            curr_frame = sys._getframe()
            all_objects = None

            for r in referrers:
                try:
                    if r in ignore_frames:
                        continue  # Skip the references we may add ourselves
                except:
                    pass  # Ok: unhashable type checked...

                # Skip bookkeeping objects belonging to this very search.
                if r is referrers:
                    continue

                if r is curr_frame.f_locals:
                    continue

                r_type = type(r)
                r_id = str(id(r))

                representation = str(r_type)

                # 'found_as' describes where inside the referrer the object was found.
                found_as = ""
                if r_type == frame_type:
                    if DEBUG:
                        sys.stderr.write("Found frame referrer: %r\n" % (r,))
                    for key, val in r.f_locals.items():
                        if val is searched_obj:
                            found_as = key
                            break

                elif r_type == dict:
                    if DEBUG:
                        sys.stderr.write("Found dict referrer: %r\n" % (r,))

                    # Try to check if it's a value in the dict (and under which key it was found)
                    for key, val in r.items():
                        if val is searched_obj:
                            found_as = key
                            if DEBUG:
                                sys.stderr.write(" Found as %r in dict\n" % (found_as,))
                            break

                    # Ok, there's one annoying thing: many times we find it in a dict from an instance,
                    # but with this we don't directly have the class, only the dict, so, to workaround that
                    # we iterate over all reachable objects and check if one of those has the given dict.
                    if all_objects is None:
                        all_objects = gc.get_objects()

                    for x in all_objects:
                        try:
                            if getattr(x, "__dict__", None) is r:
                                # Report the owning instance instead of its __dict__.
                                r = x
                                r_type = type(x)
                                r_id = str(id(r))
                                representation = str(r_type)
                                break
                        except:
                            pass  # Just ignore any error here (i.e.: ReferenceError, etc.)

                elif r_type in (tuple, list):
                    if DEBUG:
                        sys.stderr.write("Found tuple referrer: %r\n" % (r,))

                    for i, x in enumerate(r):
                        if x is searched_obj:
                            found_as = "%s[%s]" % (r_type.__name__, i)
                            if DEBUG:
                                sys.stderr.write(" Found as %s in tuple: \n" % (found_as,))
                            break

                elif IS_PY311_OR_GREATER:
                    # Up to Python 3.10, gc.get_referrers for an instance actually returned the
                    # object.__dict__, but on Python 3.11 it returns the actual object, so,
                    # handling is a bit easier (we don't need the workaround from the dict
                    # case to find the actual instance, we just need to find the attribute name).
                    if DEBUG:
                        sys.stderr.write("Found dict referrer: %r\n" % (r,))

                    dct = getattr(r, "__dict__", None)
                    if dct:
                        # Try to check if it's a value in the dict (and under which key it was found)
                        for key, val in dct.items():
                            if val is searched_obj:
                                found_as = key
                                if DEBUG:
                                    sys.stderr.write(" Found as %r in object instance\n" % (found_as,))
                                break

                if found_as:
                    if not isinstance(found_as, str):
                        found_as = str(found_as)
                    found_as = ' found_as="%s"' % (pydevd_xml.make_valid_xml_value(found_as),)

                ret.append(pydevd_xml.var_to_xml(r, representation, additional_in_xml=' id="%s"%s' % (r_id, found_as)))
        finally:
            if DEBUG:
                sys.stderr.write("Done searching for references.\n")

            # If we have any exceptions, don't keep dangling references from this frame to any of our objects.
            all_objects = None
            referrers = None
            searched_obj = None
            r = None
            x = None
            key = None
            val = None
            curr_frame = None
            ignore_frames = None
    except:
        # Last-resort handler: report the failure inside the returned xml.
        pydev_log.exception()
        ret = ["<xml>\n"]

        ret.append("<for>\n")
        ret.append(pydevd_xml.var_to_xml(searched_obj, "Error getting referrers for:", additional_in_xml=' id="%s"' % (id(searched_obj),)))
        ret.append("</for>\n")
        ret.append("</xml>")
        ret = "".join(ret)
        return ret

    ret.append("</xml>")
    ret = "".join(ret)
    return ret
|
||||
@@ -0,0 +1,433 @@
|
||||
"""
|
||||
Based on the python xreload.
|
||||
|
||||
Changes
|
||||
======================
|
||||
|
||||
1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace,
|
||||
load a new version of it and update only some of the things we can inplace. That way, we don't break
|
||||
things such as singletons or end up with a second representation of the same class in memory.
|
||||
|
||||
2. If we find it to be a __metaclass__, we try to update it as a regular class.
|
||||
|
||||
3. We don't remove old attributes (and leave them lying around even if they're no longer used).
|
||||
|
||||
4. Reload hooks were changed
|
||||
|
||||
These changes make it more stable, especially in the common case (where in a debug session only the
|
||||
contents of a function are changed), besides providing flexibility for users that want to extend
|
||||
on it.
|
||||
|
||||
|
||||
|
||||
Hooks
|
||||
======================
|
||||
|
||||
Classes/modules can be specially crafted to work with the reload (so that it can, for instance,
|
||||
update some constant which was changed).
|
||||
|
||||
1. To participate in the change of some attribute:
|
||||
|
||||
In a module:
|
||||
|
||||
__xreload_old_new__(namespace, name, old, new)
|
||||
|
||||
in a class:
|
||||
|
||||
@classmethod
|
||||
__xreload_old_new__(cls, name, old, new)
|
||||
|
||||
A class or module may include a method called '__xreload_old_new__' which is called when we're
|
||||
unable to reload a given attribute.
|
||||
|
||||
|
||||
|
||||
2. To do something after the whole reload is finished:
|
||||
|
||||
In a module:
|
||||
|
||||
__xreload_after_reload_update__(namespace):
|
||||
|
||||
In a class:
|
||||
|
||||
@classmethod
|
||||
__xreload_after_reload_update__(cls):
|
||||
|
||||
|
||||
A class or module may include a method called '__xreload_after_reload_update__' which is called
|
||||
after the reload finishes.
|
||||
|
||||
|
||||
Important: when providing a hook, always use the namespace or cls provided and not anything in the global
|
||||
namespace, as the global namespace is only temporarily created during the reload and may not reflect the
|
||||
actual application state (while the cls and namespace passed are).
|
||||
|
||||
|
||||
Current limitations
|
||||
======================
|
||||
|
||||
|
||||
- Attributes/constants are added, but not changed (so singletons and the application state is not
|
||||
broken -- use provided hooks to workaround it).
|
||||
|
||||
- Code using metaclasses may not always work.
|
||||
|
||||
- Functions and methods using decorators (other than classmethod and staticmethod) are not handled
|
||||
correctly.
|
||||
|
||||
- Renamings are not handled correctly.
|
||||
|
||||
- Dependent modules are not reloaded.
|
||||
|
||||
- New __slots__ can't be added to existing classes.
|
||||
|
||||
|
||||
Info
|
||||
======================
|
||||
|
||||
Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py
|
||||
Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later)
|
||||
|
||||
Interesting alternative: https://code.google.com/p/reimport/
|
||||
|
||||
Alternative to reload().
|
||||
|
||||
This works by executing the module in a scratch namespace, and then patching classes, methods and
|
||||
functions in place. This avoids the need to patch instances. New objects are copied into the
|
||||
target namespace.
|
||||
|
||||
"""
|
||||
|
||||
from _pydev_bundle.pydev_imports import execfile
|
||||
from _pydevd_bundle import pydevd_dont_trace
|
||||
import types
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle.pydevd_constants import get_global_debugger
|
||||
|
||||
NO_DEBUG = 0
|
||||
LEVEL1 = 1
|
||||
LEVEL2 = 2
|
||||
|
||||
DEBUG = NO_DEBUG
|
||||
|
||||
|
||||
def write_err(*args):
    """Send a 'code reload: ...' IO message (stderr category) to the client,
    if a debugger is currently active."""
    py_db = get_global_debugger()
    if py_db is None:
        return

    msg = " ".join(str(a) for a in args)
    cmd = py_db.cmd_factory.make_io_message("code reload: %s\n" % (msg,), 2)
    if py_db.writer is not None:
        py_db.writer.add_command(cmd)
|
||||
|
||||
|
||||
def notify_info0(*args):
    # Level-0 notification: always forwarded to the client.
    write_err(*args)
|
||||
|
||||
|
||||
def notify_info(*args):
    # Level-1 notification: only forwarded when DEBUG >= LEVEL1.
    if DEBUG >= LEVEL1:
        write_err(*args)
|
||||
|
||||
|
||||
def notify_info2(*args):
    # Level-2 (most verbose) notification: only forwarded when DEBUG >= LEVEL2.
    if DEBUG >= LEVEL2:
        write_err(*args)
|
||||
|
||||
|
||||
def notify_error(*args):
    # Errors are always forwarded, regardless of the DEBUG level.
    write_err(*args)
|
||||
|
||||
|
||||
# =======================================================================================================================
# code_objects_equal
# =======================================================================================================================
def code_objects_equal(code0, code1):
    """Compare two code objects, ignoring line-number/position metadata.

    Returns True when every relevant co_* attribute matches.
    """
    skipped = ("replace", "co_positions", "co_qualname")
    for attr in dir(code0):
        # Skip dunders/privates, anything line-related, and the accessors that
        # vary between builds without affecting semantics.
        if attr.startswith("_") or "line" in attr or attr in skipped:
            continue
        if getattr(code0, attr) != getattr(code1, attr):
            return False
    return True
|
||||
|
||||
|
||||
# =======================================================================================================================
# xreload
# =======================================================================================================================
def xreload(mod):
    """Reload a module in place, updating classes, methods and functions.

    mod: a module object

    Returns a boolean indicating whether a change was done.
    """
    r = Reload(mod)
    r.apply()
    found_change = r.found_change
    r = None  # Drop the reference so the Reload instance (and its namespaces) can be collected.
    pydevd_dont_trace.clear_trace_filter_cache()
    return found_change
|
||||
|
||||
|
||||
# This isn't actually used... Initially I planned to reload variables which are immutable on the
|
||||
# namespace, but this can destroy places where we're saving state, which may not be what we want,
|
||||
# so, we're being conservative and giving the user hooks if he wants to do a reload.
|
||||
#
|
||||
# immutable_types = [int, str, float, tuple] #That should be common to all Python versions
|
||||
#
|
||||
# for name in 'long basestr unicode frozenset'.split():
|
||||
# try:
|
||||
# immutable_types.append(__builtins__[name])
|
||||
# except:
|
||||
# pass #Just ignore: not all python versions are created equal.
|
||||
# immutable_types = tuple(immutable_types)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Reload
|
||||
# =======================================================================================================================
|
||||
class Reload:
|
||||
def __init__(self, mod, mod_name=None, mod_filename=None):
    """Prepare a reload of *mod*.

    mod_name / mod_filename override the module's own __name__ / __file__
    when given (useful if the module object itself is not available).
    """
    self.mod = mod
    self.mod_name = mod_name or (mod.__name__ if mod is not None else None)
    self.mod_filename = mod_filename or (mod.__file__ if mod is not None else None)

    # Set to True by apply() if anything in the namespace actually changed.
    self.found_change = False
|
||||
|
||||
def apply(self):
|
||||
mod = self.mod
|
||||
self._on_finish_callbacks = []
|
||||
try:
|
||||
# Get the module namespace (dict) early; this is part of the type check
|
||||
modns = mod.__dict__
|
||||
|
||||
# Execute the code. We copy the module dict to a temporary; then
|
||||
# clear the module dict; then execute the new code in the module
|
||||
# dict; then swap things back and around. This trick (due to
|
||||
# Glyph Lefkowitz) ensures that the (readonly) __globals__
|
||||
# attribute of methods and functions is set to the correct dict
|
||||
# object.
|
||||
new_namespace = modns.copy()
|
||||
new_namespace.clear()
|
||||
if self.mod_filename:
|
||||
new_namespace["__file__"] = self.mod_filename
|
||||
try:
|
||||
new_namespace["__builtins__"] = __builtins__
|
||||
except NameError:
|
||||
raise # Ok if not there.
|
||||
|
||||
if self.mod_name:
|
||||
new_namespace["__name__"] = self.mod_name
|
||||
if new_namespace["__name__"] == "__main__":
|
||||
# We do this because usually the __main__ starts-up the program, guarded by
|
||||
# the if __name__ == '__main__', but we don't want to start the program again
|
||||
# on a reload.
|
||||
new_namespace["__name__"] = "__main_reloaded__"
|
||||
|
||||
execfile(self.mod_filename, new_namespace, new_namespace)
|
||||
# Now we get to the hard part
|
||||
oldnames = set(modns)
|
||||
newnames = set(new_namespace)
|
||||
|
||||
# Create new tokens (note: not deleting existing)
|
||||
for name in newnames - oldnames:
|
||||
notify_info0("Added:", name, "to namespace")
|
||||
self.found_change = True
|
||||
modns[name] = new_namespace[name]
|
||||
|
||||
# Update in-place what we can
|
||||
for name in oldnames & newnames:
|
||||
self._update(modns, name, modns[name], new_namespace[name])
|
||||
|
||||
self._handle_namespace(modns)
|
||||
|
||||
for c in self._on_finish_callbacks:
|
||||
c()
|
||||
del self._on_finish_callbacks[:]
|
||||
except:
|
||||
pydev_log.exception()
|
||||
|
||||
def _handle_namespace(self, namespace, is_class_namespace=False):
|
||||
on_finish = None
|
||||
if is_class_namespace:
|
||||
xreload_after_update = getattr(namespace, "__xreload_after_reload_update__", None)
|
||||
if xreload_after_update is not None:
|
||||
self.found_change = True
|
||||
on_finish = lambda: xreload_after_update()
|
||||
|
||||
elif "__xreload_after_reload_update__" in namespace:
|
||||
xreload_after_update = namespace["__xreload_after_reload_update__"]
|
||||
self.found_change = True
|
||||
on_finish = lambda: xreload_after_update(namespace)
|
||||
|
||||
if on_finish is not None:
|
||||
# If a client wants to know about it, give him a chance.
|
||||
self._on_finish_callbacks.append(on_finish)
|
||||
|
||||
def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
|
||||
"""Update oldobj, if possible in place, with newobj.
|
||||
|
||||
If oldobj is immutable, this simply returns newobj.
|
||||
|
||||
Args:
|
||||
oldobj: the object to be updated
|
||||
newobj: the object used as the source for the update
|
||||
"""
|
||||
try:
|
||||
notify_info2("Updating: ", oldobj)
|
||||
if oldobj is newobj:
|
||||
# Probably something imported
|
||||
return
|
||||
|
||||
if type(oldobj) is not type(newobj):
|
||||
# Cop-out: if the type changed, give up
|
||||
if name not in ("__builtins__",):
|
||||
notify_error("Type of: %s (old: %s != new: %s) changed... Skipping." % (name, type(oldobj), type(newobj)))
|
||||
return
|
||||
|
||||
if isinstance(newobj, types.FunctionType):
|
||||
self._update_function(oldobj, newobj)
|
||||
return
|
||||
|
||||
if isinstance(newobj, types.MethodType):
|
||||
self._update_method(oldobj, newobj)
|
||||
return
|
||||
|
||||
if isinstance(newobj, classmethod):
|
||||
self._update_classmethod(oldobj, newobj)
|
||||
return
|
||||
|
||||
if isinstance(newobj, staticmethod):
|
||||
self._update_staticmethod(oldobj, newobj)
|
||||
return
|
||||
|
||||
if hasattr(types, "ClassType"):
|
||||
classtype = (types.ClassType, type) # object is not instance of types.ClassType.
|
||||
else:
|
||||
classtype = type
|
||||
|
||||
if isinstance(newobj, classtype):
|
||||
self._update_class(oldobj, newobj)
|
||||
return
|
||||
|
||||
# New: dealing with metaclasses.
|
||||
if hasattr(newobj, "__metaclass__") and hasattr(newobj, "__class__") and newobj.__metaclass__ == newobj.__class__:
|
||||
self._update_class(oldobj, newobj)
|
||||
return
|
||||
|
||||
if namespace is not None:
|
||||
# Check for the `__xreload_old_new__` protocol (don't even compare things
|
||||
# as even doing a comparison may break things -- see: https://github.com/microsoft/debugpy/issues/615).
|
||||
xreload_old_new = None
|
||||
if is_class_namespace:
|
||||
xreload_old_new = getattr(namespace, "__xreload_old_new__", None)
|
||||
if xreload_old_new is not None:
|
||||
self.found_change = True
|
||||
xreload_old_new(name, oldobj, newobj)
|
||||
|
||||
elif "__xreload_old_new__" in namespace:
|
||||
xreload_old_new = namespace["__xreload_old_new__"]
|
||||
xreload_old_new(namespace, name, oldobj, newobj)
|
||||
self.found_change = True
|
||||
|
||||
# Too much information to the user...
|
||||
# else:
|
||||
# notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,))
|
||||
|
||||
except:
|
||||
notify_error("Exception found when updating %s. Proceeding for other items." % (name,))
|
||||
pydev_log.exception()
|
||||
|
||||
# All of the following functions have the same signature as _update()
|
||||
|
||||
def _update_function(self, oldfunc, newfunc):
|
||||
"""Update a function object."""
|
||||
oldfunc.__doc__ = newfunc.__doc__
|
||||
oldfunc.__dict__.update(newfunc.__dict__)
|
||||
|
||||
try:
|
||||
newfunc.__code__
|
||||
attr_name = "__code__"
|
||||
except AttributeError:
|
||||
newfunc.func_code
|
||||
attr_name = "func_code"
|
||||
|
||||
old_code = getattr(oldfunc, attr_name)
|
||||
new_code = getattr(newfunc, attr_name)
|
||||
if not code_objects_equal(old_code, new_code):
|
||||
notify_info0("Updated function code:", oldfunc)
|
||||
setattr(oldfunc, attr_name, new_code)
|
||||
self.found_change = True
|
||||
|
||||
try:
|
||||
oldfunc.__defaults__ = newfunc.__defaults__
|
||||
except AttributeError:
|
||||
oldfunc.func_defaults = newfunc.func_defaults
|
||||
|
||||
return oldfunc
|
||||
|
||||
def _update_method(self, oldmeth, newmeth):
|
||||
"""Update a method object."""
|
||||
# XXX What if im_func is not a function?
|
||||
if hasattr(oldmeth, "im_func") and hasattr(newmeth, "im_func"):
|
||||
self._update(None, None, oldmeth.im_func, newmeth.im_func)
|
||||
elif hasattr(oldmeth, "__func__") and hasattr(newmeth, "__func__"):
|
||||
self._update(None, None, oldmeth.__func__, newmeth.__func__)
|
||||
return oldmeth
|
||||
|
||||
def _update_class(self, oldclass, newclass):
|
||||
"""Update a class object."""
|
||||
olddict = oldclass.__dict__
|
||||
newdict = newclass.__dict__
|
||||
|
||||
oldnames = set(olddict)
|
||||
newnames = set(newdict)
|
||||
|
||||
for name in newnames - oldnames:
|
||||
setattr(oldclass, name, newdict[name])
|
||||
notify_info0("Added:", name, "to", oldclass)
|
||||
self.found_change = True
|
||||
|
||||
# Note: not removing old things...
|
||||
# for name in oldnames - newnames:
|
||||
# notify_info('Removed:', name, 'from', oldclass)
|
||||
# delattr(oldclass, name)
|
||||
|
||||
for name in (oldnames & newnames) - set(["__dict__", "__doc__"]):
|
||||
self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)
|
||||
|
||||
old_bases = getattr(oldclass, "__bases__", None)
|
||||
new_bases = getattr(newclass, "__bases__", None)
|
||||
if str(old_bases) != str(new_bases):
|
||||
notify_error("Changing the hierarchy of a class is not supported. %s may be inconsistent." % (oldclass,))
|
||||
|
||||
self._handle_namespace(oldclass, is_class_namespace=True)
|
||||
|
||||
def _update_classmethod(self, oldcm, newcm):
|
||||
"""Update a classmethod update."""
|
||||
# While we can't modify the classmethod object itself (it has no
|
||||
# mutable attributes), we *can* extract the underlying function
|
||||
# (by calling __get__(), which returns a method object) and update
|
||||
# it in-place. We don't have the class available to pass to
|
||||
# __get__() but any object except None will do.
|
||||
self._update(None, None, oldcm.__get__(0), newcm.__get__(0))
|
||||
|
||||
def _update_staticmethod(self, oldsm, newsm):
|
||||
"""Update a staticmethod update."""
|
||||
# While we can't modify the staticmethod object itself (it has no
|
||||
# mutable attributes), we *can* extract the underlying function
|
||||
# (by calling __get__(), which returns it) and update it in-place.
|
||||
# We don't have the class available to pass to __get__() but any
|
||||
# object except None will do.
|
||||
self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
|
||||
@@ -0,0 +1,830 @@
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle.pydevd_utils import hasattr_checked, DAPGrouper, Timer
|
||||
from io import StringIO
|
||||
import traceback
|
||||
from os.path import basename
|
||||
|
||||
from functools import partial
|
||||
from _pydevd_bundle.pydevd_constants import (
|
||||
IS_PY36_OR_GREATER,
|
||||
MethodWrapperType,
|
||||
RETURN_VALUES_DICT,
|
||||
DebugInfoHolder,
|
||||
IS_PYPY,
|
||||
GENERATED_LEN_ATTR_NAME,
|
||||
)
|
||||
from _pydevd_bundle.pydevd_safe_repr import SafeRepr
|
||||
from _pydevd_bundle import pydevd_constants
|
||||
|
||||
# Message appended (as a pseudo-entry value) when a container holds more items than the configured cap.
TOO_LARGE_MSG = "Maximum number of items (%s) reached. To show more items customize the value of the PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS environment variable."
# Pseudo attribute name under which TOO_LARGE_MSG is reported by the resolvers below.
TOO_LARGE_ATTR = "Unable to handle:"
|
||||
|
||||
|
||||
# =======================================================================================================================
# UnableToResolveVariableException
# =======================================================================================================================
class UnableToResolveVariableException(Exception):
    """Raised when a resolver cannot map a given key/attribute back to a value."""
    pass
|
||||
|
||||
|
||||
# Fall back to a plain dict when OrderedDict is unavailable.
# Fix: narrowed the bare `except:` (which would also swallow
# SystemExit/KeyboardInterrupt) to ImportError.
try:
    from collections import OrderedDict
except ImportError:
    OrderedDict = dict

# Optional Jython support: the java package is only importable on Jython.
try:
    import java.lang  # @UnresolvedImport
except ImportError:
    pass
|
||||
|
||||
# =======================================================================================================================
|
||||
# See: pydevd_extension_api module for resolver interface
|
||||
# =======================================================================================================================
|
||||
|
||||
|
||||
def sorted_attributes_key(attr_name):
    """Sort key grouping names: regular first, then _single, then __double, then __dunder__."""
    if attr_name.startswith("__"):
        # Dunder names (__x__) sort after plain double-underscore names (__x).
        rank = 3 if attr_name.endswith("__") else 2
    elif attr_name.startswith("_"):
        rank = 1
    else:
        rank = 0
    return (rank, attr_name)
|
||||
|
||||
|
||||
# =======================================================================================================================
# DefaultResolver
# =======================================================================================================================
class DefaultResolver:
    """
    DefaultResolver is the class that'll actually resolve how to show some variable.
    """

    def resolve(self, var, attribute):
        """Resolve a child by plain attribute access."""
        return getattr(var, attribute)

    def get_contents_debug_adapter_protocol(self, obj, fmt=None):
        """Return a sorted list of (attr_name, attr_value, evaluate-name suffix) for obj."""
        # Fix: used___dict__ was previously unbound when the Jython branch
        # (falsy MethodWrapperType) was taken, causing a NameError below.
        used___dict__ = False
        if MethodWrapperType:
            dct, used___dict__ = self._get_py_dictionary(obj)
        else:
            dct = self._get_jy_dictionary(obj)[0]

        lst = sorted(dct.items(), key=lambda tup: sorted_attributes_key(tup[0]))
        if used___dict__:
            eval_name = ".__dict__[%s]"
        else:
            eval_name = ".%s"

        ret = []
        for attr_name, attr_value in lst:
            entry = (attr_name, attr_value, eval_name % attr_name)
            ret.append(entry)

        return ret

    def get_dictionary(self, var, names=None, used___dict__=False):
        """Return a plain dict mapping attribute name -> value for var."""
        if MethodWrapperType:
            return self._get_py_dictionary(var, names, used___dict__=used___dict__)[0]
        else:
            return self._get_jy_dictionary(var)[0]

    def _get_jy_dictionary(self, obj):
        # Jython-only path: reflect over Java classes to list methods/fields.
        ret = {}
        found = java.util.HashMap()

        original = obj
        if hasattr_checked(obj, "__class__") and obj.__class__ == java.lang.Class:
            # get info about superclasses
            classes = []
            classes.append(obj)
            c = obj.getSuperclass()
            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                interfs.extend(obj.getInterfaces())
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                declaredMethods = obj.getDeclaredMethods()
                declaredFields = obj.getDeclaredFields()
                for i in range(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret[name] = declaredMethods[i].toString()
                    found.put(name, 1)

                for i in range(len(declaredFields)):
                    name = declaredFields[i].getName()
                    found.put(name, 1)
                    # if declaredFields[i].isAccessible():
                    declaredFields[i].setAccessible(True)
                    # ret[name] = declaredFields[i].get( declaredFields[i] )
                    try:
                        ret[name] = declaredFields[i].get(original)
                    except:
                        ret[name] = declaredFields[i].toString()

        # this simple dir does not always get all the info, that's why we have the part before
        # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
        # charAt don't appear)
        try:
            d = dir(original)
            for name in d:
                if found.get(name) != 1:
                    ret[name] = getattr(original, name)
        except:
            # sometimes we're unable to do a dir
            pass

        return ret

    def get_names(self, var):
        """Return (names, used___dict__): candidate names and whether __dict__ access is required."""
        used___dict__ = False
        try:
            names = dir(var)
        except Exception:
            names = []
        if not names:
            # dir() failed or returned nothing: fall back to the instance __dict__.
            if hasattr_checked(var, "__dict__"):
                names = list(var.__dict__)
                used___dict__ = True
        return names, used___dict__

    def _get_py_dictionary(self, var, names=None, used___dict__=False):
        """
        :return tuple(names, used___dict__), where used___dict__ means we have to access
        using obj.__dict__[name] instead of getattr(obj, name)
        """

        # On PyPy we never show functions. This is because of a corner case where PyPy becomes
        # absurdly slow -- it takes almost half a second to introspect a single numpy function (so,
        # the related test, "test_case_16_resolve_numpy_array", times out... this probably isn't
        # specific to numpy, but to any library where the CPython bridge is used, but as we
        # can't be sure in the debugger, we play it safe and don't show it at all).
        filter_function = IS_PYPY

        if not names:
            names, used___dict__ = self.get_names(var)
        d = {}

        # Be aware that the order in which the filters are applied attempts to
        # optimize the operation by removing as many items as possible in the
        # first filters, leaving fewer items for later filters

        timer = Timer()
        cls = type(var)
        for name in names:
            try:
                name_as_str = name
                if name_as_str.__class__ != str:
                    name_as_str = "%r" % (name_as_str,)

                if not used___dict__:
                    attr = getattr(var, name)
                else:
                    attr = var.__dict__[name]

                # filter functions?
                if filter_function:
                    if inspect.isroutine(attr) or isinstance(attr, MethodWrapperType):
                        continue
            except:
                # if some error occurs getting it, let's put it to the user.
                strIO = StringIO()
                traceback.print_exc(file=strIO)
                attr = strIO.getvalue()

            finally:
                # Warns (via Timer) when introspecting a single attribute is slow.
                timer.report_if_getting_attr_slow(cls, name_as_str)

            d[name_as_str] = attr

        return d, used___dict__
|
||||
|
||||
|
||||
class DAPGrouperResolver:
    """Delegates DAP content listing to the grouper object itself."""

    def get_contents_debug_adapter_protocol(self, obj, fmt=None):
        # The grouper knows how to enumerate its own children; fmt is unused.
        contents = obj.get_contents_debug_adapter_protocol()
        return contents
|
||||
|
||||
|
||||
_basic_immutable_types = (int, float, complex, str, bytes, type(None), bool, frozenset)
|
||||
|
||||
|
||||
def _does_obj_repr_evaluate_to_obj(obj):
|
||||
"""
|
||||
If obj is an object where evaluating its representation leads to
|
||||
the same object, return True, otherwise, return False.
|
||||
"""
|
||||
try:
|
||||
if isinstance(obj, tuple):
|
||||
for o in obj:
|
||||
if not _does_obj_repr_evaluate_to_obj(o):
|
||||
return False
|
||||
return True
|
||||
else:
|
||||
return isinstance(obj, _basic_immutable_types)
|
||||
except:
|
||||
return False
|
||||
|
||||
|
||||
# =======================================================================================================================
# DictResolver
# =======================================================================================================================
class DictResolver:
    """Resolves dict entries; also used directly for the global/local scopes."""

    # Keys are only sorted on older interpreters (3.6+ dicts keep insertion order).
    sort_keys = not IS_PY36_OR_GREATER

    def resolve(self, dct, key):
        """Map a displayed key string back to the dict value (or an attribute of dct)."""
        if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
            return None

        if "(" not in key:
            # we have to treat that because the dict resolver is also used to directly resolve the global and local
            # scopes (which already have the items directly)
            try:
                return dct[key]
            except:
                return getattr(dct, key)

        # ok, we have to iterate over the items to find the one that matches the id, because that's the only way
        # to actually find the reference from the string we have before.
        expected_id = int(key.split("(")[-1][:-1])
        for key, val in dct.items():
            if id(key) == expected_id:
                return val

        raise UnableToResolveVariableException()

    def key_to_str(self, key, fmt=None):
        """Render a dict key for display; honors the 'hex' formatting option."""
        if fmt is not None:
            if fmt.get("hex", False):
                safe_repr = SafeRepr()
                safe_repr.convert_to_hex = True
                return safe_repr(key)
        return "%r" % (key,)

    def init_dict(self):
        # Hook point: subclasses may return a different mapping type.
        return {}

    def get_contents_debug_adapter_protocol(self, dct, fmt=None):
        """
        This method is to be used in the case where the variables are all saved by its id (and as
        such don't need to have the `resolve` method called later on, so, keys don't need to
        embed the reference in the key).

        Note that the return should be ordered.

        :return list(tuple(name:str, value:object, evaluateName:str))
        """
        ret = []

        i = 0

        # Tracks already-used display strings so duplicates get disambiguated.
        found_representations = set()

        for key, val in dct.items():
            i += 1
            key_as_str = self.key_to_str(key, fmt)

            if key_as_str not in found_representations:
                found_representations.add(key_as_str)
            else:
                # If the key would be a duplicate, add the key id (otherwise
                # VSCode won't show all keys correctly).
                # See: https://github.com/microsoft/debugpy/issues/148
                key_as_str = "%s (id: %s)" % (key_as_str, id(key))
                found_representations.add(key_as_str)

            if _does_obj_repr_evaluate_to_obj(key):
                s = self.key_to_str(key)  # do not format the key
                eval_key_str = "[%s]" % (s,)
            else:
                # Key repr does not round-trip; no safe evaluate-name exists.
                eval_key_str = None
            ret.append((key_as_str, val, eval_key_str))
            if i >= pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS:
                ret.append((TOO_LARGE_ATTR, TOO_LARGE_MSG % (pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS,), None))
                break

        # in case the class extends built-in type and has some additional fields
        from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(dct, fmt)

        if from_default_resolver:
            ret = from_default_resolver + ret

        if self.sort_keys:
            ret = sorted(ret, key=lambda tup: sorted_attributes_key(tup[0]))

        ret.append((GENERATED_LEN_ATTR_NAME, len(dct), partial(_apply_evaluate_name, evaluate_name="len(%s)")))
        return ret

    def get_dictionary(self, dct):
        """Return a display dict whose keys embed id(key) so resolve() can find them again."""
        ret = self.init_dict()

        i = 0
        for key, val in dct.items():
            i += 1
            # we need to add the id because otherwise we cannot find the real object to get its contents later on.
            key = "%s (%s)" % (self.key_to_str(key), id(key))
            ret[key] = val
            if i >= pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS:
                ret[TOO_LARGE_ATTR] = TOO_LARGE_MSG % (pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS,)
                break

        # in case if the class extends built-in type and has some additional fields
        additional_fields = defaultResolver.get_dictionary(dct)
        ret.update(additional_fields)
        ret[GENERATED_LEN_ATTR_NAME] = len(dct)
        return ret
|
||||
|
||||
|
||||
def _apply_evaluate_name(parent_name, evaluate_name):
|
||||
return evaluate_name % (parent_name,)
|
||||
|
||||
|
||||
class MoreItemsRange:
    """Lazily exposes the slice value[from_i:to_i) of a larger container."""

    def __init__(self, value, from_i, to_i):
        self.value = value
        self.from_i = from_i
        self.to_i = to_i

    def get_contents_debug_adapter_protocol(self, _self, fmt=None):
        """Return [(index-name, item, evaluate-name)] for the items in this range."""
        total = len(self.value)

        # Zero-pad indices to the width of the largest index; hex if requested.
        format_str = "%0" + str(int(len(str(total - 1)))) + "d"
        if fmt is not None and fmt.get("hex", False):
            format_str = "0x%0" + str(int(len(hex(total).lstrip("0x")))) + "x"

        entries = []
        for offset, item in enumerate(self.value[self.from_i : self.to_i]):
            index = offset + self.from_i
            entries.append((format_str % index, item, "[%s]" % index))
        return entries

    def get_dictionary(self, _self, fmt=None):
        """Return the range contents as a name -> item dict."""
        return {name: item for name, item, _ in self.get_contents_debug_adapter_protocol(self, fmt)}

    def resolve(self, attribute):
        """
        :param var: that's the original object we're dealing with.
        :param attribute: that's the key to resolve
            -- either the dict key in get_dictionary or the name in the dap protocol.
        """
        return self.value[int(attribute)]

    def __eq__(self, o):
        # Identity of the backing container matters (not equality).
        if not isinstance(o, MoreItemsRange):
            return False
        return self.value is o.value and self.from_i == o.from_i and self.to_i == o.to_i

    def __str__(self):
        return "[%s:%s]" % (self.from_i, self.to_i)

    __repr__ = __str__
|
||||
|
||||
|
||||
class MoreItems:
    """Placeholder shown as '...' which expands into MoreItemsRange buckets for the remaining items."""

    def __init__(self, value, handled_items):
        # value: the full container; handled_items: how many leading entries were already shown.
        self.value = value
        self.handled_items = handled_items

    def get_contents_debug_adapter_protocol(self, _self, fmt=None):
        """Split the not-yet-shown tail of the container into fixed-size range buckets."""
        total_items = len(self.value)
        remaining = total_items - self.handled_items
        bucket_size = pydevd_constants.PYDEVD_CONTAINER_BUCKET_SIZE

        from_i = self.handled_items
        to_i = from_i + min(bucket_size, remaining)

        ret = []
        while remaining > 0:
            remaining -= bucket_size
            more_items_range = MoreItemsRange(self.value, from_i, to_i)
            ret.append((str(more_items_range), more_items_range, None))

            # Advance to the next bucket (min() clamps the final, partial bucket).
            from_i = to_i
            to_i = from_i + min(bucket_size, remaining)

        return ret

    def get_dictionary(self, _self, fmt=None):
        # Same contents as the DAP listing, as a name -> bucket dict.
        dct = {}
        for key, obj, _ in self.get_contents_debug_adapter_protocol(self, fmt):
            dct[key] = obj
        return dct

    def resolve(self, attribute):
        # attribute has the form "[from:to]" -- the str() of a MoreItemsRange.
        from_i, to_i = attribute[1:-1].split(":")
        from_i = int(from_i)
        to_i = int(to_i)
        return MoreItemsRange(self.value, from_i, to_i)

    def __eq__(self, o):
        # Identity of the backing container matters (not equality).
        return isinstance(o, MoreItems) and self.value is o.value

    def __str__(self):
        return "..."

    __repr__ = __str__
|
||||
|
||||
|
||||
class ForwardInternalResolverToObject:
    """
    To be used when we provide some internal object that'll actually do the resolution.
    """

    def get_contents_debug_adapter_protocol(self, obj, fmt=None):
        return obj.get_contents_debug_adapter_protocol(fmt)

    def get_dictionary(self, var, fmt={}):
        # NOTE: the mutable default is only read, never mutated here; kept for interface compatibility.
        return var.get_dictionary(var, fmt)

    def resolve(self, var, attribute):
        return var.resolve(attribute)
|
||||
|
||||
|
||||
class TupleResolver:  # to enumerate tuples and lists
    def resolve(self, var, attribute):
        """
        :param var: that's the original object we're dealing with.
        :param attribute: that's the key to resolve
            -- either the dict key in get_dictionary or the name in the dap protocol.
        """
        if attribute in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
            return None
        try:
            return var[int(attribute)]
        except:
            # Not an index: either the synthetic "more" placeholder or a real attribute.
            if attribute == "more":
                return MoreItems(var, pydevd_constants.PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS)

            return getattr(var, attribute)

    def get_contents_debug_adapter_protocol(self, lst, fmt=None):
        """
        This method is to be used in the case where the variables are all saved by its id (and as
        such don't need to have the `resolve` method called later on, so, keys don't need to
        embed the reference in the key).

        Note that the return should be ordered.

        :return list(tuple(name:str, value:object, evaluateName:str))
        """
        lst_len = len(lst)
        ret = []

        # Zero-pad indices to the width of the largest index; hex if requested.
        format_str = "%0" + str(int(len(str(lst_len - 1)))) + "d"
        if fmt is not None and fmt.get("hex", False):
            format_str = "0x%0" + str(int(len(hex(lst_len).lstrip("0x")))) + "x"

        initial_expanded = pydevd_constants.PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS
        for i, item in enumerate(lst):
            ret.append((format_str % i, item, "[%s]" % i))

            if i >= initial_expanded - 1:
                # Cap the inline listing; the tail is reachable through "more".
                if (lst_len - initial_expanded) < pydevd_constants.PYDEVD_CONTAINER_BUCKET_SIZE:
                    # Special case: if we have just 1 more bucket just put it inline.
                    item = MoreItemsRange(lst, initial_expanded, lst_len)

                else:
                    # Multiple buckets
                    item = MoreItems(lst, initial_expanded)
                ret.append(("more", item, None))
                break

        # Needed in case the class extends the built-in type and has some additional fields.
        from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(lst, fmt=fmt)
        if from_default_resolver:
            ret = from_default_resolver + ret

        ret.append((GENERATED_LEN_ATTR_NAME, len(lst), partial(_apply_evaluate_name, evaluate_name="len(%s)")))
        return ret

    def get_dictionary(self, var, fmt={}):
        """Return a display dict of padded index -> item (plus 'more' and the length entry)."""
        l = len(var)
        d = {}

        format_str = "%0" + str(int(len(str(l - 1)))) + "d"
        if fmt is not None and fmt.get("hex", False):
            format_str = "0x%0" + str(int(len(hex(l).lstrip("0x")))) + "x"

        initial_expanded = pydevd_constants.PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS
        for i, item in enumerate(var):
            d[format_str % i] = item

            if i >= initial_expanded - 1:
                item = MoreItems(var, initial_expanded)
                d["more"] = item
                break

        # in case if the class extends built-in type and has some additional fields
        additional_fields = defaultResolver.get_dictionary(var)
        d.update(additional_fields)
        d[GENERATED_LEN_ATTR_NAME] = len(var)
        return d
|
||||
|
||||
|
||||
# =======================================================================================================================
# SetResolver
# =======================================================================================================================
class SetResolver:
    """
    Resolves a set as dict id(object)->object
    """

    def get_contents_debug_adapter_protocol(self, obj, fmt=None):
        """Return [(str(id(item)), item, None)] entries, capped at the configured maximum."""
        ret = []

        for i, item in enumerate(obj):
            ret.append((str(id(item)), item, None))

            if i >= pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS:
                ret.append((TOO_LARGE_ATTR, TOO_LARGE_MSG % (pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS,), None))
                break

        # Needed in case the class extends the built-in type and has some additional fields.
        from_default_resolver = defaultResolver.get_contents_debug_adapter_protocol(obj, fmt=fmt)
        if from_default_resolver:
            ret = from_default_resolver + ret
        ret.append((GENERATED_LEN_ATTR_NAME, len(obj), partial(_apply_evaluate_name, evaluate_name="len(%s)")))
        return ret

    def resolve(self, var, attribute):
        """Map a displayed id-string back to the matching element of the set."""
        if attribute in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
            return None

        try:
            attribute = int(attribute)
        except:
            # Not an id: treat it as a regular attribute of the set object.
            return getattr(var, attribute)

        for v in var:
            if id(v) == attribute:
                return v

        raise UnableToResolveVariableException("Unable to resolve %s in %s" % (attribute, var))

    def get_dictionary(self, var):
        """Return a display dict of str(id(item)) -> item, capped at the configured maximum."""
        d = {}
        for i, item in enumerate(var):
            d[str(id(item))] = item

            if i >= pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS:
                d[TOO_LARGE_ATTR] = TOO_LARGE_MSG % (pydevd_constants.PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS,)
                break

        # in case if the class extends built-in type and has some additional fields
        additional_fields = defaultResolver.get_dictionary(var)
        d.update(additional_fields)
        d[GENERATED_LEN_ATTR_NAME] = len(var)
        return d

    def change_var_from_name(self, container, name, new_value):
        """Replace the element whose id-string is *name* with new_value; return the new id-string or None."""
        # The name given in this case must be the id(item), so, we can actually
        # iterate in the set and see which item matches the given id.

        try:
            # Check that the new value can actually be added to a set (i.e.: it's hashable/comparable).
            set().add(new_value)
        except:
            return None

        for item in container:
            if str(id(item)) == name:
                container.remove(item)
                container.add(new_value)
                return str(id(new_value))

        return None
|
||||
|
||||
|
||||
# =======================================================================================================================
# InstanceResolver
# =======================================================================================================================
class InstanceResolver:
    """Jython-only: resolves a Java instance by reflecting over its declared fields."""

    def resolve(self, var, attribute):
        field = var.__class__.getDeclaredField(attribute)
        # Bypass Java access control so private fields can be read.
        field.setAccessible(True)
        return field.get(var)

    def get_dictionary(self, obj):
        """Return a dict of declared Java field name -> value for obj."""
        ret = {}

        declaredFields = obj.__class__.getDeclaredFields()
        for i in range(len(declaredFields)):
            name = declaredFields[i].getName()
            try:
                declaredFields[i].setAccessible(True)
                ret[name] = declaredFields[i].get(obj)
            except:
                pydev_log.exception()

        return ret
|
||||
|
||||
|
||||
# =======================================================================================================================
# JyArrayResolver
# =======================================================================================================================
class JyArrayResolver:
    """
    This resolves a regular Object[] array from java
    """

    def resolve(self, var, attribute):
        # The synthetic length entry has no child value.
        if attribute == GENERATED_LEN_ATTR_NAME:
            return None
        return var[int(attribute)]

    def get_dictionary(self, obj):
        """Return a dict of index -> element, plus the synthetic length entry."""
        contents = {index: obj[index] for index in range(len(obj))}
        contents[GENERATED_LEN_ATTR_NAME] = len(obj)
        return contents
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# MultiValueDictResolver
|
||||
# =======================================================================================================================
|
||||
class MultiValueDictResolver(DictResolver):
    """DictResolver variant for Django's MultiValueDict, whose entries are
    addressed by the id() embedded in the displayed key string."""

    def resolve(self, dct, key):
        if key in (GENERATED_LEN_ATTR_NAME, TOO_LARGE_ATTR):
            return None

        # The displayed key looks like "'name' (12345)" where 12345 is the
        # id() of the real key object: that id is the only way to find the
        # actual reference again, so scan the keys for a matching id.
        expected_id = int(key.split("(")[-1][:-1])
        for candidate in list(dct.keys()):
            values = dct.getlist(candidate)
            if id(candidate) == expected_id:
                return values

        raise UnableToResolveVariableException()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# DjangoFormResolver
|
||||
# =======================================================================================================================
|
||||
class DjangoFormResolver(DefaultResolver):
    """DefaultResolver specialization for Django forms: shows the raw
    `_errors` attribute instead of triggering the `errors` property."""

    def get_dictionary(self, var, names=None):
        # Do not call self.errors because it is a property and has side effects.
        names, used___dict__ = self.get_names(var)

        had_errors = "errors" in names
        if had_errors:
            names.remove("errors")

        d = defaultResolver.get_dictionary(var, names=names, used___dict__=used___dict__)
        if had_errors:
            # Surface the already-computed errors (if any) without
            # re-running validation.
            try:
                d["errors"] = getattr(var, "_errors")
            except:
                d["errors"] = None
        return d
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# DequeResolver
|
||||
# =======================================================================================================================
|
||||
class DequeResolver(TupleResolver):
    """TupleResolver specialization for collections.deque: also exposes `maxlen`."""

    def get_dictionary(self, var):
        contents = TupleResolver.get_dictionary(self, var)
        contents["maxlen"] = getattr(var, "maxlen", None)
        return contents
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# OrderedDictResolver
|
||||
# =======================================================================================================================
|
||||
class OrderedDictResolver(DictResolver):
    """DictResolver for OrderedDict: presents entries in insertion order."""

    # Keep the dict's own ordering instead of sorting keys for display.
    sort_keys = False

    def init_dict(self):
        """Return the container used to accumulate resolved entries."""
        return OrderedDict()
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# FrameResolver
|
||||
# =======================================================================================================================
|
||||
class FrameResolver:
    """
    This resolves a frame, exposing its internals, call stack and locals.
    """

    def resolve(self, obj, attribute):
        """Return the pseudo-attribute of the frame, or None if unknown."""
        if attribute == "__internals__":
            return defaultResolver.get_dictionary(obj)
        if attribute == "stack":
            return self.get_frame_stack(obj)
        if attribute == "f_locals":
            return obj.f_locals
        return None

    def get_dictionary(self, obj):
        """Return the three pseudo-attributes shown for a frame."""
        return {
            "__internals__": defaultResolver.get_dictionary(obj),
            "stack": self.get_frame_stack(obj),
            "f_locals": obj.f_locals,
        }

    def get_frame_stack(self, frame):
        """Return a list of display names from `frame` back to the outermost caller."""
        names = []
        if frame is not None:
            names.append(self.get_frame_name(frame))

            while frame.f_back:
                frame = frame.f_back
                names.append(self.get_frame_name(frame))

        return names

    def get_frame_name(self, frame):
        """Return a one-line display name for a frame (best effort)."""
        if frame is None:
            return "None"
        try:
            fname = basename(frame.f_code.co_filename)
            return "frame: %s [%s:%s] id:%s" % (frame.f_code.co_name, fname, frame.f_lineno, id(frame))
        except:
            return "frame object"
|
||||
|
||||
|
||||
# Singleton instances of each resolver, shared by the variable-inspection
# machinery (one stateless instance per resolver type is enough).
defaultResolver = DefaultResolver()
dictResolver = DictResolver()
tupleResolver = TupleResolver()
instanceResolver = InstanceResolver()
jyArrayResolver = JyArrayResolver()
setResolver = SetResolver()
multiValueDictResolver = MultiValueDictResolver()
djangoFormResolver = DjangoFormResolver()
dequeResolver = DequeResolver()
orderedDictResolver = OrderedDictResolver()
frameResolver = FrameResolver()
dapGrouperResolver = DAPGrouperResolver()
forwardInternalResolverToObject = ForwardInternalResolverToObject()
|
||||
|
||||
|
||||
class InspectStub:
    """Minimal stand-in for the `inspect` module, used when the real module
    cannot be imported: answers False for every query."""

    def isbuiltin(self, _args):
        return False

    def isroutine(self, object):
        return False
|
||||
|
||||
|
||||
# Prefer the real inspect module; if it cannot be imported for any reason,
# fall back to the stub above (which simply answers False for everything).
try:
    import inspect
except:
    inspect = InspectStub()
|
||||
|
||||
|
||||
def get_var_scope(attr_name, attr_value, evaluate_name, handle_return_values):
    """Classify an attribute into a DAP presentation scope.

    Returns one of the DAPGrouper.SCOPE_* constants, or "" when the
    attribute should stay in the regular (ungrouped) scope.
    """
    if attr_name.startswith("'"):
        if attr_name.endswith("'"):
            # i.e.: strings denote that it is a regular value in some container.
            return ""
        else:
            i = attr_name.find("__' (")
            if i >= 0:
                # Handle attr_name such as: >>'__name__' (1732494379184)<<
                # (a quoted name followed by an id) -- strip quote and id so
                # the dunder checks below see the plain name.
                attr_name = attr_name[1 : i + 2]

    # Synthetic entries (return-values dict, generated len()) are never grouped.
    if handle_return_values and attr_name == RETURN_VALUES_DICT:
        return ""

    elif attr_name == GENERATED_LEN_ATTR_NAME:
        return ""

    if attr_name.startswith("__") and attr_name.endswith("__"):
        return DAPGrouper.SCOPE_SPECIAL_VARS

    if attr_name.startswith("_") or attr_name.endswith("__"):
        return DAPGrouper.SCOPE_PROTECTED_VARS

    try:
        if inspect.isroutine(attr_value) or isinstance(attr_value, MethodWrapperType):
            return DAPGrouper.SCOPE_FUNCTION_VARS

        elif inspect.isclass(attr_value):
            return DAPGrouper.SCOPE_CLASS_VARS
    except:
        # It's possible that isinstance throws an exception when dealing with user-code.
        if DebugInfoHolder.DEBUG_TRACE_LEVEL > 0:
            pydev_log.exception()

    return ""
|
||||
@@ -0,0 +1,339 @@
|
||||
"""
|
||||
Vendored copy of runpy from the standard library.
|
||||
|
||||
It's vendored so that we can properly ignore it when used to start user code
|
||||
while still making it possible for the user to debug runpy itself.
|
||||
|
||||
runpy.py - locating and running Python code using the module namespace
|
||||
|
||||
Provides support for locating and running Python scripts using the Python
|
||||
module namespace instead of the native filesystem.
|
||||
|
||||
This allows Python code to play nicely with non-filesystem based PEP 302
|
||||
importers when locating support scripts as well as when importing modules.
|
||||
"""
|
||||
# Written by Nick Coghlan <ncoghlan at gmail.com>
|
||||
# to implement PEP 338 (Executing Modules as Scripts)
|
||||
|
||||
import sys
|
||||
import importlib.machinery # importlib first so we can test #15386 via -m
|
||||
import importlib.util
|
||||
import io
|
||||
import types
|
||||
import os
|
||||
|
||||
# Public API of this vendored runpy copy; the underscore helpers are used
# directly by the debugger.
__all__ = [
    "run_module",
    "run_path",
]
|
||||
|
||||
|
||||
# Note: fabioz: Don't use pkgutil (when handling caught exceptions we could end up
|
||||
# showing exceptions in pkgutil.get_imported (specifically the KeyError), so,
|
||||
# create a copy of the function we need to properly ignore this exception when
|
||||
# running the program.
|
||||
def pkgutil_get_importer(path_item):
    """Retrieve a finder for the given path item.

    Vendored copy of pkgutil.get_importer, so the KeyError raised internally
    by the original is never surfaced to users breaking on caught exceptions.

    The returned finder is cached in sys.path_importer_cache if it was
    newly created by a path hook.

    The cache (or part of it) can be cleared manually if a
    rescan of sys.path_hooks is necessary.
    """
    if path_item in sys.path_importer_cache:
        return sys.path_importer_cache[path_item]

    importer = None
    for path_hook in sys.path_hooks:
        try:
            importer = path_hook(path_item)
        except ImportError:
            continue
        # Cache the newly-created finder (first writer wins).
        sys.path_importer_cache.setdefault(path_item, importer)
        break
    return importer
|
||||
|
||||
|
||||
class _TempModule(object):
|
||||
"""Temporarily replace a module in sys.modules with an empty namespace"""
|
||||
|
||||
def __init__(self, mod_name):
|
||||
self.mod_name = mod_name
|
||||
self.module = types.ModuleType(mod_name)
|
||||
self._saved_module = []
|
||||
|
||||
def __enter__(self):
|
||||
mod_name = self.mod_name
|
||||
try:
|
||||
self._saved_module.append(sys.modules[mod_name])
|
||||
except KeyError:
|
||||
pass
|
||||
sys.modules[mod_name] = self.module
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
if self._saved_module:
|
||||
sys.modules[self.mod_name] = self._saved_module[0]
|
||||
else:
|
||||
del sys.modules[self.mod_name]
|
||||
self._saved_module = []
|
||||
|
||||
|
||||
class _ModifiedArgv0(object):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
self._saved_value = self._sentinel = object()
|
||||
|
||||
def __enter__(self):
|
||||
if self._saved_value is not self._sentinel:
|
||||
raise RuntimeError("Already preserving saved value")
|
||||
self._saved_value = sys.argv[0]
|
||||
sys.argv[0] = self.value
|
||||
|
||||
def __exit__(self, *args):
|
||||
self.value = self._sentinel
|
||||
sys.argv[0] = self._saved_value
|
||||
|
||||
|
||||
# TODO: Replace these helpers with importlib._bootstrap_external functions.
|
||||
def _run_code(code, run_globals, init_globals=None, mod_name=None, mod_spec=None, pkg_name=None, script_name=None):
|
||||
"""Helper to run code in nominated namespace"""
|
||||
if init_globals is not None:
|
||||
run_globals.update(init_globals)
|
||||
if mod_spec is None:
|
||||
loader = None
|
||||
fname = script_name
|
||||
cached = None
|
||||
else:
|
||||
loader = mod_spec.loader
|
||||
fname = mod_spec.origin
|
||||
cached = mod_spec.cached
|
||||
if pkg_name is None:
|
||||
pkg_name = mod_spec.parent
|
||||
run_globals.update(
|
||||
__name__=mod_name, __file__=fname, __cached__=cached, __doc__=None, __loader__=loader, __package__=pkg_name, __spec__=mod_spec
|
||||
)
|
||||
exec(code, run_globals)
|
||||
return run_globals
|
||||
|
||||
|
||||
def _run_module_code(code, init_globals=None, mod_name=None, mod_spec=None, pkg_name=None, script_name=None):
    """Helper to run code in new namespace with sys modified."""
    fname = mod_spec.origin if mod_spec is not None else script_name
    with _TempModule(mod_name) as temp_module, _ModifiedArgv0(fname):
        mod_globals = temp_module.module.__dict__
        _run_code(code, mod_globals, init_globals, mod_name, mod_spec, pkg_name, script_name)
        # Copy while the temporary module is still alive: its globals may be
        # cleared once the module object goes away.
        return mod_globals.copy()
|
||||
|
||||
|
||||
# Helper to get the full name, spec and code for a module
|
||||
# Helper to get the full name, spec and code for a module
def _get_module_details(mod_name, error=ImportError):
    """Return (mod_name, spec, code) for `mod_name`, raising `error` on failure.

    Recurses into `pkg.__main__` when `mod_name` names a package.
    """
    if mod_name.startswith("."):
        raise error("Relative module names not supported")
    pkg_name, _, _ = mod_name.rpartition(".")
    if pkg_name:
        # Try importing the parent to avoid catching initialization errors
        try:
            __import__(pkg_name)
        except ImportError as e:
            # If the parent or higher ancestor package is missing, let the
            # error be raised by find_spec() below and then be caught. But do
            # not allow other errors to be caught.
            if e.name is None or (e.name != pkg_name and not pkg_name.startswith(e.name + ".")):
                raise
        # Warn if the module has already been imported under its normal name
        existing = sys.modules.get(mod_name)
        if existing is not None and not hasattr(existing, "__path__"):
            from warnings import warn

            msg = (
                "{mod_name!r} found in sys.modules after import of "
                "package {pkg_name!r}, but prior to execution of "
                "{mod_name!r}; this may result in unpredictable "
                "behaviour".format(mod_name=mod_name, pkg_name=pkg_name)
            )
            warn(RuntimeWarning(msg))

    try:
        spec = importlib.util.find_spec(mod_name)
    except (ImportError, AttributeError, TypeError, ValueError) as ex:
        # This hack fixes an impedance mismatch between pkgutil and
        # importlib, where the latter raises other errors for cases where
        # pkgutil previously raised ImportError
        msg = "Error while finding module specification for {!r} ({}: {})"
        if mod_name.endswith(".py"):
            # A common user mistake: passing a filename instead of a module name.
            msg += f". Try using '{mod_name[:-3]}' instead of " f"'{mod_name}' as the module name."
        raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
    if spec is None:
        raise error("No module named %s" % mod_name)
    if spec.submodule_search_locations is not None:
        # `mod_name` is a package: execute its __main__ submodule instead.
        if mod_name == "__main__" or mod_name.endswith(".__main__"):
            raise error("Cannot use package as __main__ module")
        try:
            pkg_main_name = mod_name + ".__main__"
            return _get_module_details(pkg_main_name, error)
        except error as e:
            if mod_name not in sys.modules:
                raise  # No module loaded; being a package is irrelevant
            raise error(("%s; %r is a package and cannot " + "be directly executed") % (e, mod_name))
    loader = spec.loader
    if loader is None:
        raise error("%r is a namespace package and cannot be executed" % mod_name)
    try:
        code = loader.get_code(mod_name)
    except ImportError as e:
        raise error(format(e)) from e
    if code is None:
        raise error("No code object available for %s" % mod_name)
    return mod_name, spec, code
|
||||
|
||||
|
||||
class _Error(Exception):
    """Error that _run_module_as_main() should report without a traceback.

    Raised internally so module-lookup failures become a clean sys.exit()
    message rather than a full stack trace.
    """
|
||||
|
||||
|
||||
# XXX ncoghlan: Should this be documented and made public?
|
||||
# (Current thoughts: don't repeat the mistake that lead to its
|
||||
# creation when run_module() no longer met the needs of
|
||||
# mainmodule.c, but couldn't be changed because it was public)
|
||||
def _run_module_as_main(mod_name, alter_argv=True):
    """Runs the designated module in the __main__ namespace

    Note that the executed module will have full access to the
    __main__ namespace. If this is not desirable, the run_module()
    function should be used to run the module code in a fresh namespace.

    At the very least, these variables in __main__ will be overwritten:
        __name__
        __file__
        __cached__
        __loader__
        __package__
    """
    try:
        if alter_argv or mod_name != "__main__":  # i.e. -m switch
            mod_name, mod_spec, code = _get_module_details(mod_name, _Error)
        else:  # i.e. directory or zipfile execution
            mod_name, mod_spec, code = _get_main_module_details(_Error)
    except _Error as exc:
        # Report lookup failures as a clean interpreter-style message.
        sys.exit("%s: %s" % (sys.executable, exc))

    main_globals = sys.modules["__main__"].__dict__
    if alter_argv:
        sys.argv[0] = mod_spec.origin
    return _run_code(code, main_globals, None, "__main__", mod_spec)
|
||||
|
||||
|
||||
def run_module(mod_name, init_globals=None, run_name=None, alter_sys=False):
    """Execute a module's code without importing it

    Returns the resulting top level namespace dictionary
    """
    mod_name, mod_spec, code = _get_module_details(mod_name)
    run_name = mod_name if run_name is None else run_name
    if not alter_sys:
        # Leave the sys module alone
        return _run_code(code, {}, init_globals, run_name, mod_spec)
    return _run_module_code(code, init_globals, run_name, mod_spec)
|
||||
|
||||
|
||||
def _get_main_module_details(error=ImportError):
    # Helper that gives a nicer error message when attempting to
    # execute a zipfile or directory by invoking __main__.py.
    # It temporarily moves the standard __main__ out of the way so that the
    # preexisting __loader__ entry doesn't cause issues.
    main_name = "__main__"
    saved_main = sys.modules.pop(main_name)
    try:
        return _get_module_details(main_name)
    except ImportError as exc:
        if main_name in str(exc):
            raise error("can't find %r module in %r" % (main_name, sys.path[0])) from exc
        raise
    finally:
        sys.modules[main_name] = saved_main
|
||||
|
||||
|
||||
# io.open_code was added in Python 3.8; on older versions fall back to
# tokenize.open, which also honours PEP 263 source-encoding declarations.
try:
    io_open_code = io.open_code
except AttributeError:
    # Compatibility with Python 3.6/3.7
    import tokenize

    io_open_code = tokenize.open
|
||||
|
||||
|
||||
def _get_code_from_file(run_name, fname):
    """Return (code, fname) for the script at `fname`.

    Tries to read a pre-compiled (pyc) file first and falls back to
    compiling the file as regular source code.
    """
    # Check for a compiled file first
    from pkgutil import read_code

    decoded_path = os.path.abspath(os.fsdecode(fname))
    with io_open_code(decoded_path) as stream:
        code = read_code(stream)

    if code is None:
        # That didn't work, so try it as normal source code
        with io_open_code(decoded_path) as stream:
            code = compile(stream.read(), fname, "exec")

    return code, fname
|
||||
|
||||
|
||||
def run_path(path_name, init_globals=None, run_name=None):
    """Execute code located at the specified filesystem location

    Returns the resulting top level namespace dictionary

    The file path may refer directly to a Python script (i.e.
    one that could be directly executed with execfile) or else
    it may refer to a zipfile or directory containing a top
    level __main__.py script.
    """
    if run_name is None:
        run_name = "<run_path>"
    pkg_name = run_name.rpartition(".")[0]
    importer = pkgutil_get_importer(path_name)
    # Trying to avoid importing imp so as to not consume the deprecation warning.
    # (NullImporter only exists on older Pythons; detect it by name instead of
    # importing imp.)
    is_NullImporter = False
    if type(importer).__module__ == "imp":
        if type(importer).__name__ == "NullImporter":
            is_NullImporter = True
    if isinstance(importer, type(None)) or is_NullImporter:
        # Not a valid sys.path entry, so run the code directly
        # execfile() doesn't help as we want to allow compiled files
        code, fname = _get_code_from_file(run_name, path_name)
        return _run_module_code(code, init_globals, run_name, pkg_name=pkg_name, script_name=fname)
    else:
        # Finder is defined for path, so add it to
        # the start of sys.path
        sys.path.insert(0, path_name)
        try:
            # Here's where things are a little different from the run_module
            # case. There, we only had to replace the module in sys while the
            # code was running and doing so was somewhat optional. Here, we
            # have no choice and we have to remove it even while we read the
            # code. If we don't do this, a __loader__ attribute in the
            # existing __main__ module may prevent location of the new module.
            mod_name, mod_spec, code = _get_main_module_details()
            with _TempModule(run_name) as temp_module, _ModifiedArgv0(path_name):
                mod_globals = temp_module.module.__dict__
                # Copy before the temp module is torn down (its globals may be
                # cleared when the module object goes away).
                return _run_code(code, mod_globals, init_globals, run_name, mod_spec, pkg_name).copy()
        finally:
            try:
                sys.path.remove(path_name)
            except ValueError:
                pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the module specified as the next command line argument
    if len(sys.argv) < 2:
        print("No module specified for execution", file=sys.stderr)
    else:
        del sys.argv[0]  # Make the requested module sys.argv[0]
        _run_module_as_main(sys.argv[0])
|
||||
@@ -0,0 +1,395 @@
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See LICENSE in the project root
|
||||
# for license information.
|
||||
|
||||
# Gotten from ptvsd for supporting the format expected there.
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import IS_PY36_OR_GREATER
|
||||
import locale
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
class SafeRepr(object):
    """Produce a size-bounded repr() of arbitrary objects.

    Large strings and collections are truncated (with "...") instead of being
    fully rendered, and any exception raised while computing the repr is
    swallowed and replaced with a placeholder string.
    """

    # Can be used to override the encoding from locale.getpreferredencoding()
    locale_preferred_encoding = None

    # Can be used to override the encoding used for sys.stdout.encoding
    sys_stdout_encoding = None

    # String types are truncated to maxstring_outer when at the outer-
    # most level, and truncated to maxstring_inner characters inside
    # collections.
    maxstring_outer = 2**16
    maxstring_inner = 128
    string_types = (str, bytes)
    # NOTE: intentionally shadows the builtin `bytes` at class scope.
    bytes = bytes
    set_info = (set, "{", "}", False)
    frozenset_info = (frozenset, "frozenset({", "})", False)
    int_types = (int,)
    long_iter_types = (list, tuple, bytearray, range, dict, set, frozenset)

    # Collection types are recursively iterated for each limit in
    # maxcollection.
    maxcollection = (60, 20)

    # Specifies type, prefix string, suffix string, and whether to include a
    # comma if there is only one element. (Using a sequence rather than a
    # mapping because we use isinstance() to determine the matching type.)
    collection_types = [
        (tuple, "(", ")", True),
        (list, "[", "]", False),
        frozenset_info,
        set_info,
    ]
    try:
        from collections import deque

        collection_types.append((deque, "deque([", "])", False))
    except Exception:
        pass

    # type, prefix string, suffix string, item prefix string,
    # item key/value separator, item suffix string
    dict_types = [(dict, "{", "}", "", ": ", "")]
    try:
        from collections import OrderedDict

        dict_types.append((OrderedDict, "OrderedDict([", "])", "(", ", ", ")"))
    except Exception:
        pass

    # All other types are treated identically to strings, but using
    # different limits.
    maxother_outer = 2**16
    maxother_inner = 128

    # When True, integers are rendered in hexadecimal.
    convert_to_hex = False
    # When True, strings/bytes are returned unmodified, ignoring all limits.
    raw_value = False

    def __call__(self, obj):
        """
        :param object obj:
            The object for which we want a representation.

        :return str:
            Returns bytes encoded as utf-8 on py2 and str on py3.
        """
        try:
            return "".join(self._repr(obj, 0))
        except Exception:
            try:
                return "An exception was raised: %r" % sys.exc_info()[1]
            except Exception:
                return "An exception was raised"

    def _repr(self, obj, level):
        """Returns an iterable of the parts in the final repr string."""

        try:
            obj_repr = type(obj).__repr__
        except Exception:
            obj_repr = None

        def has_obj_repr(t):
            # True when `obj` did not override __repr__ relative to type `t`
            # (only then is it safe to render the elements ourselves).
            r = t.__repr__
            try:
                return obj_repr == r
            except Exception:
                return obj_repr is r

        for t, prefix, suffix, comma in self.collection_types:
            if isinstance(obj, t) and has_obj_repr(t):
                return self._repr_iter(obj, level, prefix, suffix, comma)

        for t, prefix, suffix, item_prefix, item_sep, item_suffix in self.dict_types:  # noqa
            if isinstance(obj, t) and has_obj_repr(t):
                return self._repr_dict(obj, level, prefix, suffix, item_prefix, item_sep, item_suffix)

        for t in self.string_types:
            if isinstance(obj, t) and has_obj_repr(t):
                return self._repr_str(obj, level)

        if self._is_long_iter(obj):
            return self._repr_long_iter(obj)

        return self._repr_other(obj, level)

    # Determines whether an iterable exceeds the limits set in
    # maxlimits, and is therefore unsafe to repr().
    def _is_long_iter(self, obj, level=0):
        try:
            # Strings have their own limits (and do not nest). Because
            # they don't have __iter__ in 2.x, this check goes before
            # the next one.
            if isinstance(obj, self.string_types):
                return len(obj) > self.maxstring_inner

            # If it's not an iterable (and not a string), it's fine.
            if not hasattr(obj, "__iter__"):
                return False

            # If it's not an instance of these collection types then it
            # is fine. Note: this is a fix for
            # https://github.com/Microsoft/ptvsd/issues/406
            if not isinstance(obj, self.long_iter_types):
                return False

            # Iterable is its own iterator - this is a one-off iterable
            # like generator or enumerate(). We can't really count that,
            # but repr() for these should not include any elements anyway,
            # so we can treat it the same as non-iterables.
            if obj is iter(obj):
                return False

            # range reprs fine regardless of length.
            if isinstance(obj, range):
                return False

            # numpy and scipy collections (ndarray etc) have
            # self-truncating repr, so they're always safe.
            try:
                module = type(obj).__module__.partition(".")[0]
                if module in ("numpy", "scipy"):
                    return False
            except Exception:
                pass

            # Iterables that nest too deep are considered long.
            if level >= len(self.maxcollection):
                return True

            # It is too long if the length exceeds the limit, or any
            # of its elements are long iterables.
            if hasattr(obj, "__len__"):
                try:
                    size = len(obj)
                except Exception:
                    size = None
                if size is not None and size > self.maxcollection[level]:
                    return True
                return any((self._is_long_iter(item, level + 1) for item in obj))  # noqa
            # No __len__: count while iterating, bailing out past the limit.
            return any(i > self.maxcollection[level] or self._is_long_iter(item, level + 1) for i, item in enumerate(obj))  # noqa

        except Exception:
            # If anything breaks, assume the worst case.
            return True

    def _repr_iter(self, obj, level, prefix, suffix, comma_after_single_element=False):
        """Yield the repr parts for a sequence/set-like collection."""
        yield prefix

        if level >= len(self.maxcollection):
            yield "..."
        else:
            count = self.maxcollection[level]
            yield_comma = False
            for item in obj:
                if yield_comma:
                    yield ", "
                yield_comma = True

                count -= 1
                if count <= 0:
                    yield "..."
                    break

                # Self-referencing item: force a level past maxcollection so
                # it renders as "..." instead of recursing forever.
                for p in self._repr(item, 100 if item is obj else level + 1):
                    yield p
            else:
                if comma_after_single_element:
                    if count == self.maxcollection[level] - 1:
                        yield ","
        yield suffix

    def _repr_long_iter(self, obj):
        """Yield a placeholder repr (type + length) for an oversized iterable."""
        try:
            length = hex(len(obj)) if self.convert_to_hex else len(obj)
            obj_repr = "<%s, len() = %s>" % (type(obj).__name__, length)
        except Exception:
            try:
                obj_repr = "<" + type(obj).__name__ + ">"
            except Exception:
                obj_repr = "<no repr available for object>"
        yield obj_repr

    def _repr_dict(self, obj, level, prefix, suffix, item_prefix, item_sep, item_suffix):
        """Yield the repr parts for a dict-like collection."""
        if not obj:
            yield prefix + suffix
            return
        if level >= len(self.maxcollection):
            yield prefix + "..." + suffix
            return

        yield prefix

        count = self.maxcollection[level]
        yield_comma = False

        if IS_PY36_OR_GREATER:
            # On Python 3.6 (onwards) dictionaries now keep
            # insertion order.
            sorted_keys = list(obj)
        else:
            try:
                sorted_keys = sorted(obj)
            except Exception:
                sorted_keys = list(obj)

        for key in sorted_keys:
            if yield_comma:
                yield ", "
            yield_comma = True

            count -= 1
            if count <= 0:
                yield "..."
                break

            yield item_prefix
            for p in self._repr(key, level + 1):
                yield p

            yield item_sep

            try:
                item = obj[key]
            except Exception:
                # Key vanished or lookup failed mid-iteration.
                yield "<?>"
            else:
                for p in self._repr(item, 100 if item is obj else level + 1):
                    yield p
            yield item_suffix

        yield suffix

    def _repr_str(self, obj, level):
        """Yield a possibly-truncated repr for a str/bytes object."""
        try:
            if self.raw_value:
                # For raw value retrieval, ignore all limits.
                if isinstance(obj, bytes):
                    yield obj.decode("latin-1")
                else:
                    yield obj
                return

            limit_inner = self.maxother_inner
            limit_outer = self.maxother_outer
            limit = limit_inner if level > 0 else limit_outer
            if len(obj) <= limit:
                # Note that we check the limit before doing the repr (so, the final string
                # may actually be considerably bigger on some cases, as besides
                # the additional u, b, ' chars, some chars may be escaped in repr, so
                # even a single char such as \U0010ffff may end up adding more
                # chars than expected).
                yield self._convert_to_unicode_or_bytes_repr(repr(obj))
                return

            # Slightly imprecise calculations - we may end up with a string that is
            # up to 6 characters longer than limit. If you need precise formatting,
            # you are using the wrong class.
            left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3))  # noqa

            # Important: only do repr after slicing to avoid duplicating a byte array that could be
            # huge.

            # Note: we don't deal with high surrogates here because we're not dealing with the
            # repr() of a random object.
            # i.e.: A high surrogate unicode char may be splitted on Py2, but as we do a `repr`
            # afterwards, that's ok.

            # Also, we just show the unicode/string/bytes repr() directly to make clear what the
            # input type was (so, on py2 a unicode would start with u' and on py3 a bytes would
            # start with b').

            part1 = obj[:left_count]
            part1 = repr(part1)
            part1 = part1[: part1.rindex("'")]  # Remove the last '

            part2 = obj[-right_count:]
            part2 = repr(part2)
            part2 = part2[part2.index("'") + 1 :]  # Remove the first ' (and possibly u or b).

            yield part1
            yield "..."
            yield part2
        except:
            # This shouldn't really happen, but let's play it safe.
            pydev_log.exception("Error getting string representation to show.")
            for part in self._repr_obj(obj, level, self.maxother_inner, self.maxother_outer):
                yield part

    def _repr_other(self, obj, level):
        """Yield the repr for an object of any other type, with the generic limits."""
        return self._repr_obj(obj, level, self.maxother_inner, self.maxother_outer)

    def _repr_obj(self, obj, level, limit_inner, limit_outer):
        """Yield a possibly-truncated repr for an arbitrary object."""
        try:
            if self.raw_value:
                # For raw value retrieval, ignore all limits.
                if isinstance(obj, bytes):
                    yield obj.decode("latin-1")
                    return

                try:
                    mv = memoryview(obj)
                except Exception:
                    yield self._convert_to_unicode_or_bytes_repr(repr(obj))
                    return
                else:
                    # Map bytes to Unicode codepoints with same values.
                    yield mv.tobytes().decode("latin-1")
                    return
            elif self.convert_to_hex and isinstance(obj, self.int_types):
                obj_repr = hex(obj)
            else:
                obj_repr = repr(obj)
        except Exception:
            try:
                obj_repr = object.__repr__(obj)
            except Exception:
                try:
                    obj_repr = "<no repr available for " + type(obj).__name__ + ">"  # noqa
                except Exception:
                    obj_repr = "<no repr available for object>"

        limit = limit_inner if level > 0 else limit_outer

        if limit >= len(obj_repr):
            yield self._convert_to_unicode_or_bytes_repr(obj_repr)
            return

        # Slightly imprecise calculations - we may end up with a string that is
        # up to 3 characters longer than limit. If you need precise formatting,
        # you are using the wrong class.
        left_count, right_count = max(1, int(2 * limit / 3)), max(1, int(limit / 3))  # noqa

        yield obj_repr[:left_count]
        yield "..."
        yield obj_repr[-right_count:]

    def _convert_to_unicode_or_bytes_repr(self, obj_repr):
        # Hook point: on py3 the repr is already str, so pass it through.
        return obj_repr

    def _bytes_as_unicode_if_possible(self, obj_repr):
        """Decode a bytes repr with the first encoding that works, else return it as-is."""
        # We try to decode with 3 possible encoding (sys.stdout.encoding,
        # locale.getpreferredencoding() and 'utf-8). If no encoding can decode
        # the input, we return the original bytes.
        try_encodings = []
        encoding = self.sys_stdout_encoding or getattr(sys.stdout, "encoding", "")
        if encoding:
            try_encodings.append(encoding.lower())

        preferred_encoding = self.locale_preferred_encoding or locale.getpreferredencoding()
        if preferred_encoding:
            preferred_encoding = preferred_encoding.lower()
            if preferred_encoding not in try_encodings:
                try_encodings.append(preferred_encoding)

        if "utf-8" not in try_encodings:
            try_encodings.append("utf-8")

        for encoding in try_encodings:
            try:
                return obj_repr.decode(encoding)
            except UnicodeDecodeError:
                pass

        return obj_repr  # Return the original version (in bytes)
|
||||
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
Utility for saving locals.
|
||||
"""
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import IS_PY313_OR_GREATER
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
# Resolve the concrete frame type once, so save_locals can reject non-frame objects.
try:
    import types

    frame_type = types.FrameType
except:
    # Fallback: derive the type from a real frame of the current thread.
    frame_type = type(sys._getframe())
|
||||
|
||||
|
||||
def is_save_locals_available():
    """Return True when this interpreter provides a way to write frame locals back."""
    return save_locals_impl is not None
|
||||
|
||||
|
||||
def save_locals(frame):
    """
    Copy values from frame.f_locals into the fast stack slots of the given frame.

    Note: the 'save_locals' branch had a different approach wrapping the frame (much more code, but it gives ideas
    on how to save things partially, not the 'whole' locals).
    """
    if not isinstance(frame, frame_type):
        # Fix exception when changing Django variable (receiving DjangoTemplateFrame)
        return

    impl = save_locals_impl
    if impl is None:
        # No mechanism available on this interpreter/version.
        return

    try:
        impl(frame)
    except:
        # Best effort: never let a failed write-back break the debugger.
        pass
|
||||
|
||||
|
||||
def make_save_locals_impl():
    """
    Factory for the 'save_locals_impl' method. This may seem like a complicated pattern but it is essential that the method is created at
    module load time. Inner imports after module load time would cause an occasional debugger deadlock due to the importer lock and debugger
    lock being taken in different order in different threads.

    Returns a callable taking a frame, or None when no mechanism is available.
    """
    try:
        if "__pypy__" in sys.builtin_module_names:
            import __pypy__  # @UnresolvedImport

            save_locals = __pypy__.locals_to_fast
    except:
        pass
    else:
        if "__pypy__" in sys.builtin_module_names:

            def save_locals_pypy_impl(frame):
                # PyPy exposes locals_to_fast directly; no ctypes involved.
                save_locals(frame)

            return save_locals_pypy_impl

    if IS_PY313_OR_GREATER:
        # No longer needed in Python 3.13 (deprecated)
        # See PEP 667
        return None

    try:
        import ctypes

        locals_to_fast = ctypes.pythonapi.PyFrame_LocalsToFast
    except:
        pass
    else:

        def save_locals_ctypes_impl(frame):
            # CPython: write f_locals back into the fast-locals array (second arg 0 == don't clear).
            locals_to_fast(ctypes.py_object(frame), ctypes.c_int(0))

        return save_locals_ctypes_impl

    return None
|
||||
|
||||
|
||||
# Resolved once at module load time (see make_save_locals_impl for why).
save_locals_impl = make_save_locals_impl()

# Unique sentinel to distinguish "key absent" from "value is None" in dict lookups.
_SENTINEL = []  # Any mutable will do.
|
||||
|
||||
|
||||
def update_globals_and_locals(updated_globals, initial_globals, frame):
    """
    Write changes made during an evaluation (done in a single namespace) back to *frame*.

    Keys whose value was rebound (or newly created) in updated_globals relative to
    initial_globals are copied into frame.f_locals; keys that disappeared are removed.
    When anything was written, save_locals(frame) syncs the frame's fast locals.
    """
    # We don't have the locals and passed all in globals, so, we have to
    # manually choose how to update the variables.
    #
    # Note that the current implementation is a bit tricky: it does work in general
    # but if we do something as 'some_var = 10' and 'some_var' is already defined to have
    # the value '10' in the globals, we won't actually put that value in the locals
    # (which means that the frame locals won't be updated).
    # Still, the approach to have a single namespace was chosen because it was the only
    # one that enabled creating and using variables during the same evaluation.
    assert updated_globals is not None
    f_locals = None

    # Keys present initially but not anymore were deleted by the evaluation.
    removed = set(initial_globals).difference(updated_globals)

    for key, val in updated_globals.items():
        # Identity comparison on purpose: only values actually rebound are copied.
        if val is not initial_globals.get(key, _SENTINEL):
            if f_locals is None:
                # Note: we call f_locals only once because each time
                # we call it the values may be reset.
                f_locals = frame.f_locals

            f_locals[key] = val

    if removed:
        if f_locals is None:
            # Note: we call f_locals only once because each time
            # we call it the values may be reset.
            f_locals = frame.f_locals

        for key in removed:
            try:
                del f_locals[key]
            except Exception:
                # Python 3.13.0 has issues here:
                # https://github.com/python/cpython/pull/125616
                # This should be backported from the pull request
                # but we still need to handle it in this version
                try:
                    if key in f_locals:
                        f_locals[key] = None
                except Exception as e:
                    pydev_log.info("Unable to remove key: %s from locals. Exception: %s", key, e)

    if f_locals is not None:
        save_locals(frame)
|
||||
@@ -0,0 +1,201 @@
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
try:
|
||||
import trace
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706)
|
||||
|
||||
import os
|
||||
from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
|
||||
|
||||
|
||||
class Signature(object):
    """Captured signature of one call: file, function name, argument types and return type."""

    def __init__(self, file, name):
        self.file = file
        self.name = name
        self.args = []  # list of (arg_name, type_name) tuples
        self.args_str = []  # the same data pre-formatted as "name:type"
        self.return_type = None

    def add_arg(self, name, type):
        """Append one argument together with its type name."""
        entry = (name, type)
        self.args.append(entry)
        self.args_str.append("%s:%s" % entry)

    def set_args(self, frame, recursive=False):
        """Rebuild self.args from the frame's declared positional arguments."""
        self.args = []

        frame_code = frame.f_code
        frame_locals = frame.f_locals

        for arg_index in range(frame_code.co_argcount):
            arg_name = frame_code.co_varnames[arg_index]
            self.add_arg(arg_name, get_type_of_value(frame_locals[arg_name], recursive=recursive))

    def __str__(self):
        return "%s %s(%s)" % (self.file, self.name, ", ".join(self.args_str))
|
||||
|
||||
|
||||
def get_type_of_value(value, ignore_module_name=("__main__", "__builtin__", "builtins"), recursive=False):
    """
    Return a readable type name for *value*, qualified with the module unless it
    is one of *ignore_module_name*. Lists/dicts/tuples are rendered as
    List/Dict/Tuple, optionally parameterized with element types when
    *recursive* is True (lists/dicts only sample the first element/item).
    """
    value_type = type(value)
    class_name = value_type.__name__
    if class_name == "instance":  # old-style classes
        value_type = value.__class__
        class_name = value_type.__name__

    module = getattr(value_type, "__module__", None)
    if module and module not in ignore_module_name:
        class_name = "%s.%s" % (module, class_name)

    if class_name == "list":
        class_name = "List"
        if recursive and len(value) > 0:
            class_name += "[%s]" % get_type_of_value(value[0], recursive=recursive)
        return class_name

    if class_name == "dict":
        class_name = "Dict"
        if recursive and len(value) > 0:
            # Only the first key/value pair is sampled.
            first_key, first_val = next(iter(value.items()))
            class_name += "[%s, %s]" % (
                get_type_of_value(first_key, recursive=recursive),
                get_type_of_value(first_val, recursive=recursive),
            )
        return class_name

    if class_name == "tuple":
        class_name = "Tuple"
        if recursive and len(value) > 0:
            element_names = [get_type_of_value(v, recursive=recursive) for v in value]
            class_name += "[%s]" % ", ".join(element_names)

    return class_name
|
||||
|
||||
|
||||
def _modname(path):
|
||||
"""Return a plausible module name for the path"""
|
||||
base = os.path.basename(path)
|
||||
filename, ext = os.path.splitext(base)
|
||||
return filename
|
||||
|
||||
|
||||
class SignatureFactory(object):
    """Builds Signature objects for frames, caching the class name per code object."""

    def __init__(self):
        # code object -> class name (None is stored when a lookup already failed).
        self._caller_cache = {}
        # Tracks signatures already sent to the client (see CallSignatureCache).
        self.cache = CallSignatureCache()

    def create_signature(self, frame, filename, with_args=True):
        """Return a Signature for *frame*, or None (implicitly) if building it fails."""
        try:
            _, modulename, funcname = self.file_module_function_of(frame)
            signature = Signature(filename, funcname)
            if with_args:
                signature.set_args(frame, recursive=True)
            return signature
        except:
            # Failures are only logged; callers must handle a None signature.
            pydev_log.exception()

    def file_module_function_of(self, frame):  # this code is take from trace module and fixed to work with new-style classes
        """Return (filename, module name, [class-qualified] function name) for *frame*."""
        code = frame.f_code
        filename = code.co_filename
        if filename:
            modulename = _modname(filename)
        else:
            modulename = None

        funcname = code.co_name
        clsname = None
        if code in self._caller_cache:
            if self._caller_cache[code] is not None:
                clsname = self._caller_cache[code]
        else:
            # Mark as "looked up" first so a failed lookup is not retried.
            self._caller_cache[code] = None
            clsname = get_clsname_for_code(code, frame)
            if clsname is not None:
                # cache the result - assumption is that new.* is
                # not called later to disturb this relationship
                # _caller_cache could be flushed if functions in
                # the new module get called.
                self._caller_cache[code] = clsname

        if clsname is not None:
            funcname = "%s.%s" % (clsname, funcname)

        return filename, modulename, funcname
|
||||
|
||||
|
||||
def get_signature_info(signature):
    """Return (file, name, space-joined argument type names) for *signature*."""
    arg_types = " ".join(arg_type for _arg_name, arg_type in signature.args)
    return signature.file, signature.name, arg_types
|
||||
|
||||
|
||||
def get_frame_info(frame):
    """Return (function name, current line number, filename) for *frame*."""
    frame_code = frame.f_code
    return frame_code.co_name, frame.f_lineno, frame_code.co_filename
|
||||
|
||||
|
||||
class CallSignatureCache(object):
    """Remembers which (file, name, arg-types) combinations were already observed."""

    def __init__(self):
        # filename -> {function name -> {args-type string -> None}}
        self.cache = {}

    def add(self, signature):
        """Record *signature* so later is_in_cache() calls return True for it."""
        filename, name, args_type = get_signature_info(signature)
        per_name = self.cache.setdefault(filename, {}).setdefault(name, {})
        per_name[args_type] = None

    def is_in_cache(self, signature):
        """Return True when an identical signature was already added."""
        filename, name, args_type = get_signature_info(signature)
        return args_type in self.cache.get(filename, {}).get(name, {})
|
||||
|
||||
|
||||
def create_signature_message(signature):
    """Build the CMD_SIGNATURE_CALL_TRACE net command (XML payload) for *signature*."""
    parts = ["<xml>"]

    parts.append(
        '<call_signature file="%s" name="%s">'
        % (pydevd_xml.make_valid_xml_value(signature.file), pydevd_xml.make_valid_xml_value(signature.name))
    )

    for arg_name, arg_type in signature.args:
        parts.append(
            '<arg name="%s" type="%s"></arg>' % (pydevd_xml.make_valid_xml_value(arg_name), pydevd_xml.make_valid_xml_value(arg_type))
        )

    if signature.return_type is not None:
        parts.append('<return type="%s"></return>' % (pydevd_xml.make_valid_xml_value(signature.return_type)))

    parts.append("</call_signature></xml>")
    return NetCommand(CMD_SIGNATURE_CALL_TRACE, 0, "".join(parts))
|
||||
|
||||
|
||||
def send_signature_call_trace(dbg, frame, filename):
    """
    Send a call-signature command for *frame* if it is in project scope and not
    already cached. Returns True when a command was sent, False otherwise.
    """
    if not (dbg.signature_factory and dbg.in_project_scope(frame)):
        return False

    signature = dbg.signature_factory.create_signature(frame, filename)
    if signature is None:
        return False

    cache = dbg.signature_factory.cache
    if cache is None:
        # No cache configured: always send.
        dbg.writer.add_command(create_signature_message(signature))
        return True

    if cache.is_in_cache(signature):
        # we don't send signature if it is cached
        return False

    cache.add(signature)
    dbg.writer.add_command(create_signature_message(signature))
    return True
|
||||
|
||||
|
||||
def send_signature_return_trace(dbg, frame, filename, return_value):
    """
    Send a signature command carrying the return type of *frame*'s call.
    Returns True when a command was sent, False otherwise.
    """
    if not (dbg.signature_factory and dbg.in_project_scope(frame)):
        return False

    signature = dbg.signature_factory.create_signature(frame, filename, with_args=False)
    signature.return_type = get_type_of_value(return_value, recursive=True)
    dbg.writer.add_command(create_signature_message(signature))
    return True
|
||||
@@ -0,0 +1,152 @@
|
||||
import bisect
|
||||
from _pydevd_bundle.pydevd_constants import NULL, KeyifyList
|
||||
import pydevd_file_utils
|
||||
|
||||
|
||||
class SourceMappingEntry(object):
    """Maps a range of lines in a source file to lines in a runtime source (e.g. an IPython cell)."""

    __slots__ = ["source_filename", "line", "end_line", "runtime_line", "runtime_source"]

    def __init__(self, line, end_line, runtime_line, runtime_source):
        assert isinstance(runtime_source, str)

        self.line = int(line)  # First mapped source line (inclusive).
        self.end_line = int(end_line)  # Last mapped source line (inclusive).
        self.runtime_line = int(runtime_line)  # Line in the runtime source where the range starts.
        self.runtime_source = runtime_source  # Something as <ipython-cell-xxx>

        # Should be set after translated to server (absolute_source_filename).
        # This is what's sent to the client afterwards (so, its case should not be normalized).
        self.source_filename = None

    def contains_line(self, i):
        # True when source line *i* falls inside [line, end_line].
        return self.line <= i <= self.end_line

    def contains_runtime_line(self, i):
        # NOTE(review): 'line_count' ADDS end_line and line instead of subtracting,
        # making the accepted runtime range wider than the mapped source range.
        # This matches the shipped behavior, but looks suspicious -- confirm before changing.
        line_count = self.end_line + self.line
        runtime_end_line = self.runtime_line + line_count
        return self.runtime_line <= i <= runtime_end_line

    def __str__(self):
        return "SourceMappingEntry(%s)" % (", ".join("%s=%r" % (attr, getattr(self, attr)) for attr in self.__slots__))

    __repr__ = __str__
|
||||
|
||||
|
||||
class SourceMapping(object):
    """
    Bidirectional mapping between client source files (file.py) and runtime sources (<cell>).

    Query results are memoized in self._cache; the cache is cleared whenever the
    mappings change.
    """

    def __init__(self, on_source_mapping_changed=NULL):
        self._mappings_to_server = {}  # dict(normalized(file.py) to [SourceMappingEntry])
        self._mappings_to_client = {}  # dict(<cell> to File.py)
        self._cache = {}
        self._on_source_mapping_changed = on_source_mapping_changed

    def set_source_mapping(self, absolute_filename, mapping):
        """
        :param str absolute_filename:
            The filename for the source mapping (bytes on py2 and str on py3).

        :param list(SourceMappingEntry) mapping:
            A list with the source mapping entries to be applied to the given filename.

        :return str:
            An error message if it was not possible to set the mapping or an empty string if
            everything is ok.
        """
        # Let's first validate if it's ok to apply that mapping.
        # File mappings must be 1:N, not M:N (i.e.: if there's a mapping from file1.py to <cell1>,
        # there can be no other mapping from any other file to <cell1>).
        # This is a limitation to make it easier to remove existing breakpoints when new breakpoints are
        # set to a file (so, any file matching that breakpoint can be removed instead of needing to check
        # which lines are corresponding to that file).
        for map_entry in mapping:
            existing_source_filename = self._mappings_to_client.get(map_entry.runtime_source)
            if existing_source_filename and existing_source_filename != absolute_filename:
                return "Cannot apply mapping from %s to %s (it conflicts with mapping: %s to %s)" % (
                    absolute_filename,
                    map_entry.runtime_source,
                    existing_source_filename,
                    map_entry.runtime_source,
                )

        try:
            absolute_normalized_filename = pydevd_file_utils.normcase(absolute_filename)
            current_mapping = self._mappings_to_server.get(absolute_normalized_filename, [])
            # Drop the reverse entries of the mapping being replaced.
            for map_entry in current_mapping:
                del self._mappings_to_client[map_entry.runtime_source]

            # Keep entries sorted by starting line (map_to_server relies on this for bisect).
            self._mappings_to_server[absolute_normalized_filename] = sorted(mapping, key=lambda entry: entry.line)

            for map_entry in mapping:
                self._mappings_to_client[map_entry.runtime_source] = absolute_filename
        finally:
            # Any change invalidates memoized lookups and notifies the listener.
            self._cache.clear()
            self._on_source_mapping_changed()
        return ""

    def map_to_client(self, runtime_source_filename, lineno):
        """
        Translate (<cell>, runtime line) to (client file, client line, True), or echo
        the input back with False when there is no applicable mapping.
        """
        key = (lineno, "client", runtime_source_filename)
        try:
            return self._cache[key]
        except KeyError:
            for _, mapping in list(self._mappings_to_server.items()):
                for map_entry in mapping:
                    if map_entry.runtime_source == runtime_source_filename:  # <cell1>
                        if map_entry.contains_runtime_line(lineno):  # matches line range
                            self._cache[key] = (map_entry.source_filename, map_entry.line + (lineno - map_entry.runtime_line), True)
                            return self._cache[key]

            self._cache[key] = (runtime_source_filename, lineno, False)  # Mark that no translation happened in the cache.
            return self._cache[key]

    def has_mapping_entry(self, runtime_source_filename):
        """
        :param runtime_source_filename:
            Something as <ipython-cell-xxx>
        """
        # Note that we're not interested in the line here, just on knowing if a given filename
        # (from the server) has a mapping for it.
        key = ("has_entry", runtime_source_filename)
        try:
            return self._cache[key]
        except KeyError:
            for _absolute_normalized_filename, mapping in list(self._mappings_to_server.items()):
                for map_entry in mapping:
                    if map_entry.runtime_source == runtime_source_filename:
                        self._cache[key] = True
                        return self._cache[key]

            self._cache[key] = False
            return self._cache[key]

    def map_to_server(self, absolute_filename, lineno):
        """
        Convert something as 'file1.py' at line 10 to '<ipython-cell-xxx>' at line 2.

        Note that the name should be already normalized at this point.
        """
        absolute_normalized_filename = pydevd_file_utils.normcase(absolute_filename)

        changed = False
        mappings = self._mappings_to_server.get(absolute_normalized_filename)
        if mappings:
            # Entries are sorted by 'line': find the candidate containing 'lineno'.
            i = bisect.bisect(KeyifyList(mappings, lambda entry: entry.line), lineno)
            if i >= len(mappings):
                i -= 1

            if i == 0:
                entry = mappings[i]

            else:
                entry = mappings[i - 1]

            if not entry.contains_line(lineno):
                # The previous entry didn't match; try the one at the insertion point.
                entry = mappings[i]
                if not entry.contains_line(lineno):
                    entry = None

            if entry is not None:
                lineno = entry.runtime_line + (lineno - entry.line)

                absolute_filename = entry.runtime_source
                changed = True

        return absolute_filename, lineno, changed
|
||||
@@ -0,0 +1,412 @@
|
||||
from __future__ import nested_scopes
|
||||
|
||||
import weakref
|
||||
import sys
|
||||
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
from _pydevd_bundle.pydevd_constants import call_only_once
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydevd_bundle.pydevd_custom_frames import update_custom_frame, remove_custom_frame, add_custom_frame
|
||||
import stackless # @UnresolvedImport
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
|
||||
# Used so that we don't lose the id: a tasklet that died and was collected would
# otherwise get a fresh id if we only kept live references around.
class TaskletToLastId:
    """
    Maps tasklets (held by weak reference) to the last id assigned to them.

    A WeakKeyDictionary is deliberately not used: removals from a WeakKeyDictionary
    install a garbage-collection callback, which would create a new tasklet and
    could therefore recurse into this very structure.
    """

    def __init__(self):
        self.tasklet_ref_to_last_id = {}
        self._i = 0

    def get(self, tasklet):
        """Return the last id assigned to *tasklet*, or None if it was never seen."""
        return self.tasklet_ref_to_last_id.get(weakref.ref(tasklet))

    def __setitem__(self, tasklet, last_id):
        self.tasklet_ref_to_last_id[weakref.ref(tasklet)] = last_id
        self._i += 1
        if self._i % 100 == 0:  # Collect at each 100 additions to the dict (no need to rush).
            dead_refs = [ref for ref in self.tasklet_ref_to_last_id if ref() is None]
            for dead_ref in dead_refs:
                del self.tasklet_ref_to_last_id[dead_ref]


_tasklet_to_last_id = TaskletToLastId()
|
||||
|
||||
|
||||
# =======================================================================================================================
# _TaskletInfo
# =======================================================================================================================
class _TaskletInfo:
    """Bookkeeping for one tasklet: stable id, display name and custom frame id."""

    _last_id = 0  # Class-wide counter used to hand out new tasklet ids.

    def __init__(self, tasklet_weakref, tasklet):
        self.frame_id = None  # Custom frame id while the tasklet is shown as suspended.
        self.tasklet_weakref = tasklet_weakref

        # Reuse a previously assigned id for this tasklet, if any (see TaskletToLastId).
        last_id = _tasklet_to_last_id.get(tasklet)
        if last_id is None:
            _TaskletInfo._last_id += 1
            last_id = _TaskletInfo._last_id
            _tasklet_to_last_id[tasklet] = last_id

        self._tasklet_id = last_id

        self.update_name()

    def update_name(self):
        """Recompute self.tasklet_name from the tasklet's current state and thread."""
        tasklet = self.tasklet_weakref()
        if tasklet:
            if tasklet.blocked:
                state = "blocked"
            elif tasklet.paused:
                state = "paused"
            elif tasklet.scheduled:
                state = "scheduled"
            else:
                state = "<UNEXPECTED>"

            try:
                name = tasklet.name
            except AttributeError:
                if tasklet.is_main:
                    name = "MainTasklet"
                else:
                    name = "Tasklet-%s" % (self._tasklet_id,)

            thread_id = tasklet.thread_id
            if thread_id != -1:
                for thread in threading.enumerate():
                    if thread.ident == thread_id:
                        if thread.name:
                            thread_name = "of %s" % (thread.name,)
                        else:
                            thread_name = "of Thread-%s" % (thread.name or str(thread_id),)
                        break
                else:
                    # should not happen.
                    thread_name = "of Thread-%s" % (str(thread_id),)
                    thread = None
            else:
                # tasklet is no longer bound to a thread, because its thread ended
                thread_name = "without thread"

            tid = id(tasklet)
            tasklet = None  # Drop the strong reference as soon as possible.
        else:
            state = "dead"
            name = "Tasklet-%s" % (self._tasklet_id,)
            thread_name = ""
            tid = "-"
        self.tasklet_name = "%s %s %s (%s)" % (state, name, thread_name, tid)

    if not hasattr(stackless.tasklet, "trace_function"):
        # bug https://bitbucket.org/stackless-dev/stackless/issue/42
        # is not fixed. Stackless releases before 2014
        # Older Stackless variant: state flags are unreliable, so the name omits the state.
        def update_name(self):
            tasklet = self.tasklet_weakref()
            if tasklet:
                try:
                    name = tasklet.name
                except AttributeError:
                    if tasklet.is_main:
                        name = "MainTasklet"
                    else:
                        name = "Tasklet-%s" % (self._tasklet_id,)

                thread_id = tasklet.thread_id
                for thread in threading.enumerate():
                    if thread.ident == thread_id:
                        if thread.name:
                            thread_name = "of %s" % (thread.name,)
                        else:
                            thread_name = "of Thread-%s" % (thread.name or str(thread_id),)
                        break
                else:
                    # should not happen.
                    thread_name = "of Thread-%s" % (str(thread_id),)
                    thread = None

                tid = id(tasklet)
                tasklet = None
            else:
                name = "Tasklet-%s" % (self._tasklet_id,)
                thread_name = ""
                tid = "-"
            self.tasklet_name = "%s %s (%s)" % (name, thread_name, tid)
|
||||
|
||||
|
||||
# Registry: weakref(tasklet) -> _TaskletInfo for every tasklet seen so far.
_weak_tasklet_registered_to_info = {}


# =======================================================================================================================
# get_tasklet_info
# =======================================================================================================================
def get_tasklet_info(tasklet):
    """Return the _TaskletInfo for *tasklet*, creating/registering it when needed."""
    return register_tasklet_info(tasklet)
|
||||
|
||||
|
||||
# =======================================================================================================================
# register_tasklet_info
# =======================================================================================================================
def register_tasklet_info(tasklet):
    """Get or create the _TaskletInfo for *tasklet*, keyed by a weak reference."""
    tasklet_ref = weakref.ref(tasklet)
    info = _weak_tasklet_registered_to_info.get(tasklet_ref)
    if info is None:
        info = _weak_tasklet_registered_to_info[tasklet_ref] = _TaskletInfo(tasklet_ref, tasklet)

    return info
|
||||
|
||||
|
||||
_application_set_schedule_callback = None  # Application-installed callback, chained after ours.


# =======================================================================================================================
# _schedule_callback
# =======================================================================================================================
def _schedule_callback(prev, next):
    """
    Called when a context is stopped or a new context is made runnable.

    Installs the trace function on the tasklet about to run and keeps the
    debugger's custom frames in sync with suspended tasklets.
    """
    try:
        if not prev and not next:
            return

        current_frame = sys._getframe()

        if next:
            register_tasklet_info(next)

            # Ok, making next runnable: set the tracing facility in it.
            debugger = get_global_debugger()
            if debugger is not None:
                next.trace_function = debugger.get_thread_local_trace_func()
                frame = next.frame
                if frame is current_frame:
                    frame = frame.f_back
                if hasattr(frame, "f_trace"):  # Note: can be None (but hasattr should cover for that too).
                    frame.f_trace = debugger.get_thread_local_trace_func()

            debugger = None

        if prev:
            register_tasklet_info(prev)

        try:
            for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()):  # Make sure it's a copy!
                tasklet = tasklet_ref()
                if tasklet is None or not tasklet.alive:
                    # Garbage-collected already!
                    try:
                        del _weak_tasklet_registered_to_info[tasklet_ref]
                    except KeyError:
                        pass
                    if tasklet_info.frame_id is not None:
                        remove_custom_frame(tasklet_info.frame_id)
                else:
                    is_running = stackless.get_thread_info(tasklet.thread_id)[1] is tasklet
                    if tasklet is prev or (tasklet is not next and not is_running):
                        # the tasklet won't run after this scheduler action:
                        # - the tasklet is the previous tasklet
                        # - it is not the next tasklet and it is not an already running tasklet
                        frame = tasklet.frame
                        if frame is current_frame:
                            frame = frame.f_back
                        if frame is not None:
                            # print >>sys.stderr, "SchedCB: %r, %d, '%s', '%s'" % (tasklet, frame.f_lineno, _filename, base)
                            debugger = get_global_debugger()
                            if debugger is not None and debugger.get_file_type(frame) is None:
                                tasklet_info.update_name()
                                if tasklet_info.frame_id is None:
                                    tasklet_info.frame_id = add_custom_frame(frame, tasklet_info.tasklet_name, tasklet.thread_id)
                                else:
                                    update_custom_frame(tasklet_info.frame_id, frame, tasklet.thread_id, name=tasklet_info.tasklet_name)
                            debugger = None

                    elif tasklet is next or is_running:
                        if tasklet_info.frame_id is not None:
                            # Remove info about stackless suspended when it starts to run.
                            remove_custom_frame(tasklet_info.frame_id)
                            tasklet_info.frame_id = None

        finally:
            # Avoid keeping strong references to tasklets/frames past this call.
            tasklet = None
            tasklet_info = None
            frame = None

    except:
        pydev_log.exception()

    if _application_set_schedule_callback is not None:
        return _application_set_schedule_callback(prev, next)
|
||||
|
||||
|
||||
if not hasattr(stackless.tasklet, "trace_function"):
    # Older versions of Stackless, released before 2014
    # This code does not work reliable! It is affected by several
    # stackless bugs: Stackless issues #44, #42, #40
    def _schedule_callback(prev, next):
        """
        Called when a context is stopped or a new context is made runnable.

        Fallback for old Stackless builds without tasklet.trace_function support.
        """
        try:
            if not prev and not next:
                return

            if next:
                register_tasklet_info(next)

                # Ok, making next runnable: set the tracing facility in it.
                debugger = get_global_debugger()
                if debugger is not None and next.frame:
                    if hasattr(next.frame, "f_trace"):
                        next.frame.f_trace = debugger.get_thread_local_trace_func()
                debugger = None

            if prev:
                register_tasklet_info(prev)

            try:
                for tasklet_ref, tasklet_info in list(_weak_tasklet_registered_to_info.items()):  # Make sure it's a copy!
                    tasklet = tasklet_ref()
                    if tasklet is None or not tasklet.alive:
                        # Garbage-collected already!
                        try:
                            del _weak_tasklet_registered_to_info[tasklet_ref]
                        except KeyError:
                            pass
                        if tasklet_info.frame_id is not None:
                            remove_custom_frame(tasklet_info.frame_id)
                    else:
                        if tasklet.paused or tasklet.blocked or tasklet.scheduled:
                            if tasklet.frame and tasklet.frame.f_back:
                                f_back = tasklet.frame.f_back
                                debugger = get_global_debugger()
                                if debugger is not None and debugger.get_file_type(f_back) is None:
                                    if tasklet_info.frame_id is None:
                                        tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id)
                                    else:
                                        update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id)
                                debugger = None

                        elif tasklet.is_current:
                            if tasklet_info.frame_id is not None:
                                # Remove info about stackless suspended when it starts to run.
                                remove_custom_frame(tasklet_info.frame_id)
                                tasklet_info.frame_id = None

            finally:
                # NOTE(review): f_back may be unbound here when no tasklet took the
                # suspended branch above -- confirm this cleanup cannot raise NameError.
                tasklet = None
                tasklet_info = None
                f_back = None

        except:
            pydev_log.exception()

        if _application_set_schedule_callback is not None:
            return _application_set_schedule_callback(prev, next)
|
||||
|
||||
# Keep a handle on the original tasklet.setup so our replacement can delegate to it.
_original_setup = stackless.tasklet.setup


# =======================================================================================================================
# setup
# =======================================================================================================================
def setup(self, *args, **kwargs):
    """
    Called to run a new tasklet: rebind the creation so that we can trace it.
    """

    f = self.tempval  # The callable the tasklet was created to run.

    def new_f(old_f, args, kwargs):
        # Runs inside the new tasklet: enable tracing before the user callable starts.
        debugger = get_global_debugger()
        if debugger is not None:
            debugger.enable_tracing()

        debugger = None

        # Remove our own traces :)
        self.tempval = old_f
        register_tasklet_info(self)

        # Hover old_f to see the stackless being created and *args and **kwargs to see its parameters.
        return old_f(*args, **kwargs)

    # This is the way to tell stackless that the function it should execute is our function, not the original one. Note:
    # setting tempval is the same as calling bind(new_f), but it seems that there's no other way to get the currently
    # bound function, so, keeping on using tempval instead of calling bind (which is actually the same thing in a better
    # API).

    self.tempval = new_f

    return _original_setup(self, f, args, kwargs)
|
||||
|
||||
# =======================================================================================================================
# __call__
# =======================================================================================================================
def __call__(self, *args, **kwargs):
    """
    Called to run a new tasklet: rebind the creation so that we can trace it.

    Replacement for tasklet.__call__ on old Stackless builds; delegates to the
    patched setup().
    """

    return setup(self, *args, **kwargs)
|
||||
|
||||
_original_run = stackless.run
|
||||
|
||||
# =======================================================================================================================
|
||||
# run
|
||||
# =======================================================================================================================
|
||||
def run(*args, **kwargs):
    """
    Replacement for stackless.run: make sure debugger tracing is enabled
    before delegating to the original implementation.
    """
    dbg = get_global_debugger()
    if dbg is not None:
        dbg.enable_tracing()
    dbg = None

    return _original_run(*args, **kwargs)
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# patch_stackless
|
||||
# =======================================================================================================================
|
||||
def patch_stackless():
    """
    This function should be called to patch the stackless module so that new
    tasklets are properly tracked in the debugger.
    """
    global _application_set_schedule_callback
    # Install our scheduling hook; keep whatever callback the application had
    # installed so we can chain to it later.
    _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback)

    def set_schedule_callback(callable):
        # NOTE: parameter name shadows the `callable` builtin, but it is kept
        # to mirror the original stackless API signature.
        global _application_set_schedule_callback
        previous = _application_set_schedule_callback
        _application_set_schedule_callback = callable
        return previous

    def get_schedule_callback():
        global _application_set_schedule_callback
        return _application_set_schedule_callback

    set_schedule_callback.__doc__ = stackless.set_schedule_callback.__doc__
    if hasattr(stackless, "get_schedule_callback"):
        get_schedule_callback.__doc__ = stackless.get_schedule_callback.__doc__
    stackless.set_schedule_callback = set_schedule_callback
    stackless.get_schedule_callback = get_schedule_callback

    if not hasattr(stackless.tasklet, "trace_function"):
        # Older versions of Stackless, released before 2014
        # NOTE(review): on versions providing tasklet.trace_function these
        # replacements are not needed — confirm placement against upstream.
        __call__.__doc__ = stackless.tasklet.__call__.__doc__
        stackless.tasklet.__call__ = __call__

        setup.__doc__ = stackless.tasklet.setup.__doc__
        stackless.tasklet.setup = setup

        run.__doc__ = stackless.run.__doc__
        stackless.run = run
|
||||
|
||||
|
||||
# NOTE(review): call_only_once presumably makes repeated patch_stackless()
# invocations no-ops after the first — confirm in its definition.
patch_stackless = call_only_once(patch_stackless)
|
||||
@@ -0,0 +1,544 @@
|
||||
from contextlib import contextmanager
|
||||
import sys
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import get_frame, RETURN_VALUES_DICT, ForkSafeLock, GENERATED_LEN_ATTR_NAME, silence_warnings_decorator
|
||||
from _pydevd_bundle.pydevd_xml import get_variable_details, get_type
|
||||
from _pydev_bundle.pydev_override import overrides
|
||||
from _pydevd_bundle.pydevd_resolver import sorted_attributes_key, TOO_LARGE_ATTR, get_var_scope
|
||||
from _pydevd_bundle.pydevd_safe_repr import SafeRepr
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle import pydevd_vars
|
||||
from _pydev_bundle.pydev_imports import Exec
|
||||
from _pydevd_bundle.pydevd_frame_utils import FramesList
|
||||
from _pydevd_bundle.pydevd_utils import ScopeRequest, DAPGrouper, Timer
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class _AbstractVariable(object):
|
||||
# Default attributes in class, set in instance.
|
||||
|
||||
name = None
|
||||
value = None
|
||||
evaluate_name = None
|
||||
|
||||
def __init__(self, py_db):
|
||||
assert py_db is not None
|
||||
self.py_db = py_db
|
||||
|
||||
def get_name(self):
|
||||
return self.name
|
||||
|
||||
def get_value(self):
|
||||
return self.value
|
||||
|
||||
def get_variable_reference(self):
|
||||
return id(self.value)
|
||||
|
||||
def get_var_data(self, fmt: Optional[dict] = None, context: Optional[str] = None, **safe_repr_custom_attrs):
|
||||
"""
|
||||
:param dict fmt:
|
||||
Format expected by the DAP (keys: 'hex': bool, 'rawString': bool)
|
||||
|
||||
:param context:
|
||||
This is the context in which the variable is being requested. Valid values:
|
||||
"watch",
|
||||
"repl",
|
||||
"hover",
|
||||
"clipboard"
|
||||
"""
|
||||
timer = Timer()
|
||||
safe_repr = SafeRepr()
|
||||
if fmt is not None:
|
||||
safe_repr.convert_to_hex = fmt.get("hex", False)
|
||||
safe_repr.raw_value = fmt.get("rawString", False)
|
||||
for key, val in safe_repr_custom_attrs.items():
|
||||
setattr(safe_repr, key, val)
|
||||
|
||||
type_name, _type_qualifier, _is_exception_on_eval, resolver, value = get_variable_details(
|
||||
self.value, to_string=safe_repr, context=context
|
||||
)
|
||||
|
||||
is_raw_string = type_name in ("str", "bytes", "bytearray")
|
||||
|
||||
attributes = []
|
||||
|
||||
if is_raw_string:
|
||||
attributes.append("rawString")
|
||||
|
||||
name = self.name
|
||||
|
||||
if self._is_return_value:
|
||||
attributes.append("readOnly")
|
||||
name = "(return) %s" % (name,)
|
||||
|
||||
elif name in (TOO_LARGE_ATTR, GENERATED_LEN_ATTR_NAME):
|
||||
attributes.append("readOnly")
|
||||
|
||||
try:
|
||||
if self.value.__class__ == DAPGrouper:
|
||||
type_name = ""
|
||||
except:
|
||||
pass # Ignore errors accessing __class__.
|
||||
|
||||
var_data = {
|
||||
"name": name,
|
||||
"value": value,
|
||||
"type": type_name,
|
||||
}
|
||||
|
||||
if self.evaluate_name is not None:
|
||||
var_data["evaluateName"] = self.evaluate_name
|
||||
|
||||
if resolver is not None: # I.e.: it's a container
|
||||
var_data["variablesReference"] = self.get_variable_reference()
|
||||
else:
|
||||
var_data["variablesReference"] = 0 # It's mandatory (although if == 0 it doesn't have children).
|
||||
|
||||
if len(attributes) > 0:
|
||||
var_data["presentationHint"] = {"attributes": attributes}
|
||||
|
||||
timer.report_if_compute_repr_attr_slow("", name, type_name)
|
||||
return var_data
|
||||
|
||||
def get_children_variables(self, fmt=None, scope=None):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_child_variable_named(self, name, fmt=None, scope=None):
|
||||
for child_var in self.get_children_variables(fmt=fmt, scope=scope):
|
||||
if child_var.get_name() == name:
|
||||
return child_var
|
||||
return None
|
||||
|
||||
def _group_entries(self, lst, handle_return_values):
|
||||
scope_to_grouper = {}
|
||||
|
||||
group_entries = []
|
||||
if isinstance(self.value, DAPGrouper):
|
||||
new_lst = lst
|
||||
else:
|
||||
new_lst = []
|
||||
get_presentation = self.py_db.variable_presentation.get_presentation
|
||||
# Now that we have the contents, group items.
|
||||
for attr_name, attr_value, evaluate_name in lst:
|
||||
scope = get_var_scope(attr_name, attr_value, evaluate_name, handle_return_values)
|
||||
|
||||
entry = (attr_name, attr_value, evaluate_name)
|
||||
if scope:
|
||||
presentation = get_presentation(scope)
|
||||
if presentation == "hide":
|
||||
continue
|
||||
|
||||
elif presentation == "inline":
|
||||
new_lst.append(entry)
|
||||
|
||||
else: # group
|
||||
if scope not in scope_to_grouper:
|
||||
grouper = DAPGrouper(scope)
|
||||
scope_to_grouper[scope] = grouper
|
||||
else:
|
||||
grouper = scope_to_grouper[scope]
|
||||
|
||||
grouper.contents_debug_adapter_protocol.append(entry)
|
||||
|
||||
else:
|
||||
new_lst.append(entry)
|
||||
|
||||
for scope in DAPGrouper.SCOPES_SORTED:
|
||||
grouper = scope_to_grouper.get(scope)
|
||||
if grouper is not None:
|
||||
group_entries.append((scope, grouper, None))
|
||||
|
||||
return new_lst, group_entries
|
||||
|
||||
|
||||
class _ObjectVariable(_AbstractVariable):
    """
    A variable wrapping an arbitrary Python object; children are obtained
    through the resolver registered for the object's type.
    """

    def __init__(self, py_db, name, value, register_variable, is_return_value=False, evaluate_name=None, frame=None):
        _AbstractVariable.__init__(self, py_db)
        self.frame = frame
        self.name = name
        self.value = value
        self._register_variable = register_variable
        # Register eagerly so the variable reference can be looked up later.
        self._register_variable(self)
        self._is_return_value = is_return_value
        self.evaluate_name = evaluate_name

    @silence_warnings_decorator
    @overrides(_AbstractVariable.get_children_variables)
    def get_children_variables(self, fmt=None, scope=None):
        _type, _type_name, resolver = get_type(self.value)

        child_vars = []
        if resolver is not None:  # i.e.: it's a container.
            if hasattr(resolver, "get_contents_debug_adapter_protocol"):
                # The get_contents_debug_adapter_protocol needs to return sorted.
                entries = resolver.get_contents_debug_adapter_protocol(self.value, fmt=fmt)
            else:
                # If there's no special implementation, the default is sorting the keys.
                contents = resolver.get_dictionary(self.value)
                entries = sorted(contents.items(), key=lambda tup: sorted_attributes_key(tup[0]))
                # No evaluate name in this case.
                entries = [(key, value, None) for (key, value) in entries]

            entries, group_entries = self._group_entries(entries, handle_return_values=False)
            if group_entries:
                entries = group_entries + entries

            parent_evaluate_name = self.evaluate_name
            if parent_evaluate_name:
                for key, val, evaluate_name in entries:
                    if evaluate_name is not None:
                        if callable(evaluate_name):
                            # Callable evaluate names build the expression from the parent's.
                            evaluate_name = evaluate_name(parent_evaluate_name)
                        else:
                            evaluate_name = parent_evaluate_name + evaluate_name
                    child_vars.append(
                        _ObjectVariable(self.py_db, key, val, self._register_variable, evaluate_name=evaluate_name, frame=self.frame)
                    )
            else:
                for key, val, evaluate_name in entries:
                    # No evaluate name
                    child_vars.append(_ObjectVariable(self.py_db, key, val, self._register_variable, frame=self.frame))

        return child_vars

    def change_variable(self, name, value, py_db, fmt=None):
        """
        Change the named child to the given value (an expression string).
        Returns the updated child variable, or None when the change could not
        be performed.
        """
        children_variable = self.get_child_variable_named(name)
        if children_variable is None:
            return None

        var_data = children_variable.get_var_data()
        evaluate_name = var_data.get("evaluateName")

        if not evaluate_name:
            # Note: right now we only pass control to the resolver in the cases where
            # there's no evaluate name (the idea being that if we can evaluate it,
            # we can use that evaluation to set the value too -- if in the future
            # a case where this isn't true is found this logic may need to be changed).
            _type, _type_name, container_resolver = get_type(self.value)
            if not hasattr(container_resolver, "change_var_from_name"):
                return None

            try:
                # SECURITY NOTE: eval of a debugger-supplied expression; this is
                # intentional in the debugger context, but the input is not sandboxed.
                new_value = eval(value)
            except:
                return None
            new_key = container_resolver.change_var_from_name(self.value, name, new_value)
            if new_key is not None:
                return _ObjectVariable(self.py_db, new_key, new_value, self._register_variable, evaluate_name=None, frame=self.frame)

            return None

        frame = self.frame
        if frame is None:
            return None

        try:
            # This handles the simple cases (such as dict, list, object)
            Exec("%s=%s" % (evaluate_name, value), frame.f_globals, frame.f_locals)
        except:
            return None

        return self.get_child_variable_named(name, fmt=fmt)
|
||||
|
||||
|
||||
def sorted_variables_key(obj):
    """Sort key for variable objects: order by their name using the shared attribute ordering."""
    return sorted_attributes_key(obj.name)
|
||||
|
||||
|
||||
class _FrameVariable(_AbstractVariable):
    """
    A variable wrapping a stack frame; its children are the frame's locals or
    globals (selected through the scope).
    """

    def __init__(self, py_db, frame, register_variable):
        _AbstractVariable.__init__(self, py_db)
        self.frame = frame

        self.name = self.frame.f_code.co_name
        self.value = frame

        self._register_variable = register_variable
        self._register_variable(self)

    def change_variable(self, name, value, py_db, fmt=None):
        frame = self.frame

        pydevd_vars.change_attr_expression(frame, name, value, py_db)

        return self.get_child_variable_named(name, fmt=fmt)

    @silence_warnings_decorator
    @overrides(_AbstractVariable.get_children_variables)
    def get_children_variables(self, fmt=None, scope=None):
        child_vars = []
        if scope is not None:
            assert isinstance(scope, ScopeRequest)
            scope = scope.scope

        if scope in ("locals", None):
            contents = self.frame.f_locals
        elif scope == "globals":
            contents = self.frame.f_globals
        else:
            raise AssertionError("Unexpected scope: %s" % (scope,))

        # The debugger-internal '_pydev_stop_at_break' entry is never shown.
        entries, group_entries = self._group_entries(
            [(x[0], x[1], None) for x in list(contents.items()) if x[0] != "_pydev_stop_at_break"], handle_return_values=True
        )

        group_variables = []
        for key, val, _ in group_entries:
            # Make sure that the contents in the group are also sorted.
            val.contents_debug_adapter_protocol.sort(key=lambda v: sorted_attributes_key(v[0]))
            group_variables.append(
                _ObjectVariable(self.py_db, key, val, self._register_variable, False, key, frame=self.frame)
            )

        for key, val, _ in entries:
            is_return_value = key == RETURN_VALUES_DICT
            if is_return_value:
                # The return-values dict expands into one child per returned value.
                for return_key, return_value in val.items():
                    child_vars.append(
                        _ObjectVariable(
                            self.py_db,
                            return_key,
                            return_value,
                            self._register_variable,
                            is_return_value,
                            "%s[%r]" % (key, return_key),
                            frame=self.frame,
                        )
                    )
            else:
                child_vars.append(
                    _ObjectVariable(self.py_db, key, val, self._register_variable, is_return_value, key, frame=self.frame)
                )

        # Frame variables always sorted.
        child_vars.sort(key=sorted_variables_key)
        if group_variables:
            # Groups have priority over other variables.
            child_vars = group_variables + child_vars

        return child_vars
|
||||
|
||||
|
||||
class _FramesTracker(object):
|
||||
"""
|
||||
This is a helper class to be used to track frames when a thread becomes suspended.
|
||||
"""
|
||||
|
||||
def __init__(self, suspended_frames_manager, py_db):
|
||||
self._suspended_frames_manager = suspended_frames_manager
|
||||
self.py_db = py_db
|
||||
self._frame_id_to_frame = {}
|
||||
|
||||
# Note that a given frame may appear in multiple threads when we have custom
|
||||
# frames added, but as those are coroutines, this map will point to the actual
|
||||
# main thread (which is the one that needs to be suspended for us to get the
|
||||
# variables).
|
||||
self._frame_id_to_main_thread_id = {}
|
||||
|
||||
# A map of the suspended thread id -> list(frames ids) -- note that
|
||||
# frame ids are kept in order (the first one is the suspended frame).
|
||||
self._thread_id_to_frame_ids = {}
|
||||
|
||||
self._thread_id_to_frames_list = {}
|
||||
|
||||
# The main suspended thread (if this is a coroutine this isn't the id of the
|
||||
# coroutine thread, it's the id of the actual suspended thread).
|
||||
self._main_thread_id = None
|
||||
|
||||
# Helper to know if it was already untracked.
|
||||
self._untracked = False
|
||||
|
||||
# We need to be thread-safe!
|
||||
self._lock = ForkSafeLock(rlock=True)
|
||||
|
||||
self._variable_reference_to_variable = {}
|
||||
|
||||
def _register_variable(self, variable):
|
||||
variable_reference = variable.get_variable_reference()
|
||||
self._variable_reference_to_variable[variable_reference] = variable
|
||||
|
||||
def obtain_as_variable(self, name, value, evaluate_name=None, frame=None):
|
||||
if evaluate_name is None:
|
||||
evaluate_name = name
|
||||
|
||||
variable_reference = id(value)
|
||||
variable = self._variable_reference_to_variable.get(variable_reference)
|
||||
if variable is not None:
|
||||
return variable
|
||||
|
||||
# Still not created, let's do it now.
|
||||
return _ObjectVariable(
|
||||
self.py_db, name, value, self._register_variable, is_return_value=False, evaluate_name=evaluate_name, frame=frame
|
||||
)
|
||||
|
||||
def get_main_thread_id(self):
|
||||
return self._main_thread_id
|
||||
|
||||
def get_variable(self, variable_reference):
|
||||
return self._variable_reference_to_variable[variable_reference]
|
||||
|
||||
def track(self, thread_id, frames_list, frame_custom_thread_id=None):
|
||||
"""
|
||||
:param thread_id:
|
||||
The thread id to be used for this frame.
|
||||
|
||||
:param FramesList frames_list:
|
||||
A list of frames to be tracked (the first is the topmost frame which is suspended at the given thread).
|
||||
|
||||
:param frame_custom_thread_id:
|
||||
If None this this is the id of the thread id for the custom frame (i.e.: coroutine).
|
||||
"""
|
||||
assert frames_list.__class__ == FramesList
|
||||
with self._lock:
|
||||
coroutine_or_main_thread_id = frame_custom_thread_id or thread_id
|
||||
|
||||
if coroutine_or_main_thread_id in self._suspended_frames_manager._thread_id_to_tracker:
|
||||
sys.stderr.write("pydevd: Something is wrong. Tracker being added twice to the same thread id.\n")
|
||||
|
||||
self._suspended_frames_manager._thread_id_to_tracker[coroutine_or_main_thread_id] = self
|
||||
self._main_thread_id = thread_id
|
||||
|
||||
frame_ids_from_thread = self._thread_id_to_frame_ids.setdefault(coroutine_or_main_thread_id, [])
|
||||
|
||||
self._thread_id_to_frames_list[coroutine_or_main_thread_id] = frames_list
|
||||
for frame in frames_list:
|
||||
frame_id = id(frame)
|
||||
self._frame_id_to_frame[frame_id] = frame
|
||||
_FrameVariable(self.py_db, frame, self._register_variable) # Instancing is enough to register.
|
||||
self._suspended_frames_manager._variable_reference_to_frames_tracker[frame_id] = self
|
||||
frame_ids_from_thread.append(frame_id)
|
||||
|
||||
self._frame_id_to_main_thread_id[frame_id] = thread_id
|
||||
|
||||
frame = None
|
||||
|
||||
def untrack_all(self):
|
||||
with self._lock:
|
||||
if self._untracked:
|
||||
# Calling multiple times is expected for the set next statement.
|
||||
return
|
||||
self._untracked = True
|
||||
for thread_id in self._thread_id_to_frame_ids:
|
||||
self._suspended_frames_manager._thread_id_to_tracker.pop(thread_id, None)
|
||||
|
||||
for frame_id in self._frame_id_to_frame:
|
||||
del self._suspended_frames_manager._variable_reference_to_frames_tracker[frame_id]
|
||||
|
||||
self._frame_id_to_frame.clear()
|
||||
self._frame_id_to_main_thread_id.clear()
|
||||
self._thread_id_to_frame_ids.clear()
|
||||
self._thread_id_to_frames_list.clear()
|
||||
self._main_thread_id = None
|
||||
self._suspended_frames_manager = None
|
||||
self._variable_reference_to_variable.clear()
|
||||
|
||||
def get_frames_list(self, thread_id):
|
||||
with self._lock:
|
||||
return self._thread_id_to_frames_list.get(thread_id)
|
||||
|
||||
def find_frame(self, thread_id, frame_id):
|
||||
with self._lock:
|
||||
return self._frame_id_to_frame.get(frame_id)
|
||||
|
||||
def create_thread_suspend_command(self, thread_id, stop_reason, message, trace_suspend_type, thread, additional_info):
|
||||
with self._lock:
|
||||
# First one is topmost frame suspended.
|
||||
frames_list = self._thread_id_to_frames_list[thread_id]
|
||||
|
||||
cmd = self.py_db.cmd_factory.make_thread_suspend_message(
|
||||
self.py_db, thread_id, frames_list, stop_reason, message, trace_suspend_type, thread, additional_info
|
||||
)
|
||||
|
||||
frames_list = None
|
||||
return cmd
|
||||
|
||||
|
||||
class SuspendedFramesManager(object):
    """
    Top-level registry mapping suspended thread ids and variable references to
    the _FramesTracker instances that own them.
    """

    def __init__(self):
        self._thread_id_to_fake_frames = {}
        self._thread_id_to_tracker = {}

        # Mappings
        self._variable_reference_to_frames_tracker = {}

    def _get_tracker_for_variable_reference(self, variable_reference):
        tracker = self._variable_reference_to_frames_tracker.get(variable_reference)
        if tracker is not None:
            return tracker

        # Fall back to asking each tracker whether it knows the reference.
        for _thread_id, tracker in self._thread_id_to_tracker.items():
            try:
                tracker.get_variable(variable_reference)
            except KeyError:
                pass
            else:
                return tracker

        return None

    def get_thread_id_for_variable_reference(self, variable_reference):
        """
        We can't evaluate variable references values on any thread, only in the suspended
        thread (the main reason for this is that in UI frameworks inspecting a UI object
        from a different thread can potentially crash the application).

        :param int variable_reference:
            The variable reference (can be either a frame id or a reference to a previously
            gotten variable).

        :return str:
            The thread id for the thread to be used to inspect the given variable reference or
            None if the thread was already resumed.
        """
        frames_tracker = self._get_tracker_for_variable_reference(variable_reference)
        if frames_tracker is not None:
            return frames_tracker.get_main_thread_id()
        return None

    def get_frame_tracker(self, thread_id):
        return self._thread_id_to_tracker.get(thread_id)

    def get_variable(self, variable_reference):
        """
        :raises KeyError
        """
        frames_tracker = self._get_tracker_for_variable_reference(variable_reference)
        if frames_tracker is None:
            raise KeyError()
        return frames_tracker.get_variable(variable_reference)

    def get_frames_list(self, thread_id):
        tracker = self._thread_id_to_tracker.get(thread_id)
        if tracker is None:
            return None
        return tracker.get_frames_list(thread_id)

    @contextmanager
    def track_frames(self, py_db):
        # Yield a fresh tracker and make sure it's untracked on exit.
        tracker = _FramesTracker(self, py_db)
        try:
            yield tracker
        finally:
            tracker.untrack_all()

    def add_fake_frame(self, thread_id, frame_id, frame):
        self._thread_id_to_fake_frames.setdefault(thread_id, {})[int(frame_id)] = frame

    def find_frame(self, thread_id, frame_id):
        try:
            if frame_id == "*":
                return get_frame()  # any frame is specified with "*"
            frame_id = int(frame_id)

            fake_frames = self._thread_id_to_fake_frames.get(thread_id)
            if fake_frames is not None:
                frame = fake_frames.get(frame_id)
                if frame is not None:
                    return frame

            frames_tracker = self._thread_id_to_tracker.get(thread_id)
            if frames_tracker is not None:
                frame = frames_tracker.find_frame(thread_id, frame_id)
                if frame is not None:
                    return frame

            return None
        except:
            pydev_log.exception()
            return None
|
||||
@@ -0,0 +1,106 @@
|
||||
from _pydevd_bundle import pydevd_utils
|
||||
from _pydevd_bundle.pydevd_additional_thread_info import set_additional_thread_info
|
||||
from _pydevd_bundle.pydevd_comm_constants import CMD_STEP_INTO, CMD_THREAD_SUSPEND
|
||||
from _pydevd_bundle.pydevd_constants import PYTHON_SUSPEND, STATE_SUSPEND, get_thread_id, STATE_RUN, PYDEVD_USE_SYS_MONITORING
|
||||
from _pydev_bundle._pydev_saved_modules import threading
|
||||
from _pydev_bundle import pydev_log
|
||||
import sys
|
||||
from _pydevd_sys_monitoring import pydevd_sys_monitoring
|
||||
|
||||
|
||||
def pydevd_find_thread_by_id(thread_id):
    """
    Return the live thread whose pydevd thread id matches `thread_id` (either
    exactly or as the trailing "|<tid>" component), or None when not found.
    """
    try:
        all_threads = threading.enumerate()
        for candidate in all_threads:
            tid = get_thread_id(candidate)
            if thread_id == tid or thread_id.endswith("|" + tid):
                return candidate

        # This can happen when a request comes for a thread which was previously removed.
        pydev_log.info("Could not find thread %s.", thread_id)
        pydev_log.info("Available: %s.", ([get_thread_id(t) for t in all_threads],))
    except:
        pydev_log.exception()

    return None
|
||||
|
||||
|
||||
def mark_thread_suspended(thread, stop_reason: int, original_step_cmd: int = -1):
    """
    Flag `thread` as suspended in its additional-info structure and return
    that info object. `original_step_cmd`, when given (!= -1), overrides the
    stop reason recorded on the thread.
    """
    info = set_additional_thread_info(thread)
    info.suspend_type = PYTHON_SUSPEND
    if original_step_cmd != -1:
        stop_reason = original_step_cmd
    thread.stop_reason = stop_reason

    # Note: don't set the 'pydev_original_step_cmd' here if unset.

    if info.pydev_step_cmd == -1:
        # If the step command is not specified, set it to step into
        # to make sure it'll break as soon as possible.
        info.pydev_step_cmd = CMD_STEP_INTO
        info.pydev_step_stop = None

    # Mark as suspended as the last thing.
    info.pydev_state = STATE_SUSPEND
    info.update_stepping_info()
    return info
|
||||
|
||||
|
||||
def internal_run_thread(thread, set_additional_thread_info):
    """
    Clear all stepping state on `thread` and mark it as running again.
    `set_additional_thread_info` is passed in so the caller controls which
    info-factory is used.
    """
    info = set_additional_thread_info(thread)
    info.pydev_original_step_cmd = -1
    info.pydev_step_cmd = -1
    info.pydev_step_stop = None
    info.pydev_state = STATE_RUN
    info.update_stepping_info()
|
||||
|
||||
|
||||
def resume_threads(thread_id, except_thread=None):
    """
    Resume the thread(s) identified by `thread_id` ("*" for all non-pydevd
    threads), skipping `except_thread` when given.
    """
    pydev_log.info("Resuming threads: %s (except thread: %s)", thread_id, except_thread)

    if thread_id == "*":
        targets = pydevd_utils.get_non_pydevd_threads()
    elif thread_id.startswith("__frame__:"):
        # Tasklet pseudo-threads can't be resumed individually.
        targets = []
        pydev_log.critical("Can't make tasklet run: %s", thread_id)
    else:
        targets = [pydevd_find_thread_by_id(thread_id)]

    for t in targets:
        if t is None or t is except_thread:
            pydev_log.info("Skipped resuming thread: %s", t)
            continue

        internal_run_thread(t, set_additional_thread_info=set_additional_thread_info)
|
||||
|
||||
|
||||
def suspend_all_threads(py_db, except_thread):
    """
    Suspend all except the one passed as a parameter.
    :param except_thread:
    """
    if PYDEVD_USE_SYS_MONITORING:
        pydevd_sys_monitoring.update_monitor_events(suspend_requested=True)

    pydev_log.info("Suspending all threads except: %s", except_thread)
    for t in pydevd_utils.get_non_pydevd_threads():
        if getattr(t, "pydev_do_not_trace", None):
            # skip some other threads, i.e. ipython history saving thread from debug console
            continue
        if t is except_thread:
            continue

        info = mark_thread_suspended(t, CMD_THREAD_SUSPEND)
        frame = info.get_topmost_frame(t)

        # Reset the tracing as in this case as it could've set scopes to be untraced.
        if frame is not None:
            try:
                py_db.set_trace_for_frame_and_parents(t.ident, frame)
            finally:
                # Always drop the frame reference, even if setting the trace fails.
                frame = None

    if PYDEVD_USE_SYS_MONITORING:
        # After suspending the frames we need the monitoring to be reset.
        pydevd_sys_monitoring.restart_events()
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user