mirror of https://github.com/aykhans/AzSuicideDataVisualization.git, synced 2025-07-02 14:27:31 +00:00
first commit
16  .venv/Lib/site-packages/debugpy/common/__init__.py  Normal file
@@ -0,0 +1,16 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

import os


__all__ = []

# The lower time bound for assuming that the process hasn't spawned successfully.
PROCESS_SPAWN_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_SPAWN_TIMEOUT", 15))

# The lower time bound for assuming that the process hasn't exited gracefully.
PROCESS_EXIT_TIMEOUT = float(os.getenv("DEBUGPY_PROCESS_EXIT_TIMEOUT", 5))
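The following is not part of the commit; it is a minimal sketch of how these timeouts can be tuned through the environment, assuming the vendored debugpy package above is importable. The override only takes effect if it happens before debugpy.common is first imported.

import os

# Hypothetical overrides for illustration; values are read once at import time.
os.environ["DEBUGPY_PROCESS_SPAWN_TIMEOUT"] = "30"
os.environ["DEBUGPY_PROCESS_EXIT_TIMEOUT"] = "10"

from debugpy.common import PROCESS_SPAWN_TIMEOUT, PROCESS_EXIT_TIMEOUT
print(PROCESS_SPAWN_TIMEOUT, PROCESS_EXIT_TIMEOUT)  # 30.0 10.0
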
213  .venv/Lib/site-packages/debugpy/common/compat.py  Normal file
@@ -0,0 +1,213 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Python 2/3 compatibility helpers.
"""

import functools
import inspect
import itertools
import sys

from debugpy.common import fmt

if sys.version_info[0] < 3:
    # Py2
    import __builtin__ as builtins  # noqa
    from __builtin__ import unicode, bytes, xrange, reload  # noqa

    izip = itertools.izip

    import Queue as queue  # noqa

    def force_str(s, encoding="ascii", errors="strict"):
        """Converts s to str (which is bytes on Python 2, and unicode on Python 3), using
        the provided encoding if necessary. If s is already str, it is returned as is.

        If errors="strict", str is bytes, and s is str, its encoding is verified by decoding
        it; UnicodeError is raised if it cannot be decoded.
        """
        return force_bytes(s, encoding, errors)


else:
    # Py3
    import builtins  # noqa
    from builtins import bytes  # noqa

    unicode = str
    xrange = range
    izip = zip
    from importlib import reload  # noqa
    import queue  # noqa

    def force_str(s, encoding="ascii", errors="strict"):
        """Converts s to str (which is bytes on Python 2, and unicode on Python 3), using
        the provided encoding if necessary. If s is already str, it is returned as is.

        If errors="strict", str is bytes, and s is str, its encoding is verified by decoding
        it; UnicodeError is raised if it cannot be decoded.
        """
        return force_unicode(s, encoding, errors)


def force_unicode(s, encoding, errors="strict"):
    """Converts s to Unicode, using the provided encoding. If s is already Unicode,
    it is returned as is.
    """
    return s.decode(encoding, errors) if isinstance(s, bytes) else unicode(s)


def force_bytes(s, encoding, errors="strict"):
    """Converts s to bytes, using the provided encoding. If s is already bytes,
    it is returned as is.

    If errors="strict" and s is bytes, its encoding is verified by decoding it;
    UnicodeError is raised if it cannot be decoded.
    """
    if isinstance(s, unicode):
        return s.encode(encoding, errors)
    else:
        s = bytes(s)
        if errors == "strict":
            # Return value ignored - invoked solely for verification.
            s.decode(encoding, errors)
        return s


def force_ascii(s, errors="strict"):
    """Same as force_bytes(s, "ascii", errors)
    """
    return force_bytes(s, "ascii", errors)


def force_utf8(s, errors="strict"):
    """Same as force_bytes(s, "utf8", errors)
    """
    return force_bytes(s, "utf8", errors)


def filename(s, errors="strict"):
    """Same as force_unicode(s, sys.getfilesystemencoding(), errors)
    """
    return force_unicode(s, sys.getfilesystemencoding(), errors)


def filename_bytes(s, errors="strict"):
    """Same as force_bytes(s, sys.getfilesystemencoding(), errors)
    """
    return force_bytes(s, sys.getfilesystemencoding(), errors)


def filename_str(s, errors="strict"):
    """Same as force_str(s, sys.getfilesystemencoding(), errors)
    """
    return force_str(s, sys.getfilesystemencoding(), errors)


def nameof(obj, quote=False):
    """Returns the most descriptive name of a Python module, class, or function,
    as a Unicode string.

    If quote=True, name is quoted with repr().

    Best-effort, but guaranteed to not fail - always returns something.
    """

    try:
        name = obj.__qualname__
    except Exception:
        try:
            name = obj.__name__
        except Exception:
            # Fall back to raw repr(), and skip quoting.
            try:
                name = repr(obj)
            except Exception:
                return "<unknown>"
            else:
                quote = False

    if quote:
        try:
            name = repr(name)
        except Exception:
            pass

    return force_unicode(name, "utf-8", "replace")


def unicode_repr(obj):
    """Like repr(), but guarantees that the result is Unicode even on Python 2.
    """
    return force_unicode(repr(obj), "ascii")


def srcnameof(obj):
    """Returns the most descriptive name of a Python module, class, or function,
    including source information (filename and line number), if available.

    Best-effort, but guaranteed to not fail - always returns something.
    """

    name = nameof(obj, quote=True)

    # Get the source information if possible.
    try:
        src_file = filename(inspect.getsourcefile(obj), "replace")
    except Exception:
        pass
    else:
        name += fmt(" (file {0!r}", src_file)
        try:
            _, src_lineno = inspect.getsourcelines(obj)
        except Exception:
            pass
        else:
            name += fmt(", line {0}", src_lineno)
        name += ")"

    return name


def kwonly(f):
    """Makes all arguments with default values keyword-only.

    If the default value is kwonly.required, then the argument must be specified.
    """

    try:
        inspect.getfullargspec
    except AttributeError:
        arg_names, args_name, kwargs_name, arg_defaults = inspect.getargspec(f)
    else:
        arg_names, args_name, kwargs_name, arg_defaults, _, _, _ = inspect.getfullargspec(
            f
        )

    assert args_name is None and kwargs_name is None
    argc = len(arg_names)
    pos_argc = argc - len(arg_defaults)
    required_names = {
        name
        for name, val in zip(arg_names[pos_argc:], arg_defaults)
        if val is kwonly.required
    }

    @functools.wraps(f)
    def kwonly_f(*args, **kwargs):
        if len(args) > pos_argc:
            raise TypeError("too many positional arguments")
        if not required_names.issubset(kwargs):
            missing_names = required_names.difference(kwargs)
            missing_names = ", ".join(repr(s) for s in missing_names)
            raise TypeError("missing required keyword-only arguments: " + missing_names)
        return f(*args, **kwargs)

    return kwonly_f


kwonly.required = object()
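Not part of the commit; a minimal usage sketch for the helpers above (force_bytes, force_unicode, nameof, kwonly), assuming the vendored debugpy package is importable. The connect function is a hypothetical example.

from debugpy.common import compat

print(compat.force_bytes("hello", "utf-8"))     # b'hello'
print(compat.force_unicode(b"hello", "ascii"))  # 'hello'
print(compat.nameof(compat.force_bytes, quote=True))  # 'force_bytes'

@compat.kwonly
def connect(host, port=compat.kwonly.required, timeout=5.0):
    # port and timeout have defaults, so kwonly makes them keyword-only;
    # port uses kwonly.required, so it must always be passed explicitly.
    return (host, port, timeout)

print(connect("localhost", port=5678))  # ('localhost', 5678, 5.0)
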
57  .venv/Lib/site-packages/debugpy/common/fmt.py  Normal file
@@ -0,0 +1,57 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Provides a custom string.Formatter with JSON support.

The formatter object is directly exposed as a module, such that all its members
can be invoked directly after it has been imported::

    from debugpy.common import fmt
    fmt("{0} is {value}", name, value=x)
"""

import string
import sys
import types


class Formatter(string.Formatter, types.ModuleType):
    """A custom string.Formatter with support for JSON pretty-printing.

    Adds {!j} format specification. When used, the corresponding value is converted
    to string using json_encoder.encode().

    Since string.Formatter in Python <3.4 does not support unnumbered placeholders,
    they must always be numbered explicitly - "{0} {1}" rather than "{} {}". Named
    placeholders are supported.
    """

    # Because globals() go away after the module object substitution, all method bodies
    # below must access globals via self instead, or re-import modules locally.

    from debugpy.common import json

    def __init__(self):
        # Set self up as a proper module, and copy globals.
        # types must be re-imported, because globals aren't there yet at this point.
        import types

        types.ModuleType.__init__(self, __name__)
        self.__dict__.update(sys.modules[__name__].__dict__)

    def __call__(self, format_string, *args, **kwargs):
        """Same as self.format().
        """
        return self.format(format_string, *args, **kwargs)

    def convert_field(self, value, conversion):
        if conversion == "j":
            return self.json.JsonObject(value)
        return super(self.Formatter, self).convert_field(value, conversion)


# Replace the standard module object for this module with a Formatter instance.
sys.modules[__name__] = Formatter()
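Not part of the commit; a minimal usage sketch for the fmt module above. Placeholders must be numbered or named, and {!j} renders the value as pretty-printed JSON.

from debugpy.common import fmt

event = {"seq": 1, "type": "event", "body": {"reason": "started"}}

# Positional placeholders must be numbered explicitly; named ones work as usual.
print(fmt("{0} -> {dest}", "launcher", dest="adapter"))

# {!j} converts the value via debugpy.common.json.JsonObject before formatting.
print(fmt("received: {0!j}", event))
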
273  .venv/Lib/site-packages/debugpy/common/json.py  Normal file
@@ -0,0 +1,273 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Improved JSON serialization.
"""

import json
import operator


JsonDecoder = json.JSONDecoder


class JsonEncoder(json.JSONEncoder):
    """Customizable JSON encoder.

    If the object implements __getstate__, then that method is invoked, and its
    result is serialized instead of the object itself.
    """

    def default(self, value):
        try:
            get_state = value.__getstate__
        except AttributeError:
            return super(JsonEncoder, self).default(value)
        else:
            return get_state()


class JsonObject(object):
    """A wrapped Python object that formats itself as JSON when asked for a string
    representation via str() or format().
    """

    json_encoder_factory = JsonEncoder
    """Used by __format__ when format_spec is not empty."""

    json_encoder = json_encoder_factory(indent=4)
    """The default encoder used by __format__ when format_spec is empty."""

    def __init__(self, value):
        self.value = value

    def __repr__(self):
        return repr(self.value)

    def __str__(self):
        return format(self)

    def __format__(self, format_spec):
        """If format_spec is empty, uses self.json_encoder to serialize self.value
        as a string. Otherwise, format_spec is treated as an argument list to be
        passed to self.json_encoder_factory - which defaults to JSONEncoder - and
        then the resulting formatter is used to serialize self.value as a string.

        Example::

            fmt("{0!j} {0!j:indent=4,sort_keys=True}", x)
        """
        if format_spec:
            # At this point, format_spec is a string that looks something like
            # "indent=4,sort_keys=True". What we want is to build a function call
            # from that which looks like:
            #
            #   json_encoder_factory(indent=4,sort_keys=True)
            #
            # which we can then eval() to create our encoder instance.
            make_encoder = "json_encoder_factory(" + format_spec + ")"
            encoder = eval(
                make_encoder, {"json_encoder_factory": self.json_encoder_factory}
            )
        else:
            encoder = self.json_encoder
        return encoder.encode(self.value)


# JSON property validators, for use with MessageDict.
#
# A validator is invoked with the actual value of the JSON property passed to it as
# the sole argument; or if the property is missing in JSON, then () is passed. Note
# that None represents an actual null in JSON, while () is a missing value.
#
# The validator must either raise TypeError or ValueError describing why the property
# value is invalid, or else return the value of the property, possibly after performing
# some substitutions - e.g. replacing () with some default value.


def of_type(*classinfo, **kwargs):
    """Returns a validator for a JSON property that requires it to have a value of
    the specified type. If optional=True, () is also allowed.

    The meaning of classinfo is the same as for isinstance().
    """

    assert len(classinfo)
    optional = kwargs.pop("optional", False)
    assert not len(kwargs)

    def validate(value):
        if (optional and value == ()) or isinstance(value, classinfo):
            return value
        else:
            if not optional and value == ():
                raise ValueError("must be specified")
            raise TypeError("must be " + " or ".join(t.__name__ for t in classinfo))

    return validate


def default(default):
    """Returns a validator for a JSON property with a default value.

    The validator will only allow property values that have the same type as the
    specified default value.
    """

    def validate(value):
        if value == ():
            return default
        elif isinstance(value, type(default)):
            return value
        else:
            raise TypeError("must be {0}".format(type(default).__name__))

    return validate


def enum(*values, **kwargs):
    """Returns a validator for a JSON enum.

    The validator will only allow the property to have one of the specified values.

    If optional=True, and the property is missing, the first value specified is used
    as the default.
    """

    assert len(values)
    optional = kwargs.pop("optional", False)
    assert not len(kwargs)

    def validate(value):
        if optional and value == ():
            return values[0]
        elif value in values:
            return value
        else:
            raise ValueError("must be one of: {0!r}".format(list(values)))

    return validate


def array(validate_item=False, vectorize=False, size=None):
    """Returns a validator for a JSON array.

    If the property is missing, it is treated as if it were []. Otherwise, it must
    be a list.

    If validate_item=False, it's treated as if it were (lambda x: x) - i.e. any item
    is considered valid, and is unchanged. If validate_item is a type or a tuple,
    it's treated as if it were json.of_type(validate).

    Every item in the list is replaced with validate_item(item) in-place, propagating
    any exceptions raised by the latter. If validate_item is a type or a tuple, it is
    treated as if it were json.of_type(validate_item).

    If vectorize=True, and the value is neither a list nor a dict, it is treated as
    if it were a single-element list containing that single value - e.g. "foo" is
    then the same as ["foo"]; but {} is an error, and not [{}].

    If size is not None, it can be an int, a tuple of one int, a tuple of two ints,
    or a set. If it's an int, the array must have exactly that many elements. If it's
    a tuple of one int, it's the minimum length. If it's a tuple of two ints, they
    are the minimum and the maximum lengths. If it's a set, it's the set of sizes that
    are valid - e.g. for {2, 4}, the array can be either 2 or 4 elements long.
    """

    if not validate_item:
        validate_item = lambda x: x
    elif isinstance(validate_item, type) or isinstance(validate_item, tuple):
        validate_item = of_type(validate_item)

    if size is None:
        validate_size = lambda _: True
    elif isinstance(size, set):
        size = {operator.index(n) for n in size}
        validate_size = lambda value: (
            len(value) in size
            or "must have {0} elements".format(
                " or ".join(str(n) for n in sorted(size))
            )
        )
    elif isinstance(size, tuple):
        assert 1 <= len(size) <= 2
        size = tuple(operator.index(n) for n in size)
        min_len, max_len = (size + (None,))[0:2]
        validate_size = lambda value: (
            "must have at least {0} elements".format(min_len)
            if len(value) < min_len
            else "must have at most {0} elements".format(max_len)
            if max_len is not None and len(value) < max_len
            else True
        )
    else:
        size = operator.index(size)
        validate_size = lambda value: (
            len(value) == size or "must have {0} elements".format(size)
        )

    def validate(value):
        if value == ():
            value = []
        elif vectorize and not isinstance(value, (list, dict)):
            value = [value]

        of_type(list)(value)

        size_err = validate_size(value)  # True if valid, str if error
        if size_err is not True:
            raise ValueError(size_err)

        for i, item in enumerate(value):
            try:
                value[i] = validate_item(item)
            except (TypeError, ValueError) as exc:
                raise type(exc)(fmt("[{0!j}] {1}", i, exc))
        return value

    return validate


def object(validate_value=False):
    """Returns a validator for a JSON object.

    If the property is missing, it is treated as if it were {}. Otherwise, it must
    be a dict.

    If validate_value=False, it's treated as if it were (lambda x: x) - i.e. any
    value is considered valid, and is unchanged. If validate_value is a type or a
    tuple, it's treated as if it were json.of_type(validate_value).

    Every value in the dict is replaced with validate_value(value) in-place, propagating
    any exceptions raised by the latter. If validate_value is a type or a tuple, it is
    treated as if it were json.of_type(validate_value). Keys are not affected.
    """

    if isinstance(validate_value, type) or isinstance(validate_value, tuple):
        validate_value = of_type(validate_value)

    def validate(value):
        if value == ():
            return {}

        of_type(dict)(value)
        if validate_value:
            for k, v in value.items():
                try:
                    value[k] = validate_value(v)
                except (TypeError, ValueError) as exc:
                    raise type(exc)(fmt("[{0!j}] {1}", k, exc))
        return value

    return validate


# A helper to resolve the circular dependency between common.fmt and common.json
# on Python 2.
def fmt(*args, **kwargs):
    from debugpy.common import fmt

    return fmt(*args, **kwargs)
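Not part of the commit; a minimal sketch of the validators defined above (of_type, array, object, enum). The property names and values used here are illustrative.

from debugpy.common import json

validate_port = json.of_type(int, optional=True)
validate_args = json.array(str, vectorize=True, size=(1,))
validate_env = json.object(str)

print(validate_port(()))                  # () means "missing", allowed because optional=True
print(validate_args("script.py"))         # vectorized into ['script.py']
print(validate_env({"PYTHONPATH": "."}))  # every value must be a str

try:
    json.enum("launch", "attach")("detach")
except ValueError as exc:
    print(exc)  # must be one of: ['launch', 'attach']
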
383  .venv/Lib/site-packages/debugpy/common/log.py  Normal file
@@ -0,0 +1,383 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

import atexit
import contextlib
import functools
import inspect
import io
import os
import platform
import sys
import threading
import traceback

import debugpy
from debugpy.common import compat, fmt, timestamp, util


LEVELS = ("debug", "info", "warning", "error")
"""Logging levels, lowest to highest importance.
"""

log_dir = os.getenv("DEBUGPY_LOG_DIR")
"""If not None, debugger logs its activity to a file named debugpy.*-<pid>.log
in the specified directory, where <pid> is the return value of os.getpid().
"""

timestamp_format = "09.3f"
"""Format spec used for timestamps. Can be changed to dial precision up or down.
"""

_lock = threading.RLock()
_tls = threading.local()
_files = {}  # filename -> LogFile
_levels = set()  # combined for all log files


def _update_levels():
    global _levels
    _levels = frozenset(level for file in _files.values() for level in file.levels)


class LogFile(object):
    def __init__(self, filename, file, levels=LEVELS, close_file=True):
        info("Also logging to {0!j}.", filename)

        self.filename = filename
        self.file = file
        self.close_file = close_file
        self._levels = frozenset(levels)

        with _lock:
            _files[self.filename] = self
            _update_levels()
            info(
                "{0} {1}\n{2} {3} ({4}-bit)\ndebugpy {5}",
                platform.platform(),
                platform.machine(),
                platform.python_implementation(),
                platform.python_version(),
                64 if sys.maxsize > 2 ** 32 else 32,
                debugpy.__version__,
                _to_files=[self],
            )

    @property
    def levels(self):
        return self._levels

    @levels.setter
    def levels(self, value):
        with _lock:
            self._levels = frozenset(LEVELS if value is all else value)
            _update_levels()

    def write(self, level, output):
        if level in self.levels:
            try:
                self.file.write(output)
                self.file.flush()
            except Exception:
                pass

    def close(self):
        with _lock:
            del _files[self.filename]
            _update_levels()
        info("Not logging to {0!j} anymore.", self.filename)

        if self.close_file:
            try:
                self.file.close()
            except Exception:
                pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


class NoLog(object):
    file = filename = None

    __bool__ = __nonzero__ = lambda self: False

    def close(self):
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass


# Used to inject a newline into stderr if logging there, to clean up the output
# when it's intermixed with regular prints from other sources.
def newline(level="info"):
    with _lock:
        stderr.write(level, "\n")


def write(level, text, _to_files=all):
    assert level in LEVELS

    t = timestamp.current()
    format_string = "{0}+{1:" + timestamp_format + "}: "
    prefix = fmt(format_string, level[0].upper(), t)

    text = getattr(_tls, "prefix", "") + text
    indent = "\n" + (" " * len(prefix))
    output = indent.join(text.split("\n"))
    output = prefix + output + "\n\n"

    with _lock:
        if _to_files is all:
            _to_files = _files.values()
        for file in _to_files:
            file.write(level, output)

    return text


def write_format(level, format_string, *args, **kwargs):
    # Don't spend cycles doing expensive formatting if we don't have to. Errors are
    # always formatted, so that error() can return the text even if it's not logged.
    if level != "error" and level not in _levels:
        return

    try:
        text = fmt(format_string, *args, **kwargs)
    except Exception:
        reraise_exception()

    return write(level, text, kwargs.pop("_to_files", all))


debug = functools.partial(write_format, "debug")
info = functools.partial(write_format, "info")
warning = functools.partial(write_format, "warning")


def error(*args, **kwargs):
    """Logs an error.

    Returns the output wrapped in AssertionError. Thus, the following::

        raise log.error(...)

    has the same effect as::

        log.error(...)
        assert False, fmt(...)
    """
    return AssertionError(write_format("error", *args, **kwargs))


def _exception(format_string="", *args, **kwargs):
    level = kwargs.pop("level", "error")
    exc_info = kwargs.pop("exc_info", sys.exc_info())

    if format_string:
        format_string += "\n\n"
    format_string += "{exception}\nStack where logged:\n{stack}"

    exception = "".join(traceback.format_exception(*exc_info))

    f = inspect.currentframe()
    f = f.f_back if f else f  # don't log this frame
    try:
        stack = "".join(traceback.format_stack(f))
    finally:
        del f  # avoid cycles

    write_format(
        level, format_string, *args, exception=exception, stack=stack, **kwargs
    )


def swallow_exception(format_string="", *args, **kwargs):
    """Logs an exception with full traceback.

    If format_string is specified, it is formatted with fmt(*args, **kwargs), and
    prepended to the exception traceback on a separate line.

    If exc_info is specified, the exception it describes will be logged. Otherwise,
    sys.exc_info() - i.e. the exception being handled currently - will be logged.

    If level is specified, the exception will be logged as a message of that level.
    The default is "error".
    """

    _exception(format_string, *args, **kwargs)


def reraise_exception(format_string="", *args, **kwargs):
    """Like swallow_exception(), but re-raises the current exception after logging it.
    """

    assert "exc_info" not in kwargs
    _exception(format_string, *args, **kwargs)
    raise


def to_file(filename=None, prefix=None, levels=LEVELS):
    """Starts logging all messages at the specified levels to the designated file.

    Either filename or prefix must be specified, but not both.

    If filename is specified, it designates the log file directly.

    If prefix is specified, the log file is automatically created in options.log_dir,
    with filename computed as prefix + os.getpid(). If log_dir is None, no log file
    is created, and the function returns immediately.

    If the file with the specified or computed name is already being used as a log
    file, it is not overwritten, but its levels are updated as specified.

    The function returns an object with a close() method. When the object is closed,
    logs are not written into that file anymore. Alternatively, the returned object
    can be used in a with-statement:

        with log.to_file("some.log"):
            # now also logging to some.log
        # not logging to some.log anymore
    """

    assert (filename is not None) ^ (prefix is not None)

    if filename is None:
        if log_dir is None:
            return NoLog()
        try:
            os.makedirs(log_dir)
        except OSError:
            pass
        filename = fmt("{0}/{1}-{2}.log", log_dir, prefix, os.getpid())

    file = _files.get(filename)
    if file is None:
        file = LogFile(filename, io.open(filename, "w", encoding="utf-8"), levels)
    else:
        file.levels = levels
    return file


@contextlib.contextmanager
def prefixed(format_string, *args, **kwargs):
    """Adds a prefix to all messages logged from the current thread for the duration
    of the context manager.
    """
    prefix = fmt(format_string, *args, **kwargs)
    old_prefix = getattr(_tls, "prefix", "")
    _tls.prefix = prefix + old_prefix
    try:
        yield
    finally:
        _tls.prefix = old_prefix


def describe_environment(header):
    import sysconfig
    import site  # noqa

    result = [header, "\n\n"]

    def report(*args, **kwargs):
        result.append(fmt(*args, **kwargs))

    def report_paths(get_paths, label=None):
        prefix = fmt(" {0}: ", label or get_paths)

        expr = None
        if not callable(get_paths):
            expr = get_paths
            get_paths = lambda: util.evaluate(expr)
        try:
            paths = get_paths()
        except AttributeError:
            report("{0}<missing>\n", prefix)
            return
        except Exception:
            swallow_exception(
                "Error evaluating {0}",
                repr(expr) if expr else compat.srcnameof(get_paths),
            )
            return

        if not isinstance(paths, (list, tuple)):
            paths = [paths]

        for p in sorted(paths):
            report("{0}{1}", prefix, p)
            rp = os.path.realpath(p)
            if p != rp:
                report("({0})", rp)
            report("\n")

            prefix = " " * len(prefix)

    report("System paths:\n")
    report_paths("sys.prefix")
    report_paths("sys.base_prefix")
    report_paths("sys.real_prefix")
    report_paths("site.getsitepackages()")
    report_paths("site.getusersitepackages()")

    site_packages = [
        p
        for p in sys.path
        if os.path.exists(p)
        and os.path.basename(p) == compat.filename_str("site-packages")
    ]
    report_paths(lambda: site_packages, "sys.path (site-packages)")

    for name in sysconfig.get_path_names():
        expr = fmt("sysconfig.get_path({0!r})", name)
        report_paths(expr)

    report_paths("os.__file__")
    report_paths("threading.__file__")

    result = "".join(result).rstrip("\n")
    info("{0}", result)


stderr = LogFile(
    "<stderr>",
    sys.stderr,
    levels=os.getenv("DEBUGPY_LOG_STDERR", "warning error").split(),
    close_file=False,
)


@atexit.register
def _close_files():
    for file in tuple(_files.values()):
        file.close()


# The following are helper shortcuts for printf debugging. They must never be used
# in production code.


def _repr(value):  # pragma: no cover
    warning("$REPR {0!r}", value)


def _vars(*names):  # pragma: no cover
    locals = inspect.currentframe().f_back.f_locals
    if names:
        locals = {name: locals[name] for name in names if name in locals}
    warning("$VARS {0!r}", locals)


def _stack():  # pragma: no cover
    stack = "\n".join(traceback.format_stack())
    warning("$STACK:\n\n{0}", stack)
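Not part of the commit; a minimal sketch of the logging API above. The log file name and the prefix text are illustrative.

from debugpy.common import log

with log.to_file("debugpy-example.log", levels=("info", "warning", "error")):
    with log.prefixed("[worker-1] "):
        log.info("starting up on port {0}", 5678)
        try:
            1 / 0
        except Exception:
            log.swallow_exception("arithmetic failed")
    log.warning("shutting down")
# "debugpy-example.log" is closed here and no longer receives messages.
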
1559  .venv/Lib/site-packages/debugpy/common/messaging.py  Normal file
File diff suppressed because it is too large
54  .venv/Lib/site-packages/debugpy/common/modules.py  Normal file
@@ -0,0 +1,54 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Provides facilities to use objects as modules, enabling __getattr__, __call__
etc on module level.
"""

import sys
import types


def module(name):
    """A decorator for classes that implement modules.

    Idiomatic usage is with __name__, so that an instance of the class replaces the
    module in which it is defined::

        # foo.py
        @module(__name__)
        class Foo(object):
            def __call__(self): ...

        # bar.py
        import foo
        foo()

    "Regular" globals, including imports, don't work with class modules. Class or
    instance attributes must be used consistently for this purpose, and accessed via
    self inside method bodies::

        @module(__name__)
        class Foo(object):
            import sys

            def __call__(self):
                if self.sys.version_info < (3,): ...
    """

    def decorate(cls):
        class Module(cls, types.ModuleType):
            def __init__(self):
                # Set self up as a proper module, and copy pre-existing globals.
                types.ModuleType.__init__(self, name)
                self.__dict__.update(sys.modules[name].__dict__)

                cls.__init__(self)

        Module.__name__ = cls.__name__
        sys.modules[name] = Module()

    return decorate
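Not part of the commit; a minimal sketch of the @module decorator above. counter.py and Counter are hypothetical names; after decoration, the module object in sys.modules is replaced by a callable Counter instance.

# counter.py (hypothetical)
from debugpy.common.modules import module

@module(__name__)
class Counter(object):
    value = 0  # class attribute; "regular" globals don't survive the swap

    def __call__(self, step=1):
        self.value += step
        return self.value

# elsewhere:
#   import counter
#   counter()   # -> 1
#   counter()   # -> 2
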
187  .venv/Lib/site-packages/debugpy/common/singleton.py  Normal file
@@ -0,0 +1,187 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

import functools
import threading


class Singleton(object):
    """A base class for a class of a singleton object.

    For any derived class T, the first invocation of T() will create the instance,
    and any future invocations of T() will return that instance.

    Concurrent invocations of T() from different threads are safe.
    """

    # A dual-lock scheme is necessary to be thread safe while avoiding deadlocks.
    # _lock_lock is shared by all singleton types, and is used to construct their
    # respective _lock instances when invoked for a new type. Then _lock is used
    # to synchronize all further access for that type, including __init__. This way,
    # __init__ for any given singleton can access another singleton, and not get
    # deadlocked if that other singleton is trying to access it.
    _lock_lock = threading.RLock()
    _lock = None

    # Specific subclasses will get their own _instance set in __new__.
    _instance = None

    _is_shared = None  # True if shared, False if exclusive

    def __new__(cls, *args, **kwargs):
        # Allow arbitrary args and kwargs if shared=False, because that is guaranteed
        # to construct a new singleton if it succeeds. Otherwise, this call might end
        # up returning an existing instance, which might have been constructed with
        # different arguments, so allowing them is misleading.
        assert not kwargs.get("shared", False) or (len(args) + len(kwargs)) == 0, (
            "Cannot use constructor arguments when accessing a Singleton without "
            "specifying shared=False."
        )

        # Avoid locking as much as possible with repeated double-checks - the most
        # common path is when everything is already allocated.
        if not cls._instance:
            # If there's no per-type lock, allocate it.
            if cls._lock is None:
                with cls._lock_lock:
                    if cls._lock is None:
                        cls._lock = threading.RLock()

            # Now that we have a per-type lock, we can synchronize construction.
            if not cls._instance:
                with cls._lock:
                    if not cls._instance:
                        cls._instance = object.__new__(cls)
                        # To prevent having __init__ invoked multiple times, call
                        # it here directly, and then replace it with a stub that
                        # does nothing - that stub will get auto-invoked on return,
                        # and on all future singleton accesses.
                        cls._instance.__init__()
                        cls.__init__ = lambda *args, **kwargs: None

        return cls._instance

    def __init__(self, *args, **kwargs):
        """Initializes the singleton instance. Guaranteed to only be invoked once for
        any given type derived from Singleton.

        If shared=False, the caller is requesting a singleton instance for their own
        exclusive use. This is only allowed if the singleton has not been created yet;
        if so, it is created and marked as being in exclusive use. While it is marked
        as such, all attempts to obtain an existing instance of it immediately raise
        an exception. The singleton can eventually be promoted to shared use by calling
        share() on it.
        """

        shared = kwargs.pop("shared", True)
        with self:
            if shared:
                assert (
                    type(self)._is_shared is not False
                ), "Cannot access a non-shared Singleton."
                type(self)._is_shared = True
            else:
                assert type(self)._is_shared is None, "Singleton is already created."

    def __enter__(self):
        """Lock this singleton to prevent concurrent access."""
        type(self)._lock.acquire()
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        """Unlock this singleton to allow concurrent access."""
        type(self)._lock.release()

    def share(self):
        """Share this singleton, if it was originally created with shared=False."""
        type(self)._is_shared = True


class ThreadSafeSingleton(Singleton):
    """A singleton that incorporates a lock for thread-safe access to its members.

    The lock can be acquired using the context manager protocol, and thus idiomatic
    use is in conjunction with a with-statement. For example, given derived class T::

        with T() as t:
            t.x = t.frob(t.y)

    All access to the singleton from the outside should follow this pattern for both
    attributes and method calls. Singleton members can assume that self is locked by
    the caller while they're executing, but recursive locking of the same singleton
    on the same thread is also permitted.
    """

    threadsafe_attrs = frozenset()
    """Names of attributes that are guaranteed to be used in a thread-safe manner.

    This is typically used in conjunction with share() to simplify synchronization.
    """

    readonly_attrs = frozenset()
    """Names of attributes that are readonly. These can be read without locking, but
    cannot be written at all.

    Every derived class gets its own separate set. Thus, for any given singleton type
    T, an attribute can be made readonly after setting it, with T.readonly_attrs.add().
    """

    def __init__(self, *args, **kwargs):
        super(ThreadSafeSingleton, self).__init__(*args, **kwargs)
        # Make sure each derived class gets a separate copy.
        type(self).readonly_attrs = set(type(self).readonly_attrs)

    # Prevent callers from reading or writing attributes without locking, except for
    # reading attributes listed in threadsafe_attrs, and methods specifically marked
    # with @threadsafe_method. Such methods should perform the necessary locking to
    # ensure thread safety for the callers.

    @staticmethod
    def assert_locked(self):
        lock = type(self)._lock
        assert lock.acquire(blocking=False), (
            "ThreadSafeSingleton accessed without locking. Either use with-statement, "
            "or if it is a method or property, mark it as @threadsafe_method or with "
            "@autolocked_method, as appropriate."
        )
        lock.release()

    def __getattribute__(self, name):
        value = object.__getattribute__(self, name)
        if name not in (type(self).threadsafe_attrs | type(self).readonly_attrs):
            if not getattr(value, "is_threadsafe_method", False):
                ThreadSafeSingleton.assert_locked(self)
        return value

    def __setattr__(self, name, value):
        assert name not in type(self).readonly_attrs, "This attribute is read-only."
        if name not in type(self).threadsafe_attrs:
            ThreadSafeSingleton.assert_locked(self)
        return object.__setattr__(self, name, value)


def threadsafe_method(func):
    """Marks a method of a ThreadSafeSingleton-derived class as inherently thread-safe.

    A method so marked must either not use any singleton state, or lock it appropriately.
    """

    func.is_threadsafe_method = True
    return func


def autolocked_method(func):
    """Automatically synchronizes all calls of a method of a ThreadSafeSingleton-derived
    class by locking the singleton for the duration of each call.
    """

    @functools.wraps(func)
    @threadsafe_method
    def lock_and_call(self, *args, **kwargs):
        with self:
            return func(self, *args, **kwargs)

    return lock_and_call
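Not part of the commit; a minimal sketch of ThreadSafeSingleton and autolocked_method above. Stats is a hypothetical subclass; all outside access goes through a with-statement or an autolocked method.

import threading
from debugpy.common.singleton import ThreadSafeSingleton, autolocked_method

class Stats(ThreadSafeSingleton):
    total = 0

    @autolocked_method
    def add(self, n):
        # The singleton lock is held for the duration of this call.
        self.total += n

def worker():
    for _ in range(1000):
        Stats().add(1)

threads = [threading.Thread(target=worker) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()

with Stats() as stats:
    print(stats.total)  # 4000
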
124  .venv/Lib/site-packages/debugpy/common/sockets.py  Normal file
@@ -0,0 +1,124 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

import socket
import sys
import threading

from debugpy.common import log


def create_server(host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Return a local server socket listening on the given port."""

    assert backlog > 0
    if host is None:
        host = "127.0.0.1"
    if port is None:
        port = 0

    try:
        server = _new_sock()
        server.bind((host, port))
        if timeout is not None:
            server.settimeout(timeout)
        server.listen(backlog)
    except Exception:
        server.close()
        raise
    return server


def create_client():
    """Return a client socket that may be connected to a remote address."""
    return _new_sock()


def _new_sock():
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
    if sys.platform == "win32":
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
    else:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    # Set TCP keepalive on an open socket.
    # It activates after 1 second (TCP_KEEPIDLE) of idleness,
    # then sends a keepalive ping once every 3 seconds (TCP_KEEPINTVL),
    # and closes the connection after 5 failed pings (TCP_KEEPCNT), or 15 seconds.
    try:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    except (AttributeError, OSError):
        pass  # May not be available everywhere.
    try:
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 1)
    except (AttributeError, OSError):
        pass  # May not be available everywhere.
    try:
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 3)
    except (AttributeError, OSError):
        pass  # May not be available everywhere.
    try:
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
    except (AttributeError, OSError):
        pass  # May not be available everywhere.
    return sock


def shut_down(sock, how=socket.SHUT_RDWR):
    """Shut down the given socket."""
    sock.shutdown(how)


def close_socket(sock):
    """Shut down and close the socket."""
    try:
        shut_down(sock)
    except Exception:
        pass
    sock.close()


def serve(name, handler, host, port=0, backlog=socket.SOMAXCONN, timeout=None):
    """Accepts TCP connections on the specified host and port, and invokes the
    provided handler function for every new connection.

    Returns the created server socket.
    """

    assert backlog > 0

    try:
        listener = create_server(host, port, backlog, timeout)
    except Exception:
        log.reraise_exception(
            "Error listening for incoming {0} connections on {1}:{2}:", name, host, port
        )
    host, port = listener.getsockname()
    log.info("Listening for incoming {0} connections on {1}:{2}...", name, host, port)

    def accept_worker():
        while True:
            try:
                sock, (other_host, other_port) = listener.accept()
            except (OSError, socket.error):
                # Listener socket has been closed.
                break

            log.info(
                "Accepted incoming {0} connection from {1}:{2}.",
                name,
                other_host,
                other_port,
            )
            handler(sock)

    thread = threading.Thread(target=accept_worker)
    thread.daemon = True
    thread.pydev_do_not_trace = True
    thread.is_pydev_daemon_thread = True
    thread.start()

    return listener
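Not part of the commit; a minimal sketch of serve() and create_client() above. The handler simply echoes one message back, and the connection name "example" is illustrative.

from debugpy.common import sockets

def handle(sock):
    # Echo a single message back to the client, then close.
    data = sock.recv(1024)
    sock.sendall(data)
    sockets.close_socket(sock)

listener = sockets.serve("example", handle, "127.0.0.1", port=0)
_, port = listener.getsockname()

client = sockets.create_client()
client.connect(("127.0.0.1", port))
client.sendall(b"ping")
print(client.recv(1024))  # b'ping'
sockets.close_socket(client)
sockets.close_socket(listener)
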
66  .venv/Lib/site-packages/debugpy/common/stacks.py  Normal file
@@ -0,0 +1,66 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Provides facilities to dump all stacks of all threads in the process.
"""

import os
import sys
import time
import threading
import traceback

from debugpy.common import log


def dump():
    """Dump stacks of all threads in this process, except for the current thread.
    """

    tid = threading.current_thread().ident
    pid = os.getpid()

    log.info("Dumping stacks for process {0}...", pid)

    for t_ident, frame in sys._current_frames().items():
        if t_ident == tid:
            continue

        for t in threading.enumerate():
            if t.ident == tid:
                t_name = t.name
                t_daemon = t.daemon
                break
        else:
            t_name = t_daemon = "<unknown>"

        stack = "".join(traceback.format_stack(frame))
        log.info(
            "Stack of thread {0} (tid={1}, pid={2}, daemon={3}):\n\n{4}",
            t_name,
            t_ident,
            pid,
            t_daemon,
            stack,
        )

    log.info("Finished dumping stacks for process {0}.", pid)


def dump_after(secs):
    """Invokes dump() on a background thread after waiting for the specified time.
    """

    def dumper():
        time.sleep(secs)
        try:
            dump()
        except:
            log.swallow_exception()

    thread = threading.Thread(target=dumper)
    thread.daemon = True
    thread.start()
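Not part of the commit; a minimal sketch of dump_after() above. Raising log.stderr.levels to all is only needed so that the info-level dump output is visible on stderr.

import threading
import time
from debugpy.common import log, stacks

log.stderr.levels = all  # enable all levels on the stderr log for this demo

def busy():
    while True:
        time.sleep(1)

threading.Thread(target=busy, name="busy-loop", daemon=True).start()

stacks.dump_after(0.5)  # schedule a dump on a background thread
time.sleep(1)           # give it time to run before the script exits
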
31  .venv/Lib/site-packages/debugpy/common/timestamp.py  Normal file
@@ -0,0 +1,31 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

"""Provides monotonic timestamps with a resetable zero.
"""

import sys
import time

__all__ = ["current", "reset"]


if sys.version_info >= (3, 5):
    clock = time.monotonic
else:
    clock = time.clock


def current():
    return clock() - timestamp_zero


def reset():
    global timestamp_zero
    timestamp_zero = clock()


reset()
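Not part of the commit; a minimal sketch of the timestamp module above, formatted with the same "09.3f" spec that log.py uses.

import time
from debugpy.common import timestamp

timestamp.reset()                # move the zero point to "now"
time.sleep(0.25)
print("{0:09.3f}".format(timestamp.current()))  # e.g. 00000.250
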
68  .venv/Lib/site-packages/debugpy/common/util.py  Normal file
@@ -0,0 +1,68 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from __future__ import absolute_import, division, print_function, unicode_literals

import os
import sys

from debugpy.common import compat


def evaluate(code, path=__file__, mode="eval"):
    # Setting file path here to avoid breaking here if users have set
    # "break on exception raised" setting. This code can potentially run
    # in user process and is indistinguishable if the path is not set.
    # We use the path internally to skip exception inside the debugger.
    expr = compile(code, path, "eval")
    return eval(expr, {}, sys.modules)


class Observable(object):
    """An object with change notifications."""

    observers = ()  # used when attributes are set before __init__ is invoked

    def __init__(self):
        self.observers = []

    def __setattr__(self, name, value):
        try:
            return super(Observable, self).__setattr__(name, value)
        finally:
            for ob in self.observers:
                ob(self, name)


class Env(dict):
    """A dict for environment variables.
    """

    @staticmethod
    def snapshot():
        """Returns a snapshot of the current environment.
        """
        return Env(os.environ)

    def copy(self, updated_from=None):
        result = Env(self)
        if updated_from is not None:
            result.update(updated_from)
        return result

    def prepend_to(self, key, entry):
        """Prepends a new entry to a PATH-style environment variable, creating
        it if it doesn't exist already.
        """
        try:
            tail = os.path.pathsep + self[key]
        except KeyError:
            tail = ""
        self[key] = entry + tail

    def for_popen(self):
        """Returns a copy of this dict, with all strings converted to the type
        suitable for subprocess.Popen() and other similar APIs.
        """
        return {compat.filename_str(k): compat.filename_str(v) for k, v in self.items()}
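Not part of the commit; a minimal sketch of Env above. The DEBUGPY_LOG_DIR value and the "src" path entry are illustrative.

import subprocess
import sys
from debugpy.common.util import Env

env = Env.snapshot()
env["DEBUGPY_LOG_DIR"] = "."          # hypothetical value for illustration
env.prepend_to("PYTHONPATH", "src")   # becomes "src" + os.pathsep + old value, if any

subprocess.check_call(
    [sys.executable, "-c", "import os; print(os.environ['PYTHONPATH'])"],
    env=env.for_popen(),
)
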