fix for mac

This commit is contained in:
Errol Sancaktar 2024-08-02 16:22:05 -06:00
parent 3d515c4832
commit 20a7c6c8f4
2770 changed files with 473600 additions and 4 deletions

View File

@ -0,0 +1,931 @@
# GENERAL
# import = ["<string>",]
# Import additional configuration files.
# Imports are loaded in order, skipping all missing files, with the importing file being loaded last. If a field is already present in a previous import, it will be replaced.
# All imports must either be absolute paths starting with /, or paths relative to the user's home directory starting with ~/.
# Example:
# import = [
# "~/.config/alacritty/base16-dark.toml",
# "~/.config/alacritty/keybindings.toml",
# ]
import = [
"~/.config/alacritty/catpuccin-latte.toml",
]
# live_config_reload = true | false
# Live config reload (changes require restart)
# Default: true
live_config_reload = true
# ipc_socket = true | false # (unix only)
# Offer IPC using alacritty msg
# Default: true
# ipc_socket = true
# ENV
# All key-value pairs in the [env] section will be added as environment variables for any process spawned by Alacritty, including its shell. Some entries may override variables set by alacritty itself.
# Example:
[env]
#TERM = "xterm-256color"
TERM = "alacritty"
# WINIT_X11_SCALE_FACTOR = "1.0"
# WINDOW
# This section documents the [window] table of the configuration file.
# dimensions = { columns = <integer>, lines = <integer> }
# Window dimensions (changes require restart).
# Number of lines/columns (not pixels) in the terminal. Both lines and columns must be non-zero for this to take effect. The number of columns must be at least 2, while using a value of 0 for columns and lines will fall back to the window manager's recommended size
# Default: { columns = 0, lines = 0 }
# position = "None" | { x = <integer>, y = <integer> }
# Window startup position.
# Specified in number of pixels.
# If the position is "None", the window manager will handle placement.
# Default: "None"
# padding = { x = <integer>, y = <integer> }
# Blank space added around the window in pixels. This padding is scaled by DPI and the specified value is always added at both opposing sides.
# Default: { x = 0, y = 0 }
# dynamic_padding = true | false
# Spread additional padding evenly around the terminal content.
# Default: false
# Window decorations.
# decorations = "Full" | "None" | "Transparent" | "Buttonless"
# Full
# Borders and title bar.
# None
# Neither borders nor title bar.
# Transparent (macOS only)
# Title bar, transparent background and title bar buttons.
# Buttonless (macOS only)
# Title bar, transparent background and no title bar buttons.
# Default: "Full"
# Background opacity (works on macOS & KDE Wayland only)
# opacity = <float>
# Background opacity as a floating point number from 0.0 to 1.0. The value 0.0 is completely transparent and 1.0 is opaque.
# Default: 1.0
# Request compositor to blur content behind transparent windows (works on macOS & KDE Wayland only).
# Default: false
# blur = true | false
# Startup mode (changes require restart)
# startup_mode = "Windowed" | "Maximized" | "Fullscreen" | "SimpleFullscreen"
# Windowed
# Regular window.
# Maximized
# The window will be maximized on startup.
# Fullscreen
# The window will be fullscreened on startup.
# SimpleFullscreen (macOS only)
# Same as Fullscreen, but you can stack windows on top.
# Default: "Windowed"
# Window title.
# title = "<string>"
# Default: "Alacritty"
# dynamic_title = true | false
# Allow terminal applications to change Alacritty's window title.
# Default: true
# Window class.
# class = { instance = "<string>", general = "<string>" } # (Linux/BSD only)
# On Wayland, general is used as app_id and instance is ignored.
# Default: { instance = "Alacritty", general = "Alacritty" }
# Override the variant of the System theme/GTK theme/Wayland client side decorations. Set this to "None" to use the system's default theme variant.
# decorations_theme_variant = "Dark" | "Light" | "None"
# Default: "None"
# resize_increments = true | false
# Prefer resizing window by discrete steps equal to cell dimensions.
# Default: false
# option_as_alt = "OnlyLeft" | "OnlyRight" | "Both" | "None" # (macos only)
# Make Option key behave as Alt.
# Default: "None"
# Example:
# [window]
# padding = { x = 3, y = 3 }
# dynamic_padding = true
# opacity = 0.9
[window]
startup_mode = "Windowed"
title = "eTerm"
dynamic_title = true
dynamic_padding = true
decorations = "Full"
decorations_theme_variant = "Dark"
opacity = 0.8
blur = true
dimensions = { columns = 160, lines = 45 }
# SCROLLING
# This section documents the [scrolling] table of the configuration file.
# history = <integer>
# Maximum number of lines in the scrollback buffer.
# Specifying 0 will disable scrolling.
# Limited to 100000.
# Default: 10000
# multiplier = <integer>
# Number of lines scrolled for every input scroll increment.
# Default: 3
[scrolling]
history = 100000
multiplier = 3
# FONT
# This section documents the [font] table of the configuration file.
# normal = { family = "<string>", style = "<string>" }
# Default:
# Linux/BSD: { family = "monospace", style = "Regular" }
# Windows: { family = "Consolas", style = "Regular" }
# macOS: { family = "Menlo", style = "Regular" }
# bold = { family = "<string>", style = "<string>" }
# If the family is not specified, it will fall back to the value specified for the normal font.
# Default: { style = "Bold" }
# italic = { family = "<string>", style = "<string>" }
# If the family is not specified, it will fall back to the value specified for the normal font.
# Default: { style = "Italic" }
# bold_italic = { family = "<string>", style = "<string>" }
# If the family is not specified, it will fall back to the value specified for the normal font.
# Default: { style = "Bold Italic" }
# size = <float>
# Font size in points.
# Default: 11.25
# offset = { x = <integer>, y = <integer> }
# Offset is the extra space around each character. The 'y' can be thought of as modifying the line spacing, and 'x' as modifying the letter spacing.
# Default: { x = 0, y = 0 }
# glyph_offset = { x = <integer>, y = <integer> }
# Glyph offset determines the locations of the glyphs within their cells with the default being at the bottom. Increasing x moves the glyph to the right, increasing y moves the glyph upward.
# builtin_box_drawing = true | false
# When true, Alacritty will use a custom built-in font for box drawing characters (Unicode points U+2500 - U+259F) and powerline symbols (Unicode points U+E0B0 - U+E0B3).
# Default: true
# COLORS
# This section documents the [colors] table of the configuration file.
# Colors are specified using their hexadecimal values with a # prefix: #RRGGBB.
# colors primary
# This section documents the [colors.primary] table of the configuration file.
[colors]
#foreground = "#d8d8d8"
#background = "#181818"
#dim_foreground = "#828482"
#bright_foreground = "None"
#cursor = { text = "CellBackground", cursor = "CellForeground" }
# foreground = "<string>"
# Default: "#d8d8d8"
# background = "<string>"
# Default: "#181818"
# dim_foreground = "<string>"
# If this is not set, the color is automatically calculated based on the foreground color.
# Default: "#828482"
# bright_foreground = "<string>"
# This color is only used when draw_bold_text_with_bright_colors is true.
# If this is not set, the normal foreground will be used.
# Default: "None"
# cursor = { text = "<string>", cursor = "<string>" }
# Colors which should be used to draw the terminal cursor.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
# Default: { text = "CellBackground", cursor = "CellForeground" }
# vi_mode_cursor = { text = "<string>", cursor = "<string>" }
# Colors for the cursor when the vi mode is active.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
# Default: { text = "CellBackground", cursor = "CellForeground" }
# colors search
# This section documents the [colors.search] table of the configuration.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
[colors.search]
matches = { foreground = "#181818", background = "#ac4242" }
focused_match = { foreground = "#181818", background = "#f4bf75" }
# matches = { foreground = "<string>", background = "<string>" }
# Default: { foreground = "#181818", background = "#ac4242" }
# focused_match = { foreground = "<string>", background = "<string>" }
# Default: { foreground = "#181818", background = "#f4bf75" }
# indexed_colors = [{ index = <integer>, color = "<string>" },]
# The indexed colors include all colors from 16 to 256. When these are not set, they're filled with sensible defaults.
# Default: []
# transparent_background_colors = true | false
# Whether or not window.opacity applies to all cell backgrounds, or only to the default background. When set to true all cells will be transparent regardless of their background color.
# Default: false
# draw_bold_text_with_bright_colors = true | false
# When true, bold text is drawn using the bright color variants.
# Default: false
# colors hints
# This section documents the [colors.hints] table of the configuration.
[colors.hints]
start = { foreground = "#181818", background = "#f4bf75" }
end = { foreground = "#181818", background = "#ac4242" }
# start = { foreground = "<string>", background = "<string>" }
# First character in the hint label.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
# Default: { foreground = "#181818", background = "#f4bf75" }
# end = { foreground = "<string>", background = "<string>" }
# All characters after the first one in the hint label.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
# Default: { foreground = "#181818", background = "#ac4242" }
# line_indicator = { foreground = "<string>", background = "<string>" }
# Color used for the indicator displaying the position in history during search and vi mode.
# Setting this to "None" will use the opposing primary color.
# Default: { foreground = "None", background = "None" }
# footer_bar = { foreground = "<string>", background = "<string>" }
# Color used for the footer bar on the bottom, used by search regex input, hyperlink URI preview, etc.
# Default: { foreground = "#181818", background = "#d8d8d8" }
# selection = { text = "<string>", background = "<string>" }
# Colors used for drawing selections.
# Allowed values are hexadecimal colors like #ff00ff, or CellForeground/CellBackground, which references the affected cell.
# Default: { text = "CellBackground", background = "CellForeground" }
# normal
# This section documents the [colors.normal] table of the configuration.
# Defaults from Alacritty.org used.
[colors.normal]
black = "#181818"
red = "#ac4242"
green = "#90a959"
yellow = "#f4bf75"
blue = "#6a9fb5"
magenta = "#aa759f"
cyan = "#75b5aa"
white = "#d8d8d8"
# bright
# This section documents the [colors.bright] table of the configuration.
# Defaults from Alacritty.org used.
[colors.bright]
black = "#6b6b6b"
red = "#c55555"
green = "#aac474"
yellow = "#feca88"
blue = "#82b8c8"
magenta = "#c28cb8"
cyan = "#93d3c3"
white = "#f8f8f8"
# dim
# This section documents the [colors.dim] table of the configuration.
# If the dim colors are not set, they will be calculated automatically based on the normal colors.
[colors.dim]
black = "#0f0f0f"
red = "#712b2b"
green = "#5f6f3a"
yellow = "#a17e4d"
blue = "#456877"
magenta = "#704d68"
cyan = "#4d7770"
white = "#8e8e8e"
# BELL
# This section documents the [bell] table of the configuration file.
# animation = "Ease" | "EaseOut" | "EaseOutSine" | "EaseOutQuad" | "EaseOutCubic" | "EaseOutQuart" | "EaseOutQuint" | "EaseOutExpo" | "EaseOutCirc" | "Linear"
# Visual bell animation effect for flashing the screen when the visual bell is rung.
# Default: "Linear"
# duration = <integer>
# Duration of the visual bell flash in milliseconds. A `duration` of `0` will disable the visual bell animation.
# Default: 0
# color = "<string>"
# Visual bell animation color.
# Default: "#ffffff"
# command = "<string>" | { program = "<string>", args = ["<string>",] }
# This program is executed whenever the bell is rung.
# When set to "None", no command will be executed.
# Default: "None"
[bell]
animation = "EaseOutExpo"
duration = 0
#color = "#ffffff"
#command = "None"
# SELECTION
# This section documents the [selection] table of the configuration file.
# semantic_escape_chars = "<string>"
# This string contains all characters that are used as separators for "semantic words" in Alacritty.
# Default: ",│`|:\"' ()[]{}<>\t"
# save_to_clipboard = true | false
# When set to true, selected text will be copied to the primary clipboard.
# Default: false
[selection]
semantic_escape_chars = ",│`|:\"' ()[]{}<>\t"
save_to_clipboard = false
# CURSOR
# This section documents the [cursor] table of the configuration file.
# style = { <shape>, <blinking> }
# shape = "Block" | "Underline" | "Beam"
# Default: "Block"
# blinking = "Never" | "Off" | "On" | "Always"
# Never - Prevent the cursor from ever blinking
# Off - Disable blinking by default
# On - Enable blinking by default
# Always - Force the cursor to always blink
# Default: "Off"
# vi_mode_style = { <shape>, <blinking> } | "None"
# If the vi mode cursor style is "None" or not specified, it will fall back to the active value of the normal cursor.
# Default: "None"
# blink_interval = <integer>
# Cursor blinking interval in milliseconds.
# Default: 750
# blink_timeout = <integer>
# Time after which cursor stops blinking, in seconds.
# Specifying 0 will disable timeout for blinking.
# Default: 5
# unfocused_hollow = true | false
# When this is true, the cursor will be rendered as a hollow box when the window is not focused.
# Default: true
# thickness = <float>
# Thickness of the cursor relative to the cell width as floating point number from 0.0 to 1.0.
# Default: 0.15
[cursor]
style = { shape = "Block", blinking = "On" }
unfocused_hollow = true
thickness = 0.15
# TERMINAL
# This section documents the [terminal] table of the configuration file.
# osc52 = "Disabled" | "OnlyCopy" | "OnlyPaste" | "CopyPaste"
# Controls the ability to write to the system clipboard with the OSC 52 escape sequence. While this escape sequence is useful to copy contents from the remote server, allowing any application to read from the clipboard can be easily abused while not providing significant benefits over explicitly pasting text.
# Default: "OnlyCopy"
#[terminal]
# MOUSE
# This section documents the [mouse] table of the configuration file.
# hide_when_typing = true | false
# When this is true, the cursor is temporarily hidden when typing.
# Default: false
# bindings = [{ <mouse>, <mods>, <mode>, <action> | <chars> },]
# See keyboard.bindings for full documentation on mods, mode, action, and chars.
# When an application running within Alacritty captures the mouse, the `Shift` modifier can be used to suppress mouse reporting. If no action is found for the event, actions for the event without the `Shift` modifier are triggered instead.
# mouse = "Middle" | "Left" | "Right" | "Back" | "Forward" | <integer>
# Mouse button which needs to be pressed to trigger this binding.
# action = <keyboard.bindings.action> | "ExpandSelection"
# ExpandSelection
# Expand the selection to the current mouse cursor location.
# Example:
# [mouse]
# bindings = [
# { mouse = "Right", mods = "Control", action = "Paste" },
# ]
[mouse]
hide_when_typing = true
# HINTS
# This section documents the [hints] table of the configuration file.
# Terminal hints can be used to find text or hyperlinks in the visible part of the terminal and pipe it to other applications.
# alphabet = "<string>"
# Keys used for the hint labels.
# Default: "jfkdls;ahgurieowpq"
# enabled = [{ <regex>, <hyperlinks>, <post_processing>, <persist>, <action>, <command> <binding>, <mouse> },]
# Array with all available hints.
# Each hint must have at least one of regex or hyperlinks and either an action or a command.
# regex = "<string>"
# Regex each line will be compared against.
# hyperlinks = true | false
# When this is true, all OSC 8 escape sequence hyperlinks will be included in the hints.
# post_processing = true | false
# When this is true, heuristics will be used to shorten the match if there are characters likely not to be part of the hint (e.g. a trailing .). This is most useful for URIs and applies only to regex matches.
# persist = true | false
# When this is true, hints remain persistent after selection.
# action = "Copy" | "Paste" | "Select" | "MoveViModeCursor"
# Copy
# Copy the hint's text to the clipboard.
# Paste
# Paste the hint's text to the terminal or search.
# Select
# Select the hint's text.
# MoveViModeCursor
# Move the vi mode cursor to the beginning of the hint.
# command = "<string>" | { program = "<string>", args = ["<string>",] }
# Command which will be executed when the hint is clicked or selected with the binding.
# The hint's text is always attached as the last argument.
# binding = { key = "<string>", mods = "<string>", mode = "<string>" }
# See keyboard.bindings for documentation on available values.
# This controls which key binding is used to start the keyboard hint selection process.
# mouse = { mods = "<string>", enabled = true | false }
# See keyboard.bindings for documentation on available mods.
# The enabled field controls if the hint should be underlined when hovering over the hint text with all mods pressed.
# Default:
# [[hints.enabled]]
# command = "xdg-open" # On Linux/BSD
# # command = "open" # On macOS
# # command = { program = "cmd", args = [ "/c", "start", "" ] } # On Windows
# hyperlinks = true
# post_processing = true
# persist = false
# mouse.enabled = true
# binding = { key = "U", mods = "Control|Shift" }
# regex = "(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file:|git://|ssh:|ftp://)[^\u0000-\u001F\u007F-\u009F<>\"\\s{-}\\^⟨⟩`]+"
#[hints]
# KEYBOARD
# This section documents the [keyboard] table of the configuration file.
# bindings = [{ <key>, <mods>, <mode>, <action> | <chars> },]
# To unset a default binding, you can use the action "ReceiveChar" to remove it or "None" to inhibit any action.
# Multiple keybindings can be triggered by a single key press and will be executed in the order they are defined in.
# key = "<string>"
# The regular keys like "A", "0", and "Я" can be mapped directly without any special syntax. Full list of named keys like "F1" and the syntax for dead keys can be found here:
# https://docs.rs/winit/latest/winit/keyboard/enum.NamedKey.html
# https://docs.rs/winit/latest/winit/keyboard/enum.Key.html#variant.Dead
# Numpad keys are prefixed by Numpad: "NumpadEnter" | "NumpadAdd" | "NumpadComma" | "NumpadDivide" | "NumpadEquals" | "NumpadSubtract" | "NumpadMultiply" | "Numpad[0-9]".
# The key field also supports using scancodes, which are specified as a decimal number.
# mods = "Command" | "Control" | "Option" | "Super" | "Shift" | "Alt"
# Multiple modifiers can be combined using |, like this: "Control | Shift".
# mode = "AppCursor" | "AppKeypad" | "Search" | "Alt" | "Vi"
# This defines a terminal mode which must be active for this binding to have an effect.
# Prepending ~ to a mode will require the mode to not be active for the binding to take effect.
# Multiple modes can be combined using |, like this: "~Vi|Search".
# chars = "<string>"
# Writes the specified string to the terminal.
# LIST OF ACTIONS
# format:
# action name
# description of action function
# Expandable List:
# None
# No action.
# Paste
# Paste contents of system clipboard.
# Copy
# Store current selection into clipboard.
# ReceiveChar
# Allow receiving char input.
# IncreaseFontSize
# Increase font size.
# DecreaseFontSize
# Decrease font size.
# ResetFontSize
# Reset font size to the config value.
# ScrollPageUp
# Scroll exactly one page up.
# ScrollPageDown
# Scroll exactly one page down.
# ScrollHalfPageUp
# Scroll half a page up.
# ScrollHalfPageDown
# Scroll half a page down.
# ScrollLineUp
# Scroll one line up.
# ScrollLineDown
# Scroll one line down.
# ScrollToTop
# Scroll all the way to the top.
# ScrollToBottom
# Scroll all the way to the bottom.
# ClearHistory
# Clear the display buffer(s) to remove history.
# Hide
# Hide the Alacritty window.
# Minimize
# Minimize the Alacritty window.
# Quit
# Quit Alacritty.
# ClearLogNotice
# Clear warning and error notices.
# SpawnNewInstance
# Spawn a new instance of Alacritty.
# CreateNewWindow
# Create a new Alacritty window.
# ToggleFullscreen
# Toggle fullscreen.
# ToggleMaximized
# Toggle maximized.
# ClearSelection
# Clear active selection.
# ToggleViMode
# Toggle vi mode.
# SearchForward
# Start a forward buffer search.
# SearchBackward
# Start a backward buffer search.
#
# Vi mode actions
# Expandable List:
# Up
# Move up.
# Down
# Move down.
# Left
# Move left.
# Right
# Move right.
# First
# First column, or beginning of the line when already at the first column.
# Last
# Last column, or beginning of the line when already at the last column.
# FirstOccupied
# First non-empty cell in this terminal row, or first non-empty cell of the line when already at the first cell of the row.
# High
# Move to top of screen.
# Middle
# Move to center of screen.
# Low
# Move to bottom of screen.
# SemanticLeft
# Move to start of semantically separated word.
# SemanticRight
# Move to start of next semantically separated word.
# SemanticLeftEnd
# Move to end of previous semantically separated word.
# SemanticRightEnd
# Move to end of semantically separated word.
# WordLeft
# Move to start of whitespace separated word.
# WordRight
# Move to start of next whitespace separated word.
# WordLeftEnd
# Move to end of previous whitespace separated word.
# WordRightEnd
# Move to end of whitespace separated word.
# Bracket
# Move to opposing bracket.
# ToggleNormalSelection
# Toggle normal vi selection.
# ToggleLineSelection
# Toggle line vi selection.
# ToggleBlockSelection
# Toggle block vi selection.
# ToggleSemanticSelection
# Toggle semantic vi selection.
# SearchNext
# Jump to the beginning of the next match.
# SearchPrevious
# Jump to the beginning of the previous match.
# SearchStart
# Jump to the next start of a match to the left of the origin.
# SearchEnd
# Jump to the next end of a match to the right of the origin.
# Open
# Launch the URL below the vi mode cursor.
# CenterAroundViCursor
# Centers the screen around the vi mode cursor.
# InlineSearchForward
# Search forward within the current line.
# InlineSearchBackward
# Search backward within the current line.
# InlineSearchForwardShort
# Search forward within the current line, stopping just short of the character.
# InlineSearchBackwardShort
# Search backward within the current line, stopping just short of the character.
# InlineSearchNext
# Jump to the next inline search match.
# InlineSearchPrevious
# Jump to the previous inline search match.
# Search actions:
# SearchFocusNext
# Move the focus to the next search match.
# SearchFocusPrevious
# Move the focus to the previous search match.
# SearchConfirm
# Confirm the active search.
# SearchCancel
# Cancel the active search.
# SearchClear
# Reset the search regex.
# SearchDeleteWord
# Delete the last word in the search regex.
# SearchHistoryPrevious
# Go to the previous regex in the search history.
# SearchHistoryNext
# Go to the next regex in the search history.
# macOS exclusive:
# ToggleSimpleFullscreen
# Enter fullscreen without occupying another space.
# HideOtherApplications
# Hide all windows other than Alacritty.
# CreateNewTab
# Create new window in a tab.
# SelectNextTab
# Select next tab.
# SelectPreviousTab
# Select previous tab.
# SelectTab1
# Select the first tab.
# SelectTab2
# Select the second tab.
# SelectTab3
# Select the third tab.
# SelectTab4
# Select the fourth tab.
# SelectTab5
# Select the fifth tab.
# SelectTab6
# Select the sixth tab.
# SelectTab7
# Select the seventh tab.
# SelectTab8
# Select the eighth tab.
# SelectTab9
# Select the ninth tab.
# SelectLastTab
# Select the last tab.
# Linux/BSD exclusive:
# CopySelection
# Copy from the selection buffer.
# PasteSelection
# Paste from the selection buffer.
# Default: See alacritty-bindings(5)
#
# Example:
# [keyboard]
# bindings = [
# { key = "N", mods = "Control|Shift", action = "CreateNewWindow" },
# { key = "L", mods = "Control|Shift", chars = "l" },
# ]
#[keyboard]
# DEBUG
# This section documents the [debug] table of the configuration file.
# Debug options are meant to help troubleshoot issues with Alacritty. These can change or be removed entirely without warning, so their stability shouldn't be relied upon.
# render_timer = true | false
# Display the time it takes to draw each frame.
# Default: false
# persistent_logging = true | false
# Keep the log file after quitting Alacritty.
# Default: false
# log_level = "Off" | "Error" | "Warn" | "Info" | "Debug" | "Trace"
# Default: "Warn"
# To add extra libraries to logging ALACRITTY_EXTRA_LOG_TARGETS variable can be used.
# Example:
# ALACRITTY_EXTRA_LOG_TARGETS="winit;vte" alacritty -vvv
# renderer = "glsl3" | "gles2" | "gles2_pure" | "None"
# Force use of a specific renderer, "None" will use the highest available one.
# Default: "None"
# print_events = true | false
# Log all received window events.
# Default: false
# highlight_damage = true | false
# Highlight window damage information.
# Default: false
# prefer_egl = true | false
# Use EGL as display API if the current platform allows it. Note that transparency may not work with EGL on Linux/BSD.
# Default: false
#[debug]
# SEE ALSO
# alacritty(1), alacritty-msg(1), alacritty-bindings(5)
# BUGS
# Found a bug? Please report it at https://github.com/alacritty/alacritty/issues.
# MAINTAINERS
# · Christian Duerr <contact@christianduerr.com>
# · Kirill Chibisov <contact@kchibisov.com>
# SHELL
# shell="/opt/homebrew/bin/tmux"
# You can set shell.program to the path of your favorite shell, e.g. /bin/zsh. Entries in shell.args are passed as arguments to the shell.
# Default:
#shell="/opt/homebrew/bin/tmux"
# Linux/BSD/macOS: $SHELL or the user's login shell.
# Windows: "powershell"
# working_directory = "<string>" | "None"
# Directory the shell is started in. When this is unset, or "None", the working directory of the parent process will be used.
# Default: "None"
[shell]
program="/opt/homebrew/bin/tmux"
args=['new-session', '-A', '-s', 'default']
[font]
normal = { family = "Hack Nerd Font Mono" }
size = 13
builtin_box_drawing = true

View File

@ -0,0 +1 @@
{"ProviderType": "sso", "Credentials": {"AccessKeyId": "ASIA2JY5Z3EDVUA4A6RJ", "SecretAccessKey": "NGpO1aEMLJ/IP9zzH2FPHR54qXLa3y07BKzWJQtw", "SessionToken": "IQoJb3JpZ2luX2VjEKP//////////wEaCXVzLWVhc3QtMSJHMEUCIDUmnlQcHLjFoz15w3qjzhCdZV60jhv2edL91T7NZh41AiEAj7pNt7xiJW3b0lj9tthpD7QIogaA9+o+FuahOge69EEqhgMITBAAGgw3MDgxOTUyNDQyOTUiDIc3pPiyL8mtnthh2yrjApQ2+rtkLRN9qxpPWqRYknNfMdtxm9P+Kq4qbNcKa2iBpaqGPst8RwDtKUnmCrrxUeUiVhvA/hM+qmTaYXseKuxGkLUEkd5nu60JyX+c4rkZvYhJE/JVNlE16V6F+/i4tjndVS414654IMvfBJkr2Q97muuE5y3n5Y+fiuEKqm+iSYB6gPCsleNYl05vqfnm5vZzJjQKlECIsthiDlBYqk6a4mM4ZSf4eO/1gSWyOMfjsv6eRR+PKmd3Igrb9ylYNWXGzJ22hoJ9dFaYysWyhhn5MM6HUC0/ClWGqarrftlr43K5QDg8iRcQGm+GtBHIncfBJSB5CXS5aplaQAfOUJNt0GENFE7JoxUOjmRZkj5A/wQznF8MiZ87HmVMfS+ayeK/voLzT59kPJUEgbRuJbHywqFKDhwIxAlc0cxtXFjuaogFLplarR3lHopJy9Qhp3k1L7GAFewsyOvF5kl0v0X0miEwk/TRswY6pgF3F68pg7hY0ovrkikdSChpJHKWusOgmA+uv8j+w6fTQxpUzmm7PzP90WlNBqTvilhZySvZkXmllOE5wLZW7R3ny7qtP691B25Zy+AXWjc0QoFeBDK9XsR6VhancoVPU7Z4K7MHivQuUvrUqxnAkPqIs3PycYf+7662qhsw2rFE1Z19D7WXQdbAN9rUMbpWqLTFiSqtS4HBiU/TNR+zEnXj/b9rQbDK", "Expiration": "2024-06-20T19:50:58Z"}}

View File

@ -0,0 +1 @@
{"clientId": "-NdHQTXVpM0kOO6RdBO1GXVzLWVhc3QtMQ", "clientSecret": "eyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0.eyJzZXJpYWxpemVkIjoie1wiY2xpZW50SWRcIjp7XCJ2YWx1ZVwiOlwiLU5kSFFUWFZwTTBrT082UmRCTzFHWFZ6TFdWaGMzUXRNUVwifSxcImlkZW1wb3RlbnRLZXlcIjpudWxsLFwidGVuYW50SWRcIjpudWxsLFwiY2xpZW50TmFtZVwiOlwiYm90b2NvcmUtY2xpZW50LUFuY2hvclwiLFwiYmFja2ZpbGxWZXJzaW9uXCI6bnVsbCxcImNsaWVudFR5cGVcIjpcIlBVQkxJQ1wiLFwidGVtcGxhdGVBcm5cIjpudWxsLFwidGVtcGxhdGVDb250ZXh0XCI6bnVsbCxcImV4cGlyYXRpb25UaW1lc3RhbXBcIjoxNzI2Njg1Mzk1LjcxNjgwNzAwMCxcImNyZWF0ZWRUaW1lc3RhbXBcIjoxNzE4OTA5Mzk1LjcxNjgwNzAwMCxcInVwZGF0ZWRUaW1lc3RhbXBcIjoxNzE4OTA5Mzk1LjcxNjgwNzAwMCxcImNyZWF0ZWRCeVwiOm51bGwsXCJ1cGRhdGVkQnlcIjpudWxsLFwic3RhdHVzXCI6bnVsbCxcImluaXRpYXRlTG9naW5VcmlcIjpudWxsLFwiZW50aXRsZWRSZXNvdXJjZUlkXCI6bnVsbCxcImVudGl0bGVkUmVzb3VyY2VDb250YWluZXJJZFwiOm51bGwsXCJleHRlcm5hbElkXCI6bnVsbCxcInNvZnR3YXJlSWRcIjpudWxsLFwic2NvcGVzXCI6W3tcImZ1bGxTY29wZVwiOlwic3NvOmFjY291bnQ6YWNjZXNzXCIsXCJzdGF0dXNcIjpcIklOSVRJQUxcIixcImFwcGxpY2F0aW9uQXJuXCI6bnVsbCxcImZyaWVuZGx5SWRcIjpcInNzb1wiLFwidXNlQ2FzZUFjdGlvblwiOlwiYWNjb3VudDphY2Nlc3NcIixcInNjb3BlVHlwZVwiOlwiQUNDRVNTX1NDT1BFXCIsXCJ0eXBlXCI6XCJJbW11dGFibGVBY2Nlc3NTY29wZVwifV0sXCJhdXRoZW50aWNhdGlvbkNvbmZpZ3VyYXRpb25cIjpudWxsLFwiZW5hYmxlZEdyYW50c1wiOm51bGwsXCJlbmZvcmNlQXV0aE5Db25maWd1cmF0aW9uXCI6bnVsbCxcIm93bmVyQWNjb3VudElkXCI6bnVsbCxcInNzb0luc3RhbmNlQWNjb3VudElkXCI6bnVsbCxcInVzZXJDb25zZW50XCI6bnVsbCxcInNob3VsZEdldFZhbHVlRnJvbVRlbXBsYXRlXCI6dHJ1ZSxcImhhc0luaXRpYWxTY29wZXNcIjp0cnVlLFwiaGFzUmVxdWVzdGVkU2NvcGVzXCI6ZmFsc2UsXCJhcmVBbGxTY29wZXNDb25zZW50ZWRUb1wiOmZhbHNlLFwiZ3JvdXBTY29wZXNCeUZyaWVuZGx5SWRcIjp7XCJzc29cIjpbe1wiZnVsbFNjb3BlXCI6XCJzc286YWNjb3VudDphY2Nlc3NcIixcInN0YXR1c1wiOlwiSU5JVElBTFwiLFwiYXBwbGljYXRpb25Bcm5cIjpudWxsLFwiZnJpZW5kbHlJZFwiOlwic3NvXCIsXCJ1c2VDYXNlQWN0aW9uXCI6XCJhY2NvdW50OmFjY2Vzc1wiLFwic2NvcGVUeXBlXCI6XCJBQ0NFU1NfU0NPUEVcIixcInR5cGVcIjpcIkltbXV0YWJsZUFjY2Vzc1Njb3BlXCJ9XX0sXCJjb250YWluc09ubHlTc29TY29wZXNcIjp0cnVlLFwiaXNCYWNrZmlsbGVkXCI6ZmFsc2UsXCJpc0V4cGlyZWRc
IjpmYWxzZSxcImlzVjFCYWNrZmlsbGVkXCI6ZmFsc2UsXCJpc1YyQmFja2ZpbGxlZFwiOmZhbHNlfSJ9.s9Urop7Wi0AzdDwsxq-hLuXaE7fHT02bbG78NBuZ9d1wWePw5WyOCmNWZdNn0out", "expiresAt": "2024-09-18T18:49:55Z", "scopes": ["sso:account:access"]}

View File

@ -0,0 +1 @@
{"startUrl": "https://yesenergy.awsapps.com/start", "region": "us-east-1", "accessToken": "aoaAAAAAGZ0h-ktKtSOAPazvGhJni91H_dIyDQHZDRR-hu6sccvm2ZoaU0cMl6PrfuVefd2XHfohsuYEFAJnmqL4oBkc0:MGUCMQDMeQCLBshLFw/mCL/FW+/wqGP7Mii+8V4McAJI39RoaorVP+3xmFPvn8wWYHyMuncCMGal6nHN3W67nxn8fia6Mk7JgmiaznPmmGkrokNBoGuSbuNCh7taOYf9/kCbyBUdLw", "expiresAt": "2024-06-20T19:50:01Z", "clientId": "-NdHQTXVpM0kOO6RdBO1GXVzLWVhc3QtMQ", "clientSecret": "eyJraWQiOiJrZXktMTU2NDAyODA5OSIsImFsZyI6IkhTMzg0In0.eyJzZXJpYWxpemVkIjoie1wiY2xpZW50SWRcIjp7XCJ2YWx1ZVwiOlwiLU5kSFFUWFZwTTBrT082UmRCTzFHWFZ6TFdWaGMzUXRNUVwifSxcImlkZW1wb3RlbnRLZXlcIjpudWxsLFwidGVuYW50SWRcIjpudWxsLFwiY2xpZW50TmFtZVwiOlwiYm90b2NvcmUtY2xpZW50LUFuY2hvclwiLFwiYmFja2ZpbGxWZXJzaW9uXCI6bnVsbCxcImNsaWVudFR5cGVcIjpcIlBVQkxJQ1wiLFwidGVtcGxhdGVBcm5cIjpudWxsLFwidGVtcGxhdGVDb250ZXh0XCI6bnVsbCxcImV4cGlyYXRpb25UaW1lc3RhbXBcIjoxNzI2Njg1Mzk1LjcxNjgwNzAwMCxcImNyZWF0ZWRUaW1lc3RhbXBcIjoxNzE4OTA5Mzk1LjcxNjgwNzAwMCxcInVwZGF0ZWRUaW1lc3RhbXBcIjoxNzE4OTA5Mzk1LjcxNjgwNzAwMCxcImNyZWF0ZWRCeVwiOm51bGwsXCJ1cGRhdGVkQnlcIjpudWxsLFwic3RhdHVzXCI6bnVsbCxcImluaXRpYXRlTG9naW5VcmlcIjpudWxsLFwiZW50aXRsZWRSZXNvdXJjZUlkXCI6bnVsbCxcImVudGl0bGVkUmVzb3VyY2VDb250YWluZXJJZFwiOm51bGwsXCJleHRlcm5hbElkXCI6bnVsbCxcInNvZnR3YXJlSWRcIjpudWxsLFwic2NvcGVzXCI6W3tcImZ1bGxTY29wZVwiOlwic3NvOmFjY291bnQ6YWNjZXNzXCIsXCJzdGF0dXNcIjpcIklOSVRJQUxcIixcImFwcGxpY2F0aW9uQXJuXCI6bnVsbCxcImZyaWVuZGx5SWRcIjpcInNzb1wiLFwidXNlQ2FzZUFjdGlvblwiOlwiYWNjb3VudDphY2Nlc3NcIixcInNjb3BlVHlwZVwiOlwiQUNDRVNTX1NDT1BFXCIsXCJ0eXBlXCI6XCJJbW11dGFibGVBY2Nlc3NTY29wZVwifV0sXCJhdXRoZW50aWNhdGlvbkNvbmZpZ3VyYXRpb25cIjpudWxsLFwiZW5hYmxlZEdyYW50c1wiOm51bGwsXCJlbmZvcmNlQXV0aE5Db25maWd1cmF0aW9uXCI6bnVsbCxcIm93bmVyQWNjb3VudElkXCI6bnVsbCxcInNzb0luc3RhbmNlQWNjb3VudElkXCI6bnVsbCxcInVzZXJDb25zZW50XCI6bnVsbCxcInNob3VsZEdldFZhbHVlRnJvbVRlbXBsYXRlXCI6dHJ1ZSxcImhhc0luaXRpYWxTY29wZXNcIjp0cnVlLFwiaGFzUmVxdWVzdGVkU2NvcGVzXCI6ZmFsc2UsXCJhcmVBbGxTY29wZXNDb25zZW50ZWRUb1wiOmZhbHNlLFwiZ3JvdXBTY29wZXNCeUZyaWVuZGx5SWRcIjp7XCJzc29cIjpbe1wiZnVsbF
Njb3BlXCI6XCJzc286YWNjb3VudDphY2Nlc3NcIixcInN0YXR1c1wiOlwiSU5JVElBTFwiLFwiYXBwbGljYXRpb25Bcm5cIjpudWxsLFwiZnJpZW5kbHlJZFwiOlwic3NvXCIsXCJ1c2VDYXNlQWN0aW9uXCI6XCJhY2NvdW50OmFjY2Vzc1wiLFwic2NvcGVUeXBlXCI6XCJBQ0NFU1NfU0NPUEVcIixcInR5cGVcIjpcIkltbXV0YWJsZUFjY2Vzc1Njb3BlXCJ9XX0sXCJjb250YWluc09ubHlTc29TY29wZXNcIjp0cnVlLFwiaXNCYWNrZmlsbGVkXCI6ZmFsc2UsXCJpc0V4cGlyZWRcIjpmYWxzZSxcImlzVjFCYWNrZmlsbGVkXCI6ZmFsc2UsXCJpc1YyQmFja2ZpbGxlZFwiOmZhbHNlfSJ9.s9Urop7Wi0AzdDwsxq-hLuXaE7fHT02bbG78NBuZ9d1wWePw5WyOCmNWZdNn0out", "registrationExpiresAt": "2024-09-18T18:49:55Z", "refreshToken": "aorAAAAAGZ06Ks1_rOuRokfpGTYVDYinVRAXVYTLqSpd6wq32jVV95Q4BXUhWiRnx7XGHlIl-RqHucf4-Xbe_O6dwBkc0:MGQCMBdfyStg7Bkk2P5CoH38ffZXbLu3CJuLdqmtvoGg4HQ/Y3mz+Pg4k4Er49AeGydGewIwNkBlFVQ1qh9o1WEm6Qe49Zc9SOpyJPYg+T5SjJ2X5syGy48A5S91kji6j03ny6RL"}

@ -0,0 +1 @@
Subproject commit b8a285f0f1a86cfef4adb04dc9ea7bb689c4d6cc

View File

@ -0,0 +1,82 @@
# Bash completion handler for gcloud, generated by python-argcomplete.
# It normalizes COMP_LINE for tricky cases (--flag=value, ssh user@host,
# comma-separated value lists), then re-invokes the target command "$1"
# with _ARGCOMPLETE=1 so the Python side prints candidates on fd 8.
_python_argcomplete() {
local IFS=' '
local prefix=
typeset -i n
# Index of the last word on the completion line.
(( lastw=${#COMP_WORDS[@]} -1))
if [[ ${COMP_WORDS[lastw]} == --*=* ]]; then
# for bash version 3.2
flag=${COMP_WORDS[lastw]%%=*}
set -- "$1" "$2" '='
elif [[ $3 == '=' ]]; then
flag=${COMP_WORDS[-3]}
fi
if [[ $3 == ssh && $2 == *@* ]] ;then
# handle ssh user@instance specially
prefix=${2%@*}@
COMP_LINE=${COMP_LINE%$2}"${2#*@}"
elif [[ $3 == '=' ]] ; then
# handle --flag=value
prefix=$flag=$2
line=${COMP_LINE%$prefix};
COMP_LINE=$line${prefix/=/ };
prefix=
fi
if [[ $2 == *,* ]]; then
# handle , separated list
prefix=${2%,*},
set -- "$1" "${2#$prefix}" "$3"
COMP_LINE==${COMP_LINE%$prefix*}$2
fi
# Treat --flag=<TAB> as --flag <TAB> to work around bash 4.x bug
if [[ ${COMP_LINE} == *= && ${COMP_WORDS[-2]} == --* ]]; then
COMP_LINE=${COMP_LINE%=}' '
fi
# Run the target program in completion mode: fd 8 carries the results;
# stdout/stderr are discarded so program noise cannot corrupt COMPREPLY.
COMPREPLY=( $(IFS="$IFS" COMP_LINE="$COMP_LINE" COMP_POINT="$COMP_POINT" _ARGCOMPLETE_COMP_WORDBREAKS="$COMP_WORDBREAKS" _ARGCOMPLETE=1 "$1" 8>&1 9>&2 1>/dev/null 2>/dev/null) )
if [[ $? != 0 ]]; then
unset COMPREPLY
return
fi
# Re-attach any prefix (user@, --flag=, "list,") stripped above.
if [[ $prefix != '' ]]; then
for ((n=0; n < ${#COMPREPLY[@]}; n++)); do
COMPREPLY[$n]=$prefix${COMPREPLY[$n]}
done
fi
# Normalize embedded "<char> " escapes and re-append the trailing space.
for ((n=0; n < ${#COMPREPLY[@]}; n++)); do
match=${COMPREPLY[$n]%' '}
if [[ $match != '' ]]; then
COMPREPLY[$n]=${match//? /' '}' '
fi
done
# if flags argument has a single completion and ends in '= ', delete ' '
if [[ ${#COMPREPLY[@]} == 1 && ${COMPREPLY[0]} == -* &&
${COMPREPLY[0]} == *'= ' ]]; then
COMPREPLY[0]=${COMPREPLY[0]%' '}
fi
}
complete -o nospace -F _python_argcomplete "gcloud"
# Generic lazy completion helper (used by the bq completer below).
# $1 is a shell command whose output lists subcommands; $2 names the cache
# variable family ("<name>_COMMANDS"). Only completes the first
# (subcommand) position on the line.
_completer() {
command=$1
name=$2
# Cache the subcommand list in <name>_COMMANDS on first use.
eval '[[ "$'"${name}"'_COMMANDS" ]] || '"${name}"'_COMMANDS="$('"${command}"')"'
set -- $COMP_LINE
shift
# Skip any leading flags.
while [[ $1 == -* ]]; do
shift
done
# A second positional word means the subcommand is already chosen.
[[ $2 ]] && return
# Bare command followed by whitespace: offer the full cached list.
grep -q "${name}\s*$" <<< $COMP_LINE &&
eval 'COMPREPLY=($'"${name}"'_COMMANDS)' &&
return
[[ "$COMP_LINE" == *" " ]] && return
# Otherwise filter the cached list by the partial word typed so far.
[[ $1 ]] &&
eval 'COMPREPLY=($(echo "$'"${name}"'_COMMANDS" | grep ^'"$1"'))'
}
# Drop any cached bq command list so it is rebuilt on first completion.
unset bq_COMMANDS
# Completion for bq: derive subcommands from "bq help" output (first word
# of each non-indented line), with the update check disabled for speed.
_bq_completer() {
_completer "CLOUDSDK_COMPONENT_MANAGER_DISABLE_UPDATE_CHECK=1 bq help | grep '^[^ ][^ ]* ' | sed 's/ .*//'" bq
}
complete -F _bq_completer bq
# gsutil shares the argcomplete-based handler defined above.
complete -o nospace -F _python_argcomplete gsutil

View File

@ -0,0 +1,10 @@
# Google Cloud SDK path bootstrap: resolve the real location of this
# script (following a symlink if present) and prepend its bin/ to PATH.
script_link="$( command readlink "$BASH_SOURCE" )" || script_link="$BASH_SOURCE"
apparent_sdk_dir="${script_link%/*}"
# If the path contained no slash, the SDK dir is the current directory.
if [ "$apparent_sdk_dir" == "$script_link" ]; then
apparent_sdk_dir=.
fi
# Canonicalize (cd -P / pwd -P resolve symlinked parents).
sdk_dir="$( command cd -P "$apparent_sdk_dir" > /dev/null && command pwd -P )"
bin_path="$sdk_dir/bin"
# Prepend only if bin_path is not already on PATH.
if [[ ":${PATH}:" != *":${bin_path}:"* ]]; then
export PATH=$bin_path:$PATH
fi

3
bash/.bash_profile Normal file
View File

@ -0,0 +1,3 @@
# Login shells defer to ~/.bashrc so interactive settings live in one place.
if [ -f "$HOME/.bashrc" ]; then
. "$HOME/.bashrc"
fi

163
bash/.bashrc Normal file
View File

@ -0,0 +1,163 @@
## bashrc file
## Errol Sancaktar
## errol@sancaktar.net
##

# ensure_brew_packages PACKAGE...
# Install each Homebrew package (formula or cask) not already present.
# Presence is detected by checking the Cellar (formulae) and Caskroom
# (casks) directories, which is far faster than calling `brew list`.
function ensure_brew_packages {
local packages=("$@")
local cellar_dir="/opt/homebrew/Cellar"
local caskroom_dir="/opt/homebrew/Caskroom"
for package in "${packages[@]}"; do
if [ -d "$cellar_dir/$package" ]; then
#echo "$package (formula) is already installed."
continue
elif [ -d "$caskroom_dir/$package" ]; then
#echo "$package (cask) is already installed."
continue
else
echo "Installing $package..."
# Prefer the cask when one exists under this name.
if brew info --cask "$package" &>/dev/null; then
brew install --cask --force "$package"
else
brew install --force "$package"
fi
fi
done
}
# Packages this machine should always have available.
brew_packages=(
"opentofu"
"docker"
"docker-completion"
"helm"
"ipython"
"jq"
"k9s"
"kubernetes-cli"
"neovim"
"nmap"
"stow"
"terragrunt"
"tmux"
"font-hack-nerd-font"
"alacritty"
"spotify"
"tailscale"
"visual-studio-code"
"fzf"
"ripgrep"
"fd"
"spotify_player"
)
# HomeBrew
## Check for HomeBrew; only then set up its environment and verify packages.
if [[ -f "/opt/homebrew/bin/brew" ]]; then
eval "$(/opt/homebrew/bin/brew shellenv)"
export HOMEBREW_NO_ENV_HINTS=1
# BUG FIX: this call previously ran before the function and the array
# were defined, and `$brew_packages` expanded to only the first element.
ensure_brew_packages "${brew_packages[@]}"
fi
# alacritty_config_setup VARIANT
# Rebuild ~/.config/alacritty/alacritty.toml by concatenating the shared
# base config with the per-platform overlay alacritty-VARIANT.toml.
# Does nothing unless the base config exists.
function alacritty_config_setup {
if [[ -f $HOME/.config/alacritty/alacritty-base.toml ]]; then
toml_files=("$HOME/.config/alacritty/alacritty-base.toml" "$HOME/.config/alacritty/alacritty-$1.toml")
combined_file="$HOME/.config/alacritty/alacritty.toml"
# Start from an empty output file.
echo -n >"$combined_file"
for file in "${toml_files[@]}"; do
# Skip (with a warning) any piece that is missing.
if [ ! -f "$file" ]; then
echo "File '$file' not found. Skipping."
continue
fi
# Append this piece, followed by a blank separator line.
cat "$file" >>"$combined_file"
echo >>"$combined_file"
done
fi
}
# Print a one-line "<hostname> - <ip addresses>" banner at shell startup.
# IPs are collected from ifconfig when it exists (IPv4 only, no loopback).
function shell_start_disp {
printf '%s - ' "$(hostname)"
if [ -f /sbin/ifconfig ]; then
ifconfig | awk '/inet / && !/inet6/ && !/127.0.0.1/ {ips = ips " " $2} END {print ips}'
fi
}
# IF Linux
if [[ $(uname) == "Linux" ]]; then
# [[ ":$PATH:" != *":/home/errol/Applications:"* ]] && PATH="/home/errol/Applications:${PATH}"
# The next line updates PATH for the Google Cloud SDK.
# if [ -f '/home/errol/google-cloud-sdk/path.zsh.inc' ]; then . '/home/errol/google-cloud-sdk/path.zsh.inc'; fi
# The next line enables shell command completion for gcloud.
# if [ -f '/home/errol/google-cloud-sdk/completion.zsh.inc' ]; then . '/home/errol/google-cloud-sdk/completion.zsh.inc'; fi
# kubectl "krew" plugin manager installs its shims here.
export PATH="${KREW_ROOT:-$HOME/.krew}/bin:$PATH"
# Alacritty Crap
# Rebuild alacritty.toml from the base config plus the Linux overlay.
alacritty_config_setup linux
fi
## IF MacOS
if [[ $(uname) == "Darwin" ]]; then
# PATH
# BUG FIX: this previously read "if [ ... ]; do", which is a bash syntax
# error and aborted sourcing of the entire file on macOS.
if [ -d /opt/homebrew/bin ]; then
export PATH="/opt/homebrew/bin:$PATH"
fi
# Alacritty Crap
# Rebuild alacritty.toml from the base config plus the macOS overlay.
alacritty_config_setup osx
fi
# Show the host/IP banner at startup.
shell_start_disp
# Alias
if [[ -r ~/.aliasrc ]]; then
. ~/.aliasrc
fi
# History
HISTSIZE=5000000
HISTFILE=~/.bash_history
# SAVEHIST is a zsh variable and has no effect in bash; keep it for shells
# that share this file, but set bash's HISTFILESIZE so the history file is
# actually allowed to grow to HISTSIZE entries.
SAVEHIST=$HISTSIZE
HISTFILESIZE=$HISTSIZE
# Fuzzy search
# fh: fuzzy-pick a command from shell history with fzf and execute it.
# Works under both zsh (fc -l 1) and bash (history); sed strips the
# leading history event number before the eval re-runs the command.
function fh() {
eval $( ([ -n "$ZSH_NAME" ] && fc -l 1 || history) | fzf +s --tac | sed 's/ *[0-9]* *//')
}
# history with search
# h [pattern...]: print the entire history, optionally filtered by pattern.
function h() {
# check if we passed any parameters
if [ -z "$*" ]; then
# if no parameters were passed print entire history
# BUG FIX: "history 1" is zsh syntax ("from event 1"); in bash it
# printed only the single most recent entry.
history
else
# if words were passed use it as a search
# (grep -E replaces the deprecated egrep alias)
history | grep -E --color=auto "$@"
fi
}
# Check Starship is installed
if command -v starship &>/dev/null; then
# BUG FIX: this file is a bashrc, so initialize starship for bash —
# "starship init zsh" emitted zsh-only code that breaks the bash prompt.
eval "$(starship init bash)"
else
# NOTE(security): piping a remote script into sh is a deliberate
# bootstrap convenience; review the script before trusting it.
curl -sS https://starship.rs/install.sh | sh
fi
# User-local executables (pipx, pip --user, etc.); use $HOME rather than a
# hard-coded /Users/errol so the file works for any user and on Linux.
export PATH="$PATH:$HOME/.local/bin"
# COMPLETION
# Source every completion snippet in ~/.bash_completion.d, descending one
# level into subdirectories.
for bcfile in ~/.bash_completion.d/*; do
[ -f "$bcfile" ] && . "$bcfile"
# BUG FIX: this previously read "if [ ... ]; do", a bash syntax error
# that aborted sourcing of the whole file.
if [ -d "$bcfile" ]; then
for subbc in "$bcfile"/*; do
[[ -f "$subbc" ]] && . "$subbc"
done
fi
done

1
bash/.inputrc Normal file
View File

@ -0,0 +1 @@
set show-all-if-ambiguous on

View File

@ -62,7 +62,12 @@ gc() {
# Alias Loop
current_shell=`cat /proc/$$/comm`
if [[ -d '/proc' ]]; then
current_shell=`cat /proc/$$/comm`
else
current_shell=`echo $SHELL | cut -d/ -f3`
fi
#Bash
if [[ "$current_shell" == 'bash' ]]; then
for alias_def in "${!alias_list[@]}"; do

View File

@ -0,0 +1,31 @@
{
"Comment.nvim": { "branch": "master", "commit": "e30b7f2008e52442154b66f7c519bfd2f1e32acb" },
"LuaSnip": { "branch": "master", "commit": "ce0a05ab4e2839e1c48d072c5236cce846a387bc" },
"cmp-buffer": { "branch": "main", "commit": "3022dbc9166796b644a841a02de8dd1cc1d311fa" },
"cmp-nvim-lsp": { "branch": "main", "commit": "39e2eda76828d88b773cc27a3f61d2ad782c922d" },
"cmp-nvim-lua": { "branch": "main", "commit": "f12408bdb54c39c23e67cab726264c10db33ada8" },
"cmp-path": { "branch": "main", "commit": "91ff86cd9c29299a64f968ebb45846c485725f23" },
"cmp_luasnip": { "branch": "master", "commit": "05a9ab28b53f71d1aece421ef32fee2cb857a843" },
"formatter.nvim": { "branch": "master", "commit": "ad246d34ce7a32f752071ed81b09b94e6b127fad" },
"friendly-snippets": { "branch": "main", "commit": "700c4a25caacbb4648c9a27972c2fe203948e0c2" },
"github-nvim-theme": { "branch": "main", "commit": "f02d52425b2cf5412ce73cdf403b28d2800f4ac2" },
"gitsigns.nvim": { "branch": "main", "commit": "47c8e3e571376b24de62408fd0c9d12f0a9fc0a3" },
"lazy.nvim": { "branch": "main", "commit": "fafe1f7c640aed75e70a10e6649612cd96f39149" },
"lualine.nvim": { "branch": "master", "commit": "0a5a66803c7407767b799067986b4dc3036e1983" },
"mason-lspconfig.nvim": { "branch": "main", "commit": "37a336b653f8594df75c827ed589f1c91d91ff6c" },
"mason.nvim": { "branch": "main", "commit": "0950b15060067f752fde13a779a994f59516ce3d" },
"neoterm.nvim": { "branch": "master", "commit": "b0a69018c6f7db5769c2a88bf44978ce19c5e764" },
"nvim-autopairs": { "branch": "master", "commit": "78a4507bb9ffc9b00f11ae0ac48243d00cb9194d" },
"nvim-cmp": { "branch": "main", "commit": "a110e12d0b58eefcf5b771f533fc2cf3050680ac" },
"nvim-lint": { "branch": "master", "commit": "941fa1220a61797a51f3af9ec6b7d74c8c7367ce" },
"nvim-lspconfig": { "branch": "master", "commit": "37f362ef42d1a604d332e8d3d7d47593852b4313" },
"nvim-tree.lua": { "branch": "master", "commit": "f9ff00bc06d7cb70548a3847d7a2a05e928bc988" },
"nvim-treesitter": { "branch": "master", "commit": "8dd40c7609c04d7bad7eb21d71f78c3fa4dc1c2c" },
"nvim-treesitter-textobjects": { "branch": "master", "commit": "34867c69838078df7d6919b130c0541c0b400c47" },
"nvim-web-devicons": { "branch": "master", "commit": "c0cfc1738361b5da1cd0a962dd6f774cc444f856" },
"plenary.nvim": { "branch": "master", "commit": "a3e3bc82a3f95c5ed0d7201546d5d2c19b20d683" },
"rose-pine": { "branch": "main", "commit": "9e4320d0bab7f261921d6af7d1c1bbc1a1e3dc67" },
"telescope-fzf-native.nvim": { "branch": "main", "commit": "9ef21b2e6bb6ebeaf349a0781745549bbb870d27" },
"telescope.nvim": { "branch": "master", "commit": "f12b15e1b3a33524eb06a1ae7bc852fb1fd92197" },
"tokyonight.nvim": { "branch": "main", "commit": "81c867c5f638597a82c82094dcb90ed42444dabc" }
}

View File

@ -0,0 +1,9 @@
{
"extras": [
],
"news": {
"NEWS.md": "6077"
},
"version": 6
}

1
vscode/.vscode/extensions/.obsolete vendored Normal file
View File

@ -0,0 +1 @@
{"ms-python.python-2024.10.0-darwin-arm64":true}

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,14 @@
[flake8]
ignore = W,BLK,
E24,E121,E123,E125,E126,E221,E226,E266,E704,
E265,E722,E501,E731,E306,E401,E302,E222,E303,
E402,E305,E261,E262,E203
exclude =
__pycache__,
.eggs,
.git,
.tox,
.nox,
build,
dist,
src/test/python_tests/test_data

View File

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="utf-8"?>
<PackageManifest Version="2.0.0" xmlns="http://schemas.microsoft.com/developer/vsx-schema/2011" xmlns:d="http://schemas.microsoft.com/developer/vsx-schema-design/2011">
<Metadata>
<Identity Language="en-US" Id="debugpy" Version="2024.10.0" Publisher="ms-python" TargetPlatform="darwin-arm64"/>
<DisplayName>Python Debugger</DisplayName>
<Description xml:space="preserve">Python Debugger extension using debugpy.</Description>
<Tags>python,debugger,debugpy,debuggers</Tags>
<Categories>Debuggers</Categories>
<GalleryFlags>Public</GalleryFlags>
<Properties>
<Property Id="Microsoft.VisualStudio.Code.Engine" Value="^1.92.0" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionDependencies" Value="ms-python.python" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionPack" Value="" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionKind" Value="workspace" />
<Property Id="Microsoft.VisualStudio.Code.LocalizedLanguages" Value="" />
<Property Id="Microsoft.VisualStudio.Code.EnabledApiProposals" Value="portsAttributes,contribIssueReporter,debugVisualization,contribViewsWelcome" />
<Property Id="Microsoft.VisualStudio.Code.ExecutesCode" Value="true" />
<Property Id="Microsoft.VisualStudio.Services.Links.Source" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Getstarted" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.GitHub" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Support" Value="https://github.com/Microsoft/vscode-python-debugger/issues" />
<Property Id="Microsoft.VisualStudio.Services.Links.Learn" Value="https://github.com/Microsoft/vscode-python-debugger" />
<Property Id="Microsoft.VisualStudio.Services.GitHubFlavoredMarkdown" Value="true" />
<Property Id="Microsoft.VisualStudio.Services.Content.Pricing" Value="Free"/>
</Properties>
<License>extension/LICENSE.txt</License>
<Icon>extension/icon.png</Icon>
</Metadata>
<Installation>
<InstallationTarget Id="Microsoft.VisualStudio.Code"/>
</Installation>
<Dependencies/>
<Assets>
<Asset Type="Microsoft.VisualStudio.Code.Manifest" Path="extension/package.json" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.Details" Path="extension/README.md" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.License" Path="extension/LICENSE.txt" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Icons.Default" Path="extension/icon.png" Addressable="true" />
</Assets>
</PackageManifest>

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,55 @@
# Python Debugger extension for Visual Studio Code
A [Visual Studio Code](https://code.visualstudio.com/) [extension](https://marketplace.visualstudio.com/VSCode) that supports Python debugging with debugpy. Python Debugger provides a seamless debugging experience by allowing you to set breakpoints, step through code, inspect variables, and perform other essential debugging tasks. The debugpy extension offers debugging support for various types of Python applications including scripts, web applications, remote processes, and multi-threaded processes.
Note:
- The Python extension offers the python debugger extension as an optional installation, including it during the setup process.
- This extension is supported for all [actively supported versions](https://devguide.python.org/#status-of-python-branches) of the Python language (i.e., Python >= 3.7).
## Purpose
The main intent of this extension is to offer:
1. **Independence and Compatibility:** The Python Debugger extension aims to separate the debugging functionality from the main Python extension to prevent compatibility issues. This ensures that even as the Python extension drops support for older Python versions (e.g., Python 3.7), you can continue debugging projects with those versions without downgrading your Python extension. This allows you to access new features and bug fixes while keeping your debugging capabilities intact.
2. **Platform-Specific Builds:** Unlike the main Python extension, which bundles all debugpy builds for various platforms into a single extension package, the Python Debugger extension provides a more streamlined approach: it delivers platform-specific builds, ensuring you only receive the components relevant to your specific operating system. This reduces download times and unnecessary overhead.
3. **Feature Parity and Ongoing Updates:** This extension replicates all the functionality available in the main Python extension, and more. Going forward, any new debugger features will be added to this extension. In the future, the Python extension will no longer offer debugging support on its own, and we will transition all debugging support to this extension for all debugging functionality.
## Usage
Once installed in Visual Studio Code, python-debugger will be automatically activated when you open a Python file.
## Disabling the Python Debugger extension
If you want to disable the Python Debugger extension, you can [disable this extension](https://code.visualstudio.com/docs/editor/extension-marketplace#_disable-an-extension) per workspace in Visual Studio Code.
## Commands
| Command | Description |
| ---------------------- | --------------------------------- |
| Python Debugger: viewOutput | Show the Python Debugger Extension output. |
| Python Debugger: clearCacheAndReload | Allows you to clear the global values set in the extension. |
| Python Debugger: debugInTerminal | Allows you to debug a simple Python file in the terminal. |
## Limited support for deprecated Python versions
Older versions of the Python Debugger extension are available for debugging Python projects that use outdated Python versions like Python 2.7 and Python 3.6. However, it's important to note that our team is no longer maintaining these extension versions. We strongly advise you to update your project to a supported Python version if possible.
You can reference the table below to find the most recent Python Debugger extension version that offers debugging support for projects using deprecated Python versions, as well as the debugpy version that is shipped in each extension version.
> **Note**: If you do not see older extension versions to install (<=`2024.0.0`), try opting-in to pre-releases. You can do so on the extension page by clicking `Switch to Pre-Release Version`.
| Python version | Latest supported Python Debugger extension version | debugpy version |
| -------------- | -------------------------------------------------- | ---------------- |
| 2.7, >= 3.5 | 2023.1.XXX | 1.5.1 |
| >= 3.7 | 2024.0.XXX | 1.7.0 |
| >= 3.8 | 2024.2.XXX | 1.8.1 |
> **Note**: Once you install an older version of the Python Debugger extension in VS Code, you may want to disable auto update by changing the value of the `"extensions.autoUpdate"` setting in your `settings.json` file.
## Data and telemetry
The Debugpy Extension for Visual Studio Code collects usage data and sends it to Microsoft to help improve our products and services. Read our [privacy statement](https://privacy.microsoft.com/privacystatement) to learn more. This extension respects the `telemetry.enableTelemetry` setting which you can learn more about at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting.

View File

@ -0,0 +1,41 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.8 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->

View File

@ -0,0 +1,11 @@
# Support
## How to file issues and get help
This project uses GitHub Issues to track bugs and feature requests. Please search the [existing issues](https://github.com/microsoft/vscode-python-debugger/issues) before filing new issues to avoid duplicates. For new issues, file your bug or feature request as a new Issue.
For help and questions about using this project, please see the [python+visual-studio-code labels on Stack Overflow](https://stackoverflow.com/questions/tagged/visual-studio-code+python) or the `#vscode` channel on the [microsoft-python server on Discord](https://aka.ms/python-discord-invite).
## Microsoft Support Policy
Support for this project is limited to the resources listed above.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,24 @@
debugpy
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -0,0 +1,27 @@
Metadata-Version: 2.1
Name: debugpy
Version: 1.8.2
Summary: An implementation of the Debug Adapter Protocol for Python
Home-page: https://aka.ms/debugpy
Author: Microsoft Corporation
Author-email: ptvshelp@microsoft.com
License: MIT
Project-URL: Source, https://github.com/microsoft/debugpy
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Debuggers
Classifier: Operating System :: Microsoft :: Windows
Classifier: Operating System :: MacOS
Classifier: Operating System :: POSIX
Classifier: License :: OSI Approved :: MIT License
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE
debugpy is an implementation of the Debug Adapter Protocol for Python.
The source code and the issue tracker is [hosted on GitHub](https://github.com/microsoft/debugpy/).

View File

@ -0,0 +1,289 @@
debugpy/ThirdPartyNotices.txt,sha256=WzmT853BlHOtkEiV_xtk96iazYeVDKWB2tqKMtcm_jo,34345
debugpy/__init__.py,sha256=B621TRbcw1Pr4LrqSZB8Qr0CW9QfpbpcEz2eaN16jns,1018
debugpy/__main__.py,sha256=feuxCZgFCWXu9rVRgySgWvbrS_HECrm_BvaSG8VPdDc,1829
debugpy/_version.py,sha256=3TmVQn_HQ8v4bkt5waiFdSwQ_9u_IRXYq20nuSd2giM,497
debugpy/public_api.py,sha256=17qhti1Y4dOR8YSM6y-kKyztjk7TSLLUvlvkxlf0KHs,6320
debugpy/_vendored/__init__.py,sha256=cQGcZObOjPcKFDQk06oWNgqBHh2rXDCv0JTNISv3_rg,3878
debugpy/_vendored/_pydevd_packaging.py,sha256=cYo9maxM8jNPj-vdvCtweaYAPX210Pg8-NHS8ZU02Mg,1245
debugpy/_vendored/_util.py,sha256=E5k-21l2RXQHxl0C5YF5ZmZr-Fzgd9eNl1c6UYYzjfg,1840
debugpy/_vendored/force_pydevd.py,sha256=spQVMTUrxCRoNiSKKIkK96AhEi0JX6vHxcBIJNcNQE0,3172
debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py,sha256=-gA1UJ8pRY3VE8bRc7JhWRmxlRanQ8QG3724O5ioeKA,691
debugpy/_vendored/pydevd/pydev_coverage.py,sha256=qmd5XNE8Hwtem9m5eDtwbVIxi6U9XvtXIcur2xDP2Uk,3200
debugpy/_vendored/pydevd/pydev_pysrc.py,sha256=LKtwQyDYYry3lhL7YalgmehgWD82-NDpqQYYi1bTYj8,100
debugpy/_vendored/pydevd/pydev_run_in_console.py,sha256=bnrvimUb2pj9Gtcu09qNmSQFGY-lAcu1Yz9M6IJsqGM,4709
debugpy/_vendored/pydevd/pydevconsole.py,sha256=Y4a1Kq3Y8qug4Y6PjFnM46RbMcpsy5OxdTIBTBHaY6M,21094
debugpy/_vendored/pydevd/pydevd.py,sha256=4g912FKxopwibrlL7Z50Hlp-qWXHwz5RNyyA7YHphy8,147607
debugpy/_vendored/pydevd/pydevd_file_utils.py,sha256=_Mk1ugrnQ99YMWnV0Tlu_JW9H52oUWXDOBOPM249mPU,36326
debugpy/_vendored/pydevd/pydevd_tracing.py,sha256=u2xjF1vg4XhuqkZR5elKyiy-MqZ1-DZYKwr1YfM01Y8,15142
debugpy/_vendored/pydevd/setup_pydevd_cython.py,sha256=KcRQ-SPbua2b0AZEiFExT0PiQiVce7FSLsdWoqBBvAI,10410
debugpy/_vendored/pydevd/_pydev_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py,sha256=shNNj8qVCRge6yKmRDbCthzsG71MYtjc4KsaIr1glK0,4687
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py,sha256=Vk1jAIrXKT5-i49QqC0UL6wvCCSag8J-lu_NoIV-hAI,8544
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py,sha256=Y1tsEGh18bIyF4gfIM84RDk8LUOpC4uYcB0uekv74O8,483
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py,sha256=VyYZXhesz-t_frcGmzm6b5K2ccv7yYRTXWcdbqxwpzM,1095
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py,sha256=YYphxNwTDyaD73sWznWqc41Clw5vHF85-rIALwHqXB0,4458
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py,sha256=cuQ0YXVeBryJBAf3dU4CDcXVyIruYSAX0-vdMgneUpE,12350
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py,sha256=2FIRkXu71bTvaAHQDTRsVVONFuyd5otTX1kM_RC7XJY,17063
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py,sha256=RrRri4IPsTWYPNA-EvTWOSXKoflIFAHxcERQppngJVM,555
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py,sha256=D93V9B1C3qA9PoL0dV5bosenu5ncVQR2b-RAoxpvWTc,4573
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py,sha256=7w1l1AAwvglYAAqydScAvUzOulnv59RLVM7ODsio6Cw,2076
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py,sha256=hEt5I9k_kxxaJWwinODOEzA_je5uJovcN4iUEYrwXg0,1227
debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py,sha256=3jXJZ-tObnlWS5JnHhrI5eCBbtQnt-DcqYd1zsHgNZM,23769
debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py,sha256=85M_hwEqlMuazvG5Nk8WFn3SpjDIQ0aO5ahAA7lPBsk,1322
debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py,sha256=N6EMJg371s1SiA1wAFc_zBV9ZYbGBs0oFvTTvnW8q5Y,404
debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py,sha256=P3tZYpjILr_jyITP39wXLwhQmmwfzVjcBEPOcG73BTc,3821
debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py,sha256=P7WvuETdIVxIYZPdmd7c7vJ8DIZ0FCGVppGz9PIBcIE,21354
debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py,sha256=bOvk_nVIxgZH6EuUb4pkWh5L9JVZAw_IlQJVH75tL9o,696
debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py,sha256=7NTqaf3nAzfkbj1niqZ4xhgPzx9W9SDhgSkwqCazQIo,2070
debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py,sha256=irP3s1xg55sF6DR3SAyD-wzF2LmlLEeMhAmTq9Ge-hU,9174
debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py,sha256=lt2R1Kt5HoNC88488rEZTK4TDIkfgxMmYSzNkfywieg,42252
debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py,sha256=7FLTYfeF9YCmzxxki7zcbGt3ieWUVwYG4SkCNGjsEvk,7306
debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py,sha256=lL3tGSnQsziztbyePyrk9eu-xmgAWU2YQwxtv2t3to4,872
debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py,sha256=0kfbdk22O6_wAN9b2dGRNyhJVsv2P2VZs9dJHh6Fxl8,6279
debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py,sha256=VpGp1SZeDMPimt--5dq7LDfB6mKyPGRcAsQUCwuFHuw,510
debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py,sha256=X_j7-4Z4v6o5aSI9UW92eUWITNeM-qkvXrQbEOUJn7A,12704
debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py,sha256=oKip8qMSr9rwIFgxuRY9mKRxZbLSkpQM6hk0tUDfNlA,31550
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py,sha256=sWjARGt1-4SZXYDNVML20bWd3IBG5stPgs7Q3Cm4ub8,3499
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py,sha256=wMCy67DodvPIwyADbrSkV3FGp1ZXyixJ4lWc0o_ugJU,7549
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py,sha256=Qgn7uUlngUlvw21xCr9D_nj8oVNY_9MVPGzMYhsXKCs,9472
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py,sha256=wwl8UBpGRSqTvw8VCdl2S7G0O-SyXtOmVFIPKz4oh9E,7722
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py,sha256=Fny0ZPakVcx5ed0tqrtrR9WNor542br0lavONh2XnTs,9845
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py,sha256=U1jbD9cvqx2gpeKV-XuYc0AoETohg31Jv1oDTudlu4M,6685
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py,sha256=BznpLOP0enjZ4aZus2nQvQbcoj0ThH0JJu-qBbZeXKA,10594
debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py,sha256=m2z3GfCLJbbS0_V6_OSlOns21LruvVg1Zkdm2QYtS6Y,19014
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py,sha256=FP57omz4K2irhuf5zlRStGoFLglsw9mcwSS-isJ1S8I,1166
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py,sha256=VnRPfq6xsWsKBmzjMvCoS98lgt7LPUTJ-hW1KKssAgo,6239
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py,sha256=kr0qDV2JRHFj_3Q6QaGdtUPAdtLCR-abcOAe_tvNx7U,50739
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py,sha256=UAgyAAIiwqLxe-NswVieGiCpuDl2dvb_VUxg4jkZQGw,6010
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py,sha256=Ub1XpTtxqx14P84WriKaEI40Qa6PW4x70Gsc8UJA4ng,26277
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py,sha256=B30xbup05xjZrYBjA0xP9w5H72lLW7qO6U0jgJlMd7Q,17622
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py,sha256=gMsWsDVb5f6GAQlWn-MhPKJWNKUPd8EZyBP-y2ru_gY,37237
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py,sha256=iuPfHw1MaF40fnwS2uDFoV_eTrTvsAL5P0OJoRTGqbw,76130
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py,sha256=S2aiyPQnH-uUdP1hitV2P2dqM9G8t3Ix2CWd3xPF084,6084
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py,sha256=WCIU5RPg0jIgCaGeQTiCkF45ztMxaTJFcANpKXQB5ok,6126
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py,sha256=hIxX9k9ZNR7k9mi4Rtl-2ArzpTap3-hMrgUJgtFobmg,10179
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py,sha256=lzjrv-3KHx9h2twacRxk214RXj7mi1XVk_dOsgm6Ajg,27218
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py,sha256=g69QtSQ5MjMlxARXbY95U3Q5t9REt0WPRk4pbvELB5k,4399
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c,sha256=IAt_8ZZMxkImzuZ1pyWKqvqm91dx7N-TL72KWsVSPYc,2095445
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-311-darwin.so,sha256=cyXaTxzZtqJCrdPAXvbYt-Ulv9BYYsozhwsAg8IFHJ4,1104592
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd,sha256=OzA3y2aGOd7ego0_ParOpQ5mzxj0iy38cNEVy1yKDco,1242
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx,sha256=wY2dk_xgaVmHeM4ZIuVl8lENsEqfuHcKC_c0wfLMTBI,92487
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py,sha256=hVfzhgMunBeuVkGl9qVnE6uMFFSX4DdhxPl0K2-Tcd4,1600
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py,sha256=JYINrkA8Tz0K8VfMofsIy95ipQrBQrh03BKyFy3D1KA,7964
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py,sha256=anhqPJQu1sNI6h3V62povNEupCoGTmYFal6ezTsoDdk,2316
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py,sha256=vOYpH4i6tWq-DX8oCtW2UNu1BrI7Px-UysiXjLPxjUs,3567
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py,sha256=hHFDIIDFQ9EHi0lz5NSTJ4qGncT2XB2NhOg-rwWSeDU,5814
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py,sha256=8q6dcEHDfJE6bdwNyrRFXa_vYOlTrHz3UVBtYzhnJJo,159
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py,sha256=Ef8g1TihaUOVR-ed9C0a2u0L6dhUJOzb-GJld9sKf4s,3907
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py,sha256=DZYt56rfRm_4wjSXG7cUSFqgJBUKvYOnpHPxIWWpQR4,2369
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py,sha256=yKtcV0GyyO0bs6LR6BJElEstCP7yZ5O-aba50aIHdl8,12831
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py,sha256=Yia39BovL20CruHp2ZKVqSutpMGHpV0XwCzySwNF0xU,64386
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py,sha256=MdpmpRVl7t9WHHs2-sX4UMHTUyFZE3mP6vk5sSpjsGw,13133
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py,sha256=cUOOzjdQQ2geKjnSkJdnR894DVLkrtpltBDLmQoqnUc,3877
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py,sha256=XigIMYbVg6IsNY3shHca-R26vGG5wMTGygdyh38vaU8,1838
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py,sha256=Kkk-SrYQK8-oNZRbuiDOebRlhx0r8VblBwQgy2ndASs,8117
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py,sha256=2xrwawOQFtJNQWLNwGrnpiBKFBpFdevLSUPpcBjFP3U,6199
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py,sha256=wnKwGD2DcxSrTf_l5XsJPuUwgnnjzzdRoJPuHPyPFqg,4588
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py,sha256=k3tUA_ysyKF_KNsGAq20uyAYXTZW9WK_c025VZfSt_A,22408
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py,sha256=Y05qSjj0TF6nzOvIepzSXX-fQUCQn4Xht9edYnbGlDs,23269
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py,sha256=5azIt1MGftKwB7Y5YTtvGPC9agQt7xM23zjvKqGjeig,2484
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py,sha256=YNdXxz9ArTbxS4RQ5n5X2A2yYu5NXxcGO-2GrSQUhLk,35186
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py,sha256=_mTzfSWyboxqCv_IjqfyzPf0embVuTn_NlUC1rzavQ0,57057
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py,sha256=WVZbrenmO4WKVTMdBFEOfO7JjICIDURT-5EeCy-lu10,9756
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py,sha256=J4JylvGaK34_DhMNBpqD0YMWgUtWn9LkgShpxzQJ_iI,15773
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py,sha256=yGVZrTFZM12z9BbmafUs3suQGD4HtX5SXXIpLRlWedk,29616
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py,sha256=EMN-0GQ242o41a9PG_SakVQfox7K-FK2jSV4yPbcdMs,13521
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py,sha256=S8VBwT0KYhrcATdFz7AZck1mvOZbpm_yGLb8YRa98B8,14556
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py,sha256=WyKCgiqNQEWBm8YDZy2e6E_DkM_9sEQz3IJKKtbqD_M,3465
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py,sha256=cwiP2JIzmHrnyzDOL4dZcKRPWMmcCqFZ-Knwl4x8Ieg,6883
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py,sha256=K8hROuOdVJhXTZGFDi5RP40uHDWK4ZWbG0ix0gNX_oc,6428
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py,sha256=P9H6T_jIeukGbhY6TJ-BhPRty3neqP0CiAf65XC-5pg,16909
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py,sha256=gA223r51LB8KslU61gOjBLkEuahAwX8XHkGe5HAc9Vk,20864
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py,sha256=I-U_-urNLaxavillhZFfWSEFdX7QMdgq0dMUtiOL29E,3408
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py,sha256=V1-pX6yhik_aa1bk9ClkMsN5RaUXFGWRlHawtmTCSrA,8366
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py,sha256=nlf56i9hKz6DikH5Txc_fwZxqN1gSV60Qps0ODWzc4g,1397
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py,sha256=bIDRhihoBHfcVU9jVlK2TuS-sEIoHEYejTkdHsSXagM,3265
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py,sha256=3Q_iAqFyoOd6qMcKg5Dvw8aFqqtRYQ7QJdffGGZmRAc,22202
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py,sha256=oY3O9UJwf8EIUngMLsJnOQAiyeuOt9Y2SEgm6MsMJ6o,3279
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py,sha256=vjEUSAke7-K-bV0lq3jFDYZo2jzNw3-gUSxPiqEMdJY,17844
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py,sha256=0pksSWxEVQ38Q3r0o7CiYGX0ajFTGM1KMwNJoXVNU7U,31106
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py,sha256=osPhgiDRcU66KFUQffURSEt1GxLsS5B-DuV1ThHR_sY,1578
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py,sha256=XZsERMeJDDOQjN4NZKT7-YNKXJyB5SzKnzH2gYnAREw,15485
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py,sha256=7rcDvKv4OweHbJJVUIrVNaq7nRzc2NnbRCWY_wqGeXg,23085
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json,sha256=F6Myi84FtlbjechZbCpVsWRwB-Q_z5ixJn7vCJrYy3s,157930
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json,sha256=NmKM_bN5s6ozUPXIFc_Q3w6unk6Zf1hb1pgyjvxv4QM,10616
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py,sha256=Ke2Ff_8SRKGSDamE0eZxx8vAYNx4Ka9oJqRmNX5DfQA,3998
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py,sha256=ViAsceebMsN4ZE8F3nwvcZN_0ABr8gkKdXEl_T5BFsM,763496
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py,sha256=aMICrBzLWCaynZy2TysEvz3_sdHXfrQlFBHJNKl7j7k,1255
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py,sha256=nhYHTjhodOdBQ5ds2_kyyAF7kpqSvOjflPLacyx0SOw,16764
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py,sha256=Ky_yDpWcMfVTOAA0uwyEgAvPQZS7apJCrMM4OAo6BO4,2039
debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore,sha256=EKhbR-PpYtbvIYhIYeDDdUKDcU_kZf4kBzDgARh1Fgg,87
debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py,sha256=fsjgGg07ahVCdIMe38savTyAEgQtEzb7vVIldjgLhfE,1343
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py,sha256=XzgU2jey-apeQd9NCV40YXOkiwsGJdYdrUSv_mdNu8U,2105
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c,sha256=4fh_u7VP3gA6Xsb_m46U5GoGBVUx3CXHnboAXGcboss,935795
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd,sha256=0pKrUK3YwQtlJk7vvhZzqnDCJvB6JOb4V23Zl8UrqYI,5324
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx,sha256=YOHqtbizcUtC_jMeBlNQ6FLQZn-CZ3fcoqChyHF4h4g,24550
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py,sha256=HkS8B9I0YbUVsHxRDfc6kYstUTlKc41HqZyD-N5ifQA,4219
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py,sha256=cTwAD7Y2HBwZGZWe-M6AnO-Fs5spFuHXIwue1R8RRIw,13545
debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h,sha256=MbMCNJQXkcJZ8UU7xoH44MwqE3QRWZX5WCAz7zCju6Y,79
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt,sha256=jgSPsMO3Gc8ncNUe5RwdxdVB-YHyAioWMPXHcD6KbQE,700
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py,sha256=0IMNqViUb9NM5gPw9Nv9KUay99XkLoW4TnklupYkdDs,1801
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py,sha256=9hCplBAGV2ZNbI6TkkkC-Zefk_SxbesAVwe2iXtdSPQ,4152
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py,sha256=UACCPg0CuM9RIzNMJLqvLlCzeMBNI1UZ-WAspzO7rQM,6983
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py,sha256=_ngtd6LTdF74Q7eM0kv-jY70Y-1m2dYOVTdL3LABi6U,15391
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py,sha256=0CoEZ5sxVK6BLytJ1KI2qZgSb9-UAMkXwa1GiPaC3ag,22299
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py,sha256=ipQqlQuvHn_zwjWkaPlf_-LPTjOTcayut3T-sAijYQU,6020
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py,sha256=z5zgc6dJLWlaOxpmiErmR3qoWK-pbsqFQ5DnZhYxp9w,11721
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py,sha256=O7q19q0sDiwN4zVkFGdmceThKK4bQYP_13DFIbH8o8M,15740
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py,sha256=qlpJ7nivOajEtN7mIjg_wpiQPBgjnkNypVy1KdbmlEw,4996
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py,sha256=oxR5LHyY7Mp8AnNd5gB9US1E2HLP1_ikKDZ9O1ybB9g,15909
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py,sha256=rDlSsB9STCggvl79pv1q6uKUxElzat99z8_KcObcbb8,28547
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py,sha256=5p98FDCpHG2GxUpoMnaw5S6SBakyeMqa5eX29nrOmuo,2425
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py,sha256=HKFAg96iZKEjDMpaPwa2LrwERctdWUCTCcnvo-sKnEc,49634
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py,sha256=vBeWGBHWMvJ4yN7RdJKImSZurjuyeGQz7pHpeBBAKDI,6009
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py,sha256=hQ6EyqSddjJeUv4vhZFrEiy1xxMiqpyDuCK0xfKbgf8,11676
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py,sha256=z0K5O16SkAf81IljdDlKumS-x76HSrf5tnNGtsTLuIU,7149
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py,sha256=r6-xIPkRHhQlnTWXkt0sU0p0r1A80EgDKoFJTxE2J2A,32993
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py,sha256=wKq6yPWrzkNlholl5Y10b3VjuCkoiYVgvcIjk_8jzf8,485
debugpy/_vendored/pydevd/pydev_ipython/README,sha256=rvIWDUoNsPxITSg6EUu3L9DihmZUCwx68vQsqo_FSQg,538
debugpy/_vendored/pydevd/pydev_ipython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/pydev_ipython/inputhook.py,sha256=GjhfMC0wvhSGsDKTOTEOt0Gl6FleaS81yUn4AxGIlZY,19554
debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py,sha256=J5QUoxuqZLwvb8nBjIkrmBr-pTULVLp86UsdmImhYY8,5675
debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py,sha256=LDLg0tDXbmv-YoODReQKtOu5Aa3ECBr9HUaNJV1UYk0,1107
debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py,sha256=Sh382xr25ztKAQfChBNm4D-NC5UoY-Ho0LTIj3i4Wi8,1104
debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py,sha256=8lVbOG3Lucf_bX97HLNHXbq-wekKs1BUh79rMmh_bYg,3255
debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py,sha256=DKOobEvH6bIkkvOF581Vj-rQXWSUU6f7cEc9fzXr3_g,7242
debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py,sha256=82p6mLMradWAyzHh9faHb_eQJRigIgaW74WRbNLaoFc,7289
debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py,sha256=bW7hLVv2JOuP00TSeqIw9O3KKNVHtBBpE5bHASW-bSo,748
debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py,sha256=h804ZBfWPLy5ITbQSkzwELkGO4BqZtZB2b9izZXcpQk,6517
debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py,sha256=bZSIYY09Cny96-NpxOdbmU0lueNEaCfIhJP87D-dbFc,5378
debugpy/_vendored/pydevd/pydev_ipython/qt.py,sha256=Ley-7H_Fn40za6lJKmekC-r0uOTeOgnH6FIGUBaGqP8,785
debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py,sha256=HF68WlXR08W6-4B3DjuF-5AbNEMLq-NXIVscoSUTEFc,3619
debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py,sha256=ZxyWPGfCtdYbGrtJ49BkAWu7CEW6w38VjSnoeEzvWjM,8413
debugpy/_vendored/pydevd/pydev_ipython/version.py,sha256=lLBSR8mtlF1eCzwwOejljchAyfSy0opD8b0w_QjR97Q,1227
debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt,sha256=hnkTAuxSFW_Tilgw0Bt1RVLrfGRE3hYjAmTPm1k-sc8,21
debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py,sha256=kHFVCILQngtbNTgiaTu0icWDh7hRzqIpMlASeULI8Wo,9473
debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt,sha256=JibLodU4lzwvGyI8TNbfYhu626MPpYN3z4YAw82zTPU,960
debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py,sha256=I8Cbq2TR04ferhN1DMds9agFTqF8RST6Pw6diDFHr6o,679
debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py,sha256=9AE9SHJNK7bwNjIsaadqhfOM11tvgZm64KgDPFtLSDY,135
debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py,sha256=kM9LFgzX_qQofDb_X0hYMSb8Qfoga-exKBlng3__KZw,297
debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py,sha256=ATL8WvNcGPi3wTHtTQL23UhKxnbtG9dwB3MTZEiC-2E,2523
debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py,sha256=qcW2vIIxQG_-PP04N4HYOR0wwlmdZThhp7uL6GgeivY,22334
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py,sha256=0zce8nPiEccG6JOQV5Va7BkMTibuTJfsrUAMbCfNT1g,2479
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py,sha256=iI9em6cK60PzfVkS0dcqUQgACvOGhnMLPYyJnpYRLyY,7873
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_x86_64.dylib,sha256=Zz3NWFM3_oEJ3p5PyUW5K-yXm-4VGZ-rG0ynlf8ynGM,55296
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp,sha256=GCjlTIQW1wRoEpp1yCbeJiUaa9JDTbeOypJiZBRtxPE,8399
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp,sha256=mNr6LVLPDoCRyxLPTdYb0JWDXSfRn7xuAzPOzZWvoFs,4062
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp,sha256=_tpO9I0U0f2RqCYM8DIOPQJTLv8sL2NCxwKE2BnR0NE,4269
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp,sha256=r1ch6UgwF4rxW8ehiNnAvJE18VCoUl2TujP7nTCy0vQ,1870
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp,sha256=IuObk2NpXdBYvJwoz8p9ZZpUtamRwXIudTORVFpA_84,7822
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp,sha256=hxR-qpxpXQTupO-AgnasBq1j69ztvTdFUF8xWuiEdWA,3811
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp,sha256=tn11Wl2u0aLBj7Z0mcrYSCSOH6JrZ4e9P3jXSCZxySo,2617
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h,sha256=ueIyBiClInxfKMlgAVMuxTvmhESadiWeA4uSwZAROeo,21631
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp,sha256=8wDFQk9XoAnK2EdM4J-RErKfBYZn93uG6Rw5OCbLsA0,1475
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore,sha256=r3rDatBumb9cRDhx35hsdJp9URPUmjgkynAQViLIoR4,82
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp,sha256=xG8NmOwfuhXN93spuG_uEfe0tOn32hnljCqY5f1z354,3703
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh,sha256=zLx_fpShueHqBioStvOE0hm1aBuvIkc5EygxXSSh8mQ,275
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh,sha256=6szfH260KYg9ZuPusllOBx4vfPXU6ZrOrOrEdh-2bOM,232
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd,sha256=W93C4jG-fGn27rd1Yes3neP2upJTO9qAdKZPf8cvSQE,633
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py,sha256=eSmL1KLLOrG8r4RJyVOd3USUjsplWXdKSCCnnGyGVdo,1691
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py,sha256=2VU5wHMC1RElLHJa5cwPVo6bK8sRDics9cFMtqx3mq4,7917
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py,sha256=49N-AlYBS8S8ZC_lOXqQKq0ttdWOKjfmRpq8ESfNn84,168168
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py,sha256=0KEHUJM9HVPJkzEa_f6cWb6LB7ickr7xOqKvuOVjjeA,5230
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py,sha256=bAwe0hjMepdZkfIESfCJSxQJOnCxC7yp046nd6bJTI0,65394
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py,sha256=TI59UyIasBZF6iln8OMxtQCkIv4t6hRXqYaED8bRTzg,58709
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py,sha256=e45vidMFkVl2s5nu2YJW4iE5Ng1KnWobNWBWnbaZskQ,24409
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py,sha256=QzHPCcGvEZ1LrL33ddVegMsb6BURRgVBSi2nV20aOTs,67241
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py,sha256=Nmm9hGyn5XnxFccS0vmLmHl25ReTXPxZSgIiWQCaPQg,83555
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py,sha256=aQOXcKztzBzIhBNuEbSvaNZ-xtDERviWynRzbSVpesw,70615
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py,sha256=ViXvPwwEz2_EKrG-myicFCrtUjirrOmCon36AjeUI78,183635
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py,sha256=BmThLf5TaXsPEXzk16PqtZHIArZKpSK7f1srG8oFpJ4,21569
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py,sha256=NuUoyoU7lMZovibgZIjkTKtRsxWVuY3FRYfOJicM7-k,23798
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py,sha256=vHnOHabuFHMq4JBu7qf9O6omUrSTOsOGxdxjg81fvuw,34997
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py,sha256=L5oZqyn68kkoDpFnOYJkaIdTjFle1_tBPwaTPfcKzvo,45884
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py,sha256=osUFHqxoZBdGQLmfvPXEFikvNPY_JbiQub5N2_tgee0,62691
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py,sha256=uEuXm8xSq4iwZNfwW6wXnrTvF4fpvkYTz--VQUYQ2mg,75478
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py,sha256=9GFD52PGOg0czJc0oUlpE8sTJVHZnmam84teq938xeg,36223
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py,sha256=E7zOrJKXXm2OnRp_Xvt868_BwEVdh_39etqDQgZYLjQ,24309
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py,sha256=LjPLQ0pv6rcHQmb32jV12bGUw_CTsi0atfNuOvkg2nc,2845
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py,sha256=2EFvqeuxUGiiDGTzkHMV4fEeDZvMxmrByD5hu9ClM8A,120809
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py,sha256=8eT1_GvmfpRI2HWWqdNDRJjA6TvpwVl8ZQcVvSZFCOY,25137
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py,sha256=CLD5RAQi686FfCs0bszEgh_ZVFIj-YrUEt8HRvHE5HE,16108
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py,sha256=6mZA5sDvGrIfP76-Jv6bvIYON3cjTruYt29Cxu2QKdQ,46705
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py,sha256=Verc00KnY3XSxI0KMpb81U4P6k6MUHCe-NgsxlG7Ie8,22799
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py,sha256=5i8RpG43wEMAYbcBjEwJaemrVhrlFwGKhkL947CR-7E,16829
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py,sha256=TeRsADg7eZN1dw7aGepbOIZobGmiKzddT-bKEZ97bQI,164818
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py,sha256=7PSunl1ixZo5y-bol-w53iE4GhKEPYv7KRiI9_CiOms,22847
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py,sha256=KjCX0Yte_g7ty240hehVBbadxmhI2M--bVu0SfhKS9E,159230
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py,sha256=V5n9HSLn0fv5qwe83zMjGGBcTeJ2woJzS_hdjGQhps4,13762
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py,sha256=T2MAPhKCqGfg_JxYGZkWChzlDtDsRAeczsL8fGhSYDA,14007
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py,sha256=x7jV5_99GrZU0wlD-ePUeM09Uq_PJ7L1M9YwucIMUEw,25807
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py,sha256=-YYdVrMVUZWpzMIBUbeLkNlhQ7VCBhvjWLdZtN9u0Vo,57177
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py,sha256=uBtrPoubwMv_1CLSf42kpPQOiOrinCtYJkxWqzOl09I,36813
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py,sha256=P-4JIP38rtbQ-iHtdjSBst656mfBeFEBWPZyOrCD_c0,11164
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp,sha256=qRJVdptEyvZtM70x0XmSkGhl3U28EYBZ9zCB2-GW3pE,27447
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h,sha256=rWBA3kdzfyHaE9X9Diub3cojoJlXjC3TKnLQv-nGCeA,1846
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat,sha256=odPnx4SH9fXX33cyKUT9-2h_k5xdePG0EIlhldSnSZc,2221
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp,sha256=GQmZbpNBRMMW1WFcDFOlJaGLy1-oZ4qCCIyo5exPLBM,4792
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp,sha256=45pO-c1ofub4nn0XY9kMsTm3s48_EPE-VWAhld3704I,2479
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp,sha256=zvDC9cVGZ6BXEsz_Im-QMIubdw6vX3BCIg2T11cVfvg,3355
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp,sha256=ApQza8ZJrfe2U4jTJPa8ItzvnHc7w2G6Yrfq4BT_56g,2516
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp,sha256=NO8qlc7sKU6oHA_AnXJSodHupZLizp3npBTc-EGpBj8,999
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h,sha256=l0tEIxxiv0XrIeiJ2zedTd4fYrdlxVTK8ECpBk881WM,1162
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h,sha256=fqw-iPopG_V-pFRxf33lVFL0uvWfDUB2ky0bibFblK0,1013
debugpy/_vendored/pydevd/pydevd_plugins/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py,sha256=VIBdRwe3Ia3m4LwdyCmnm93ybPXoOwXIE8HUCy9JMcw,22430
debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py,sha256=z6XdAlP0Wf9PyluABWF2GICHnTfKaN3gRKvmuh6uymU,19141
debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py,sha256=Kl48sEmnu7cn8P4GTflgsqhfOh3UUNCp_qfyjN3B8FI,6286
debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md,sha256=cxu8F295snUVNgqE5xXKnZTpbqbR3LTmm60pgK0IOTs,1183
debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py,sha256=0fFR63gGOCVNWHp-e8El6OPlBlljTJwCe1oEJWXPv5M,639
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py,sha256=xFgk-9qUwGV7IIraROYeT7ve-oZekcaabzG37FM6giU,3248
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py,sha256=s-vvx_QhZeSd_OLJ3u4YHGf82Uw-0XIe7cG1D1POAnU,6581
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py,sha256=quqrRUKuKEPoT37MBla7wJCSuWHio1dI1cEK1G13qO0,538
debugpy/adapter/__init__.py,sha256=ewTjlS3VAb6lypFWfGZjY7j2wiW6ApS3KSPJrm0xsEk,346
debugpy/adapter/__main__.py,sha256=MBXkVyKOTc0QwB-vrUAI_al7EMaGIC3k7mXtfBv7U78,8257
debugpy/adapter/clients.py,sha256=qzv-eHGZvSW2LpiglWyHQr-Oym5-cSuocWlHt74uSDs,31341
debugpy/adapter/components.py,sha256=UGZg2cTDq4oHi3OrJ5bW2zdFe5MAw-lTZ9lkjRbAwgM,6081
debugpy/adapter/launchers.py,sha256=VWMB5i8GJX6CW1FP3OtT1a-iGqMcTabJ3UowNiWq02A,7000
debugpy/adapter/servers.py,sha256=pvIW2wloRQUQTr6WGrOHB1jct5_v12jbpKQeJfdYdCU,23418
debugpy/adapter/sessions.py,sha256=KzkGF3hWP59siooY3UbnATQY2wqIbCHFpqJHO7aR8jo,11229
debugpy/common/__init__.py,sha256=b9YhaTxBfLJ1VqVI33KM-uqiNWAu34RI2GVVN1IH0_8,608
debugpy/common/json.py,sha256=NoinXsMZHybYGNiSbq3_OWPJxtmYbDew6RkqF3zMyZ8,9674
debugpy/common/log.py,sha256=NqGC5AIo9zdls1FKn5Gv0n1EvLmaB0d5s12CRY7ZpLs,11706
debugpy/common/messaging.py,sha256=-kvJRS7uYUoh4MFa5EnlVRYe8xIXwOca9YrSg3OGYos,56576
debugpy/common/singleton.py,sha256=bSTqWB9bLp1SpP1W9-LH_OlU9Arbd7pqu4OcjYKDQ_0,7666
debugpy/common/sockets.py,sha256=RsRPizhPy6SRsK5DSEiUWIBX2fw-D90ch-KnSfQwmZk,4224
debugpy/common/stacks.py,sha256=czZjqyY_5ntvOSpelZlJkpH4Gqq9JyZY7tcUqz4sWXA,1526
debugpy/common/timestamp.py,sha256=ZocK6sWz2JUD1hBAKj672ta8D3ze0Z3zJD_CWjeDq7A,410
debugpy/common/util.py,sha256=CPWCyS757aIcGISxf_SbaGlewSCFOgou0raEKfCZ56I,4646
debugpy/launcher/__init__.py,sha256=L7aoLf-CaGikoiEJokF5JmSL0Y7FWIn2EOJFV89KbbY,890
debugpy/launcher/__main__.py,sha256=yLvc7PNl8YulPLINLZVBGY-38hClmkgec0LmkEQna_Q,3812
debugpy/launcher/debuggee.py,sha256=uUOkA8E-67FrrtOrarI_yEfDBsnn76opcNW1PmFxb9M,8574
debugpy/launcher/handlers.py,sha256=vsMHp4SKqbR1ar27RW7t-SbApXdWPfhS7qLlgr_cw8g,5728
debugpy/launcher/output.py,sha256=R8YWa7ccb9Sy_ighQqN9R5W1OdMNcnmizmTLYc6yTsg,3748
debugpy/launcher/winapi.py,sha256=7ACn2Hxf8Kx5bBmxmuC-0A_hG60kEfrDtrZW_BnnjV4,3129
debugpy/server/__init__.py,sha256=mmPRoui4PkSeZxG3r5Gep6YB0MLMq_lIJru1pfGmKUY,323
debugpy/server/api.py,sha256=oxecs9s0cATf8K8MCd6vVEeZjTgUTuWVIIT0vdfOtF4,11789
debugpy/server/attach_pid_injected.py,sha256=mx8G8CDw5HGOPBM5XGkjkLka_LHVQJEuMJ4tUJHQqX8,2734
debugpy/server/cli.py,sha256=-RTBelnNYN_IdnpqNLcU4ynZnf8RbOa0Ey-vLLjfk3Q,13289
debugpy-1.8.2.dist-info/LICENSE,sha256=g8PtOU5gAGfBe30cCb0-og8HA7jAcW5qn0FLmfy9-BA,1176
debugpy-1.8.2.dist-info/METADATA,sha256=SIp9xzY8sLwprPjxLGgXZjRewD27BNjoCEyGLyDrnvE,1093
debugpy-1.8.2.dist-info/WHEEL,sha256=toMA19NeSU5gggc10kWFk1W2ImAql2N-kkNKf8yhweA,114
debugpy-1.8.2.dist-info/top_level.txt,sha256=6Om6JTEaqkWnj-9-7kJOJr988sTO6iSuiK4N9X6RLpg,8
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING,sha256=baWkm-Te2LLURwK7TL0zOkMSVjVCU_ezvObHBo298Tk,1074
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA,sha256=9XadDK6YTQ-FPowYI5DS4ieA7hRGnRP_fM5Z9ioPkEQ,2929
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD,sha256=2udHTtpgQXukzLaj7MVfrJhBa40hV7SjP8vyZ5vNqMU,2995
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json,sha256=s58Rb4KXRlMKxk-mzpvr_tJRQ-Hx8-DHsU6NdohCnAg,93
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt,sha256=9BhdB7HqYZ-PvHNoWX6ilwLYWQqcgEOLwdb3aXm5Gys,9
debugpy-1.8.2.dist-info/RECORD,,

View File

@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (70.1.0)
Root-Is-Purelib: false
Tag: cp311-cp311-macosx_11_0_universal2

View File

@ -0,0 +1,499 @@
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
debugpy incorporates third party material from the projects listed below.
1. PyDev.Debugger (https://github.com/fabioz/PyDev.Debugger)
Includes:File copyright Brainwy Software Ltda.
Includes:File(s) related to Python, Cpython
Includes:File authored by Yuli Fitterman
Includes:File copyright Brainwy software Ltda
Includes:File with methods from Spyder
Includes:File(s) related to IPython
Includes:Files copyright Microsoft Corporation
Includes:six
Includes:WinAppDbg
Includes:XML-RPC client interface for Python
%% PyDev.Debugger NOTICES, INFORMATION, AND LICENSE BEGIN HERE
=========================================
The source code for the PyDev.Debugger files are provided with debugpy, or you may send a check or money order for US $5.00, including the product name (debugpy), the open source component name (PyDev.Debugger) and version number, to: Source Code Compliance Team, Microsoft Corporation, One Microsoft Way, Redmond, WA 98052, USA.
Eclipse Public License, Version 1.0 (EPL-1.0)
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
1. DEFINITIONS
"Contribution" means:
a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
b) in the case of each subsequent Contributor:
i) changes to the Program, and
ii) additions to the Program;
where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
"Contributor" means any person or entity that distributes the Program.
"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
"Program" means the Contributions distributed in accordance with this Agreement.
"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
2. GRANT OF RIGHTS
a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
3. REQUIREMENTS
A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
a) it complies with the terms and conditions of this Agreement; and
b) its license agreement:
i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
When the Program is made available in source code form:
a) it must be made available under this Agreement; and
b) a copy of this Agreement must be included with each copy of the Program.
Contributors may not remove or alter any copyright notices contained within the Program.
Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
4. COMMERCIAL DISTRIBUTION
Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
5. NO WARRANTY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
6. DISCLAIMER OF LIABILITY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
7. GENERAL
If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
=========================================
Includes File copyright Brainwy Software Ltda.
File includes the following notice:
Copyright: Brainwy Software Ltda.
License: EPL.
=========================================
Includes file(s) from Python, Python xreload, Cpython and an ActiveState.com Recipe on "NULL OBJECT DESIGN PATTERN (PYTHON RECIPE)"
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights
Reserved" are retained in Python alone or in any derivative version prepared by
Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (C) 2006-2010 Python Software Foundation
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
=========================================
Includes File authored by Yuli Fitterman
Copyright (c) Yuli Fitterman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================
Includes file(s): * Copyright (c) Brainwy software Ltda.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
=========================================
Includes file(s): Copyright (c) 2009-2012 Pierre Raybaut
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes file(s) from Ipython
Copyright (c) 2008-2010, IPython Development Team
Copyright (c) 2001-2007, Fernando Perez. <fernando.perez@colorado.edu>
Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the IPython Development Team nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================
Includes file(s): * Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
=========================================
Includes file(s) from https://pythonhosted.org/six/
Copyright (c) 2010-2018 Benjamin Peterson
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes WinAppDbg
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
=========================================
Includes XML-RPC client interface for Python
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
=========================================
Includes https://github.com/vstinner/bytecode e3e77fb690ed05ac171e15694e1c5d0e0dc34e86 - MIT
Copyright (c) 2016 Red Hat.
The MIT License (MIT)
Copyright (c) 2016 Red Hat.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes https://github.com/benhoyt/scandir 6ed381881bc2fb9de05804e892eeeeb3601a3af2 - BSD 3-Clause "New" or "Revised" License
Copyright (c) 2012, Ben Hoyt
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Ben Hoyt nor the names of its contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================
END OF PyDev.Debugger NOTICES, INFORMATION, AND LICENSE

View File

@ -0,0 +1,38 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
"""An implementation of the Debug Adapter Protocol (DAP) for Python.
https://microsoft.github.io/debug-adapter-protocol/
"""
# debugpy stable public API consists solely of members of this module that are
# enumerated below.
__all__ = [ # noqa
"__version__",
"breakpoint",
"configure",
"connect",
"debug_this_thread",
"is_client_connected",
"listen",
"log_to",
"trace_this_thread",
"wait_for_client",
]
import sys
assert sys.version_info >= (3, 7), (
"Python 3.6 and below is not supported by this version of debugpy; "
"use debugpy 1.5.1 or earlier."
)
# Actual definitions are in a separate file to work around parsing issues causing
# SyntaxError on Python 2 and preventing the above version check from executing.
from debugpy.public_api import * # noqa
from debugpy.public_api import __version__
del sys

View File

@ -0,0 +1,39 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
import sys
if __name__ == "__main__":
# debugpy can also be invoked directly rather than via -m. In this case, the first
# entry on sys.path is the one added automatically by Python for the directory
# containing this file. This means that import debugpy will not work, since we need
# the parent directory of debugpy/ to be in sys.path, rather than debugpy/ itself.
#
# The other issue is that many other absolute imports will break, because they
# will be resolved relative to debugpy/ - e.g. `import debugger` will then try
# to import debugpy/debugger.py.
#
# To fix both, we need to replace the automatically added entry such that it points
# at parent directory of debugpy/ instead of debugpy/ itself, import debugpy with that
# in sys.path, and then remove the first entry entry altogether, so that it doesn't
# affect any further imports we might do. For example, suppose the user did:
#
# python /foo/bar/debugpy ...
#
# At the beginning of this script, sys.path will contain "/foo/bar/debugpy" as the
# first entry. What we want is to replace it with "/foo/bar', then import debugpy
# with that in effect, and then remove the replaced entry before any more
# code runs. The imported debugpy module will remain in sys.modules, and thus all
# future imports of it or its submodules will resolve accordingly.
if "debugpy" not in sys.modules:
# Do not use dirname() to walk up - this can be a relative path, e.g. ".".
sys.path[0] = sys.path[0] + "/../"
import debugpy # noqa
del sys.path[0]
from debugpy.server import cli
cli.main()

View File

@ -0,0 +1,126 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
import contextlib
from importlib import import_module
import os
import sys
from . import _util
# Directory holding the vendored packages (the directory containing this file).
VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__))

# TODO: Move the "pydevd" git submodule to the debugpy/_vendored directory
# and then drop the following fallback.
if "pydevd" not in os.listdir(VENDORED_ROOT):
    VENDORED_ROOT = os.path.dirname(VENDORED_ROOT)
def list_all(resolve=False):
    """Return the vendored project names (or their root paths if *resolve*)."""
    # TODO: Derive from os.listdir(VENDORED_ROOT)?
    names = ["pydevd"]
    if resolve:
        return [project_root(name) for name in names]
    return names
def project_root(project):
    """Return the path to the root dir of the vendored project.

    If "project" is an empty string then the path prefix for vendored
    projects (e.g. "debugpy/_vendored/") will be returned.
    """
    # A falsy project (None/"") maps to the vendored-root prefix itself.
    return os.path.join(VENDORED_ROOT, project or "")
def iter_project_files(project, relative=False, **kwargs):
    """Yield (dirname, basename, filename) for all files in the project."""
    if relative:
        # Walk with the vendored root as CWD so yielded paths are relative.
        with _util.cwd(VENDORED_ROOT):
            yield from _util.iter_all_files(project, **kwargs)
    else:
        yield from _util.iter_all_files(project_root(project), **kwargs)
def iter_packaging_files(project):
    """Yield the filenames for all files in the project.

    The filenames are relative to "debugpy/_vendored". This is most
    useful for the "package data" in a setup.py.
    """
    # TODO: Use default filters? __pycache__ and .pyc?
    prune_dir = None
    exclude_file = None
    try:
        mod = import_module("._{}_packaging".format(project), __name__)
    except ImportError:
        # No project-specific packaging helpers; include everything.
        pass
    else:
        prune_dir = getattr(mod, "prune_dir", prune_dir)
        exclude_file = getattr(mod, "exclude_file", exclude_file)
    entries = iter_project_files(
        project, relative=True, prune_dir=prune_dir, exclude_file=exclude_file
    )
    for _, _, filename in entries:
        yield filename
def prefix_matcher(*prefixes):
    """Return a module match func that matches any of the given prefixes."""
    assert prefixes

    def match(name, module):
        # The module argument is part of the match-func contract, even though
        # only the name is inspected here.
        return any(name.startswith(prefix) for prefix in prefixes)

    return match
def check_modules(project, match, root=None):
    """Verify that only vendored modules have been imported.

    Returns (unvendored, extensions): a dict of module-name -> filename for
    matching modules loaded from outside *root*, and a list of matching
    modules without a __file__ (extension/builtin modules).
    """
    if root is None:
        root = project_root(project)
    extensions = []
    unvendored = {}
    for name, module in list(sys.modules.items()):
        if not match(name, module):
            continue
        try:
            filename = getattr(module, "__file__", None)
        except:  # In theory any error may be raised when accessing __file__
            filename = None
        if not filename:
            # No file means an extension (or builtin) module.
            extensions.append(name)
        elif not filename.startswith(root):
            unvendored[name] = filename
    return unvendored, extensions
@contextlib.contextmanager
def vendored(project, root=None):
    """A context manager under which the vendored project will be imported."""
    search_path = project_root(project) if root is None else root
    # Put the vendored project first so it shadows any installed copy.
    sys.path.insert(0, search_path)
    try:
        yield search_path
    finally:
        sys.path.remove(search_path)
def preimport(project, modules, **kwargs):
    """Import each of the named modules out of the vendored project."""
    with vendored(project, **kwargs):
        for module_name in modules:
            import_module(module_name)

View File

@ -0,0 +1,48 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from . import VENDORED_ROOT
from ._util import cwd, iter_all_files
# Files kept in the package even though they do not match the pydev* naming
# filters applied by prune_dir/exclude_file below.
INCLUDES = [
    'setup_pydevd_cython.py',
]
def iter_files():
    """Yield (dirname, basename, filename) for the pydevd files to package."""
    # From the root of pydevd repo, we want only scripts and
    # subdirectories that constitute the package itself (not helper
    # scripts, tests etc). But when walking down into those
    # subdirectories, we want everything below.
    with cwd(VENDORED_ROOT):
        return iter_all_files('pydevd', prune_dir, exclude_file)
def prune_dir(dirname, basename):
    """Return True if the directory should be skipped during the walk."""
    if basename == '__pycache__':
        return True
    if dirname != 'pydevd':
        # Below the top level, every directory is kept.
        return False
    # At the top level, keep only the pydev*/_pydev* package directories.
    return not basename.startswith(('pydev', '_pydev'))
def exclude_file(dirname, basename):
    """Return True if the file should be left out of the package data."""
    if dirname == 'pydevd':
        # Top level: keep explicit INCLUDES plus pydev* Python scripts.
        if basename in INCLUDES:
            return False
        return not basename.endswith('.py') or 'pydev' not in basename
    # Below the top level, keep everything except compiled bytecode.
    return basename.endswith('.pyc')

View File

@ -0,0 +1,59 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
import contextlib
import os
@contextlib.contextmanager
def cwd(dirname):
    """A context manager for operating in a different directory.

    Yields the previous working directory and always restores it on exit.
    """
    original = os.getcwd()
    os.chdir(dirname)
    try:
        yield original
    finally:
        os.chdir(original)
def iter_all_files(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, basename, filename) for each file in the tree.

    This is an alternative to os.walk() that flattens out the tree and
    with filtering.
    """
    # Breadth-first walk: _iter_files appends surviving subdirs to the queue.
    remaining = [root]
    while remaining:
        current = remaining.pop(0)
        yield from _iter_files(current, remaining, prune_dir, exclude_file)
def iter_tree(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, files) for each directory in the tree.

    The list of files is actually a list of (basename, filename).

    This is an alternative to os.walk() with filtering."""
    remaining = [root]
    while remaining:
        current = remaining.pop(0)
        entries = [
            (basename, filename)
            for _, basename, filename in _iter_files(
                current, remaining, prune_dir, exclude_file
            )
        ]
        yield current, entries
def _iter_files(dirname, subdirs, prune_dir, exclude_file):
    """Yield (dirname, basename, filename) for files directly in *dirname*.

    Subdirectories surviving *prune_dir* are appended to *subdirs* so the
    caller can continue the walk.
    """
    for basename in os.listdir(dirname):
        filename = os.path.join(dirname, basename)
        if os.path.isdir(filename):
            if prune_dir is None or not prune_dir(dirname, basename):
                subdirs.append(filename)
        else:
            # TODO: Use os.path.isfile() to narrow it down?
            if exclude_file is None or not exclude_file(dirname, basename):
                yield dirname, basename, filename

View File

@ -0,0 +1,81 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from importlib import import_module
import os
import warnings

from . import check_modules, prefix_matcher, preimport, vendored

# Ensure that pydevd is our vendored copy. If some other (incompatible) copy
# was already imported, warn loudly but continue.
_unvendored, _ = check_modules('pydevd',
                               prefix_matcher('pydev', '_pydev'))
if _unvendored:
    _unvendored = sorted(_unvendored.values())
    msg = 'incompatible copy of pydevd already imported'
    # raise ImportError(msg)
    warnings.warn(msg + ':\n {}'.format('\n '.join(_unvendored)))

# If debugpy logging is enabled, enable it for pydevd as well
if "DEBUGPY_LOG_DIR" in os.environ:
    os.environ[str("PYDEVD_DEBUG")] = str("True")
    os.environ[str("PYDEVD_DEBUG_FILE")] = os.environ["DEBUGPY_LOG_DIR"] + str("/debugpy.pydevd.log")

# Disable pydevd frame-eval optimizations only if unset, to allow opt-in.
if "PYDEVD_USE_FRAME_EVAL" not in os.environ:
    os.environ[str("PYDEVD_USE_FRAME_EVAL")] = str("NO")

# Constants must be set before importing any other pydevd module
# due to heavy use of "from" in them.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    with vendored('pydevd'):
        pydevd_constants = import_module('_pydevd_bundle.pydevd_constants')
# We limit representation size in our representation provider when needed.
pydevd_constants.MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 2 ** 32

# Now make sure all the top-level modules and packages in pydevd are
# loaded. Any pydevd modules that aren't loaded at this point, will
# be loaded using their parent package's __path__ (i.e. one of the
# following).
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    preimport('pydevd', [
        '_pydev_bundle',
        '_pydev_runfiles',
        '_pydevd_bundle',
        '_pydevd_frame_eval',
        'pydev_ipython',
        'pydevd_plugins',
        'pydevd',
    ])

# When pydevd is imported it sets the breakpoint behavior, but it needs to be
# overridden because by default pydevd will connect to the remote debugger using
# its own custom protocol rather than DAP.
import pydevd  # noqa
import debugpy  # noqa


def debugpy_breakpointhook():
    # Route the builtin breakpoint() to debugpy instead of pydevd's default.
    debugpy.breakpoint()


pydevd.install_breakpointhook(debugpy_breakpointhook)

# Ensure that pydevd uses JSON protocol
from _pydevd_bundle import pydevd_constants
from _pydevd_bundle import pydevd_defaults
pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = pydevd_constants.HTTP_JSON_PROTOCOL

# Enable some defaults related to debugpy such as sending a single notification when
# threads pause and stopping on any exception.
pydevd_defaults.PydevdCustomization.DEBUG_MODE = 'debugpy-dap'

# This is important when pydevd attaches automatically to a subprocess. In this case, we have to
# make sure that debugpy is properly put back in the game for users to be able to use it.
pydevd_defaults.PydevdCustomization.PREIMPORT = '%s;%s' % (
    os.path.dirname(os.path.dirname(debugpy.__file__)),
    'debugpy._vendored.force_pydevd'
)

View File

@ -0,0 +1,155 @@
'''
License: Apache 2.0
Author: Yuli Fitterman
'''
import types
from _pydevd_bundle.pydevd_constants import IS_JYTHON
try:
import inspect
except:
import traceback;
traceback.print_exc() # Ok, no inspect available (search will not work)
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
def is_bound_method(obj):
    """Return True if *obj* is a method bound to an instance."""
    if not isinstance(obj, types.MethodType):
        return False
    # 'im_self' is the legacy (Python 2 / Jython) spelling of '__self__'.
    return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None
def get_class_name(instance):
    """Return the name of *instance*'s class, or None if unavailable."""
    cls = getattr(instance, "__class__", None)
    return getattr(cls, "__name__", None)
def get_bound_class_name(obj):
    """Return the class name of the instance *obj* is bound to, or None."""
    bound_to = getattr(obj, '__self__', getattr(obj, 'im_self', None))
    return None if bound_to is None else get_class_name(bound_to)
def get_description(obj):
    """Return a source-like stub (signature + docstring) describing *obj*.

    Handles classes (described via __init__), bound methods, plain functions
    and arbitrary callables; falls back to a docstring-only stub when no
    signature can be determined.
    """
    try:
        ob_call = obj.__call__
    except:
        ob_call = None

    if isinstance(obj, type) or type(obj).__name__ == 'classobj':
        # Describe classes through their constructor when it is a real function.
        fob = getattr(obj, '__init__', lambda: None)
        if not isinstance(fob, (types.FunctionType, types.MethodType)):
            fob = obj
    elif is_bound_method(ob_call):
        fob = ob_call
    else:
        fob = obj

    argspec = ""
    fn_name = None
    fn_class = None
    if isinstance(fob, (types.FunctionType, types.MethodType)):
        # BUG FIX: inspect.formatargspec() was removed in Python 3.11; render
        # the signature with inspect.signature() instead (same "(a, b=1)" shape).
        try:
            argspec = str(inspect.signature(fob))
        except (TypeError, ValueError):
            argspec = ""
        fn_name = getattr(fob, '__name__', None)
        if isinstance(obj, type) or type(obj).__name__ == 'classobj':
            fn_name = "__init__"
            fn_class = getattr(obj, "__name__", "UnknownClass")
        elif is_bound_method(obj) or is_bound_method(ob_call):
            fn_class = get_bound_class_name(obj) or "UnknownClass"
    else:
        fn_name = getattr(fob, '__name__', None)
        fn_self = getattr(fob, '__self__', None)
        if fn_self is not None and not isinstance(fn_self, types.ModuleType):
            fn_class = get_class_name(fn_self)

    doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj)
    return create_method_stub(fn_name, fn_class, argspec, doc_string)
def create_method_stub(fn_name, fn_class, argspec, doc_string):
    """Build a source stub for a callable plus the expression that evaluates it."""
    if fn_name and argspec:
        doc_string = doc_string or ""
        fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0)
        if not fn_class:
            return fn_stub + "\n" + fn_name
        expr = fn_class if fn_name == '__init__' else fn_class + '().' + fn_name
        return create_class_stub(fn_class, fn_stub) + "\n" + expr
    if doc_string:
        if fn_name:
            # Try to recover the signature from the docstring itself.
            restored_signature, _ = signature_from_docstring(doc_string, fn_name)
            if restored_signature:
                return create_method_stub(fn_name, fn_class, restored_signature, doc_string)
        return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown'
    return ''
def get_docstring(obj):
    """Return the best available documentation string for *obj*.

    Falls back to repr(obj), then str(obj.__class__), then '' when all
    attempts fail. Returns '' immediately for None.
    """
    if obj is not None:
        try:
            if IS_JYTHON:
                # Jython
                doc = obj.__doc__
                if doc is not None:
                    return doc

                from _pydev_bundle import _pydev_jy_imports_tipper
                is_method, infos = _pydev_jy_imports_tipper.ismethod(obj)
                ret = ''
                if is_method:
                    for info in infos:
                        ret += info.get_as_doc()
                    return ret

            else:
                doc = inspect.getdoc(obj)
                if doc is not None:
                    return doc
        except:
            # Any introspection failure falls through to the repr() fallbacks.
            pass
    else:
        return ''

    try:
        # if no attempt succeeded, try to return repr()...
        return repr(obj)
    except:
        try:
            # otherwise the class
            return str(obj.__class__)
        except:
            # if all fails, go to an empty string
            return ''
def create_class_stub(class_name, contents):
    """Wrap *contents* in a minimal class definition stub."""
    return "class {}(object):\n{}".format(class_name, contents)
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
    """Render a `def` stub with the given signature and (cleaned) docstring."""

    def shift_right(string, prefix):
        # Prefix every line, preserving the original line endings.
        return ''.join(prefix + line for line in string.splitlines(True))

    fn_docstring = shift_right(inspect.cleandoc(fn_docstring), "    " * (indent + 1))
    ret = '''
def %s%s:
    """%s"""
    pass
''' % (fn_name, fn_argspec, fn_docstring)
    ret = ret[1:]  # remove leading newline from the template
    ret = ret.replace('\t', "    ")
    if indent:
        ret = shift_right(ret, "    " * indent)
    return ret

View File

@ -0,0 +1,267 @@
from collections import namedtuple
from string import ascii_letters, digits
from _pydevd_bundle import pydevd_xml
import pydevconsole
import builtins as __builtin__ # Py3
try:
import java.lang # @UnusedImport
from _pydev_bundle import _pydev_jy_imports_tipper
_pydev_imports_tipper = _pydev_jy_imports_tipper
except ImportError:
IS_JYTHON = False
from _pydev_bundle import _pydev_imports_tipper
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
#=======================================================================================================================
# _StartsWithFilter
#=======================================================================================================================
class _StartsWithFilter:
    '''
    Used because we can't create a lambda that'll use an outer scope in jython 2.1
    '''

    def __init__(self, start_with):
        # Normalize once so every __call__ is a cheap case-insensitive check.
        self.start_with = start_with.lower()

    def __call__(self, name):
        return name.lower().startswith(self.start_with)
#=======================================================================================================================
# Completer
#
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
#=======================================================================================================================
class Completer:
    # Adapted from IPython's completer; relies on the module-level dir2()
    # helper for the actual attribute enumeration.

    def __init__(self, namespace=None, global_namespace=None):
        """Create a new completer for the command line.

        Completer([namespace,global_namespace]) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given. This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

    def complete(self, text):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None. The completion should begin with 'text'.
        """
        if self.use_main_ns:
            # In pydev this option should never be used
            raise RuntimeError('Namespace must be provided!')
            self.namespace = __main__.__dict__  # @UndefinedVariable

        # Dotted text means attribute completion; plain text means name lookup.
        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.
        """

        def get_item(obj, attr):
            return obj[attr]

        a = {}

        # Merge builtins, locals and globals into one lookup dict; later
        # updates win, so locals shadow builtins.
        for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]:  # @UndefinedVariable
            a.update(dict_with_comps)

        filter = _StartsWithFilter(text)

        return dir2(a, a.keys(), get_item, filter)

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions. (For class instances, class members are are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)  # @UndefinedVariable

        if not m:
            return []

        expr, attr = m.group(1, 3)
        # NOTE: eval of user-typed text is inherent to this completer design;
        # see the WARNING in the docstring above.
        try:
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        filter = _StartsWithFilter(attr)

        words = dir2(obj, filter=filter)

        return words
def generate_completions(frame, act_tok):
    '''
    :return list(tuple(method_name, docstring, parameters, completion_type))

    method_name: str
    docstring: str
    parameters: str -- i.e.: "(a, b)"
    completion_type is an int
        See: _pydev_bundle._pydev_imports_tipper for TYPE_ constants
    '''
    if frame is None:
        return []

    # Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # (Names not resolved in generator expression in method)
    # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
    updated_globals = {}
    updated_globals.update(frame.f_globals)
    updated_globals.update(frame.f_locals)  # locals later because it has precedence over the actual globals

    if pydevconsole.IPYTHON:
        # Delegate to IPython's completion machinery when running under it.
        completions = pydevconsole.get_completions(act_tok, act_tok, updated_globals, frame.f_locals)
    else:
        completer = Completer(updated_globals, None)
        # list(tuple(name, descr, parameters, type))
        completions = completer.complete(act_tok)

    return completions
def generate_completions_as_xml(frame, act_tok):
    """Return the completions for *act_tok* in *frame*, encoded as XML."""
    return completions_to_xml(generate_completions(frame, act_tok))
def completions_to_xml(completions):
    """Serialize completion 4-tuples into the pydevd XML wire format."""
    valid_xml = pydevd_xml.make_valid_xml_value
    quote = pydevd_xml.quote
    parts = ["<xml>"]
    for comp in completions:
        # Each completion becomes one <comp/> element with attributes p0..p3.
        parts.append('<comp p0="')
        parts.append(valid_xml(quote(comp[0], '/>_= \t')))
        parts.append('" p1="')
        parts.append(valid_xml(quote(comp[1], '/>_= \t')))
        parts.append('" p2="')
        parts.append(valid_xml(quote(comp[2], '/>_= \t')))
        parts.append('" p3="')
        parts.append(valid_xml(quote(comp[3], '/>_= \t')))
        parts.append('"/>')
    parts.append("</xml>")
    return ''.join(parts)
# ASCII approximation of characters valid in Python identifiers.
identifier_start = ascii_letters + '_'
identifier_part = ascii_letters + '_' + digits

# Converted to sets for O(1) membership tests in the completion hot path.
identifier_start = set(identifier_start)
identifier_part = set(identifier_part)
def isidentifier(s):
    """Return True if *s* is a valid Python identifier."""
    return str.isidentifier(s)
# A completion target split into the dotted prefix (token) and the trailing
# partial name being completed (qualifier).
TokenAndQualifier = namedtuple('TokenAndQualifier', 'token, qualifier')
def extract_token_and_qualifier(text, line=0, column=0):
    '''
    Extracts the token and qualifier from the text given the line/column
    (see test_extract_token_and_qualifier for examples).

    :param unicode text:
    :param int line: 0-based
    :param int column: 0-based
    '''
    # Note: not using the tokenize module because text should be unicode and
    # line/column refer to the unicode text (otherwise we'd have to know
    # those ranges after converted to bytes).
    if line < 0:
        line = 0
    if column < 0:
        column = 0

    if isinstance(text, bytes):
        text = text.decode('utf-8')

    lines = text.splitlines()
    try:
        text = lines[line]
    except IndexError:
        # Line out of range: nothing to complete.
        return TokenAndQualifier(u'', u'')

    if column >= len(text):
        column = len(text)
    text = text[:column]

    token = u''
    qualifier = u''

    # Scan backwards from the cursor, collecting identifier characters and dots.
    temp_token = []
    for i in range(column - 1, -1, -1):
        c = text[i]
        if c in identifier_part or isidentifier(c) or c == u'.':
            temp_token.append(c)
        else:
            break
    temp_token = u''.join(reversed(temp_token))
    if u'.' in temp_token:
        # Everything before the last dot is the token; the rest is the qualifier.
        temp_token = temp_token.split(u'.')
        token = u'.'.join(temp_token[:-1])
        qualifier = temp_token[-1]
    else:
        qualifier = temp_token

    return TokenAndQualifier(token, qualifier)

View File

@ -0,0 +1,14 @@
# We must redefine it in Py3k if it's not already there
def execfile(file, glob=None, loc=None):
    """Python 3 replacement for the removed Python 2 builtin execfile()."""
    if glob is None:
        import sys
        # Default to the caller's globals, mirroring the Python 2 builtin.
        glob = sys._getframe().f_back.f_globals
    if loc is None:
        loc = glob

    import tokenize
    # tokenize.open() honors the PEP 263 encoding declaration in the file.
    with tokenize.open(file) as stream:
        contents = stream.read()

    # execute the script (note: it's important to compile first to have the filename set in debug mode)
    exec(compile(contents + "\n", file, 'exec'), glob, loc)

View File

@ -0,0 +1,41 @@
import sys
def __getfilesystemencoding():
    '''
    Note: there's a copy of this method in interpreterInfo.py
    '''
    try:
        encoding = sys.getfilesystemencoding()
        if not encoding:
            raise RuntimeError('Unable to get encoding.')
        return encoding
    except:
        try:
            # Handle Jython
            from java.lang import System  # @UnresolvedImport
            env = System.getProperty("os.name").lower()
            if env.find('win') != -1:
                # mbcs does not work on Jython, so, use a (hopefully) suitable replacement
                return 'ISO-8859-1'
            return 'utf-8'
        except:
            pass

        # Only available from 2.3 onwards.
        if sys.platform == 'win32':
            return 'mbcs'
        return 'utf-8'
def getfilesystemencoding():
    """Return the filesystem encoding, falling back to 'utf-8' on any error."""
    try:
        encoding = __getfilesystemencoding()
        # Check if the encoding is actually there to be used!
        if hasattr('', 'encode'):
            ''.encode(encoding)
        if hasattr('', 'decode'):
            ''.decode(encoding)
        return encoding
    except:
        return 'utf-8'

View File

@ -0,0 +1,130 @@
#=======================================================================================================================
# getopt code copied since gnu_getopt is not available on jython 2.1
#=======================================================================================================================
class GetoptError(Exception):
    """Raised when an option is unknown, ambiguous, or misses its argument."""

    # Class-level defaults keep attribute access safe in all cases.
    opt = ''
    msg = ''

    def __init__(self, msg, opt=''):
        self.msg = msg
        self.opt = opt
        Exception.__init__(self, msg, opt)

    def __str__(self):
        return self.msg
def gnu_getopt(args, shortopts, longopts=[]):
    """getopt(args, options[, long_options]) -> opts, args

    This function works like getopt(), except that GNU style scanning
    mode is used by default. This means that option and non-option
    arguments may be intermixed. The getopt() function stops
    processing options as soon as a non-option argument is
    encountered.

    If the first character of the option string is `+', or if the
    environment variable POSIXLY_CORRECT is set, then option
    processing stops as soon as a non-option argument is encountered.
    """

    opts = []
    prog_args = []
    # Accept a single long option passed as a plain string.
    if type('') == type(longopts):
        longopts = [longopts]
    else:
        longopts = list(longopts)

    # Allow options after non-option arguments?
    all_options_first = False
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        all_options_first = True

    while args:
        if args[0] == '--':
            # Explicit end-of-options marker; the rest is positional.
            prog_args += args[1:]
            break

        if args[0][:2] == '--':
            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
        elif args[0][:1] == '-':
            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
        else:
            if all_options_first:
                prog_args += args
                break
            else:
                prog_args.append(args[0])
                args = args[1:]

    return opts, prog_args
def do_longs(opts, opt, longopts, args):
    """Consume one long option (text after '--'); return updated (opts, args)."""
    try:
        i = opt.index('=')
    except ValueError:
        optarg = None
    else:
        # '--opt=value' form: split the inline argument off.
        opt, optarg = opt[:i], opt[i + 1:]

    has_arg, opt = long_has_args(opt, longopts)
    if has_arg:
        if optarg is None:
            # Argument was not inline; take the next element of args.
            if not args:
                raise GetoptError('option --%s requires argument' % opt, opt)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError('option --%s must not have an argument' % opt, opt)

    opts.append(('--' + opt, optarg or ''))
    return opts, args
def long_has_args(opt, longopts):
    """Resolve *opt* against *longopts*.

    Returns (has_arg, full_option_name); raises GetoptError when the option
    is unknown or an ambiguous prefix.
    """
    matches = [candidate for candidate in longopts if candidate.startswith(opt)]
    if not matches:
        raise GetoptError('option --%s not recognized' % opt, opt)

    # Exact matches win over prefix matches.
    if opt in matches:
        return False, opt
    if opt + '=' in matches:
        return True, opt

    # No exact match, so the prefix must be unambiguous.
    if len(matches) > 1:
        # XXX since matches contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError('option --%s not a unique prefix' % opt, opt)

    unique_match = matches[0]
    # A trailing '=' in the spec marks an option that takes an argument.
    if unique_match.endswith('='):
        return True, unique_match[:-1]
    return False, unique_match
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options ('-abc'); return updated (opts, args)."""
    while optstring != '':
        opt, optstring = optstring[0], optstring[1:]
        if short_has_arg(opt, shortopts):
            if optstring == '':
                # Argument is the next element of args.
                if not args:
                    raise GetoptError('option -%s requires argument' % opt,
                                      opt)
                optstring, args = args[0], args[1:]
            # The remainder of the cluster is this option's argument.
            optarg, optstring = optstring, ''
        else:
            optarg = ''
        opts.append(('-' + opt, optarg))
    return opts, args
def short_has_arg(opt, shortopts):
    """Return True if short option *opt* takes an argument per *shortopts*."""
    for index, candidate in enumerate(shortopts):
        if opt == candidate != ':':
            # An option takes an argument when followed by ':' in the spec.
            return shortopts.startswith(':', index + 1)
    raise GetoptError('option -%s not recognized' % opt, opt)
#=======================================================================================================================
# End getopt code
#=======================================================================================================================

View File

@ -0,0 +1,373 @@
import inspect
import os.path
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked
from inspect import getfullargspec
def getargspec(*args, **kwargs):
    """Legacy getargspec()-style tuple built on top of getfullargspec()."""
    spec = getfullargspec(*args, **kwargs)
    return (
        spec.args,
        spec.varargs,
        spec.varkw,
        spec.defaults,
        spec.kwonlyargs or [],
        spec.kwonlydefaults or {},
    )
# completion types.
# Wire-protocol completion type codes (kept as strings deliberately).
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name, log=None):
    """Import *name*, retrying with progressively shorter dotted prefixes."""
    try:
        return __import__(name)
    except:
        if '.' in name:
            # Retry with the parent package, e.g. 'a.b.c' -> 'a.b'.
            sub = name[0:name.rfind('.')]

            if log is not None:
                log.add_content('Unable to import', name, 'trying with', sub)
                log.add_exception()

            return _imp(sub, log)
        else:
            s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
            if log is not None:
                log.add_content(s)
                log.add_exception()

            raise ImportError(s)
# IronPython ("cli") needs CLR references added before .NET assemblies can be
# imported, so _imp is wrapped accordingly there.
IS_IPY = False
if sys.platform == 'cli':
    IS_IPY = True
    _old_imp = _imp

    def _imp(name, log=None):
        # We must add a reference in clr for .Net
        import clr  # @UnresolvedImport

        initial_name = name
        # Try the longest prefix first, trimming one component at a time.
        while '.' in name:
            try:
                clr.AddReference(name)
                break  # If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            try:
                clr.AddReference(name)
            except:
                pass  # That's OK (not dot net module).

        return _old_imp(initial_name, log)
def get_file(mod):
    """Return the source filename for module *mod*, or None if unknown.

    Prefers the .py source over compiled .pyc/.pyo artifacts when the source
    file exists alongside them.
    """
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        try:
            f = getattr(mod, '__file__', None)
        except:
            f = None

    # BUG FIX: the original `f.lower(f[-4:])` always raised TypeError
    # (str.lower takes no arguments); the intent is to compare the extension.
    if f and f.lower()[-4:] in ['.pyc', '.pyo']:
        filename = f[:-4] + '.py'
        if os.path.exists(filename):
            f = filename
    return f
def Find(name, log=None):
    """Locate *name*; return (filename, module_or_attr, parent_module, found_as).

    *found_as* is the dotted attribute path inside the module when *name*
    resolves to an attribute rather than a (sub)module.
    """
    f = None
    mod = _imp(name, log)
    parent = mod
    foundAs = ''

    if inspect.ismodule(mod):
        f = get_file(mod)

    components = name.split('.')

    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # Tolerate a single repeated component (the shadowing case above).
            if old_comp != comp:
                raise

        if inspect.ismodule(mod):
            f = get_file(mod)
        else:
            # Accumulate the dotted attribute path.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    return f, mod, parent, foundAs
def search_definition(data):
    '''@return file, line, col
    '''
    data = data.replace('\n', '')
    if data.endswith('.'):
        data = data.rstrip('.')

    f, mod, parent, found_as = Find(data)
    try:
        return do_find(f, mod), found_as
    except:
        # Fall back to the parent module when the attribute lookup fails.
        return do_find(f, parent), found_as
def generate_tip(data, log=None):
    """Return (filename, tips) for the module/attribute named by *data*."""
    data = data.replace('\n', '')
    if data.endswith('.'):
        data = data.rstrip('.')

    f, mod, parent, found_as = Find(data, log)
    # print_ >> open('temp.txt', 'w'), f
    tips = generate_imports_tip_for_module(mod)
    return f, tips
def check_char(c):
    """Map characters invalid in identifiers ('-' and '.') to '_'."""
    return '_' if c in ('-', '.') else c
# Unique marker distinguishing "no default supplied" from a default of None.
_SENTINEL = object()
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []

    if dir_comps is None:
        dir_comps = dir_checked(obj_to_complete)
        if hasattr_checked(obj_to_complete, '__dict__'):
            dir_comps.append('__dict__')
        if hasattr_checked(obj_to_complete, '__class__'):
            dir_comps.append('__class__')

    get_complete_info = True

    if len(dir_comps) > 1000:
        # ok, we don't want to let our users wait forever...
        # no complete info for you...
        get_complete_info = False

    # Types whose instances never carry interesting docs (skipping them is a
    # big speedup — lots of constants would otherwise be introspected).
    dontGetDocsOn = (float, int, str, tuple, list, dict)
    dontGetattrOn = (dict, list, set, tuple)
    for d in dir_comps:
        if d is None:
            continue

        if not filter(d):
            continue

        args = ''

        try:
            try:
                if isinstance(obj_to_complete, dontGetattrOn):
                    raise Exception('Since python 3.9, e.g. "dict[str]" will return'
                                    " a dict that's only supposed to take strings. "
                                    'Interestingly, e.g. dict["val"] is also valid '
                                    'and presumably represents a dict that only takes '
                                    'keys that are "val". This breaks our check for '
                                    'class attributes.')
                obj = getattr(obj_to_complete.__class__, d)
            except:
                obj = getattr(obj_to_complete, d)
        except:  # just ignore and get it without additional info
            ret.append((d, '', args, TYPE_BUILTIN))
        else:
            if get_complete_info:
                try:
                    retType = TYPE_BUILTIN

                    # check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:
                        if isinstance(obj, class_):
                            getDoc = False
                            break

                    doc = ''
                    if getDoc:
                        # no need to get this info... too many constants are defined and
                        # makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ''
                        except:  # may happen on jython when checking java classes (so, just ignore it)
                            doc = ''

                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        try:
                            args, vargs, kwargs, defaults, kwonly_args, kwonly_defaults = getargspec(obj)

                            args = args[:]

                            # Keyword-only args are appended at the end, with
                            # their default when one exists (see _SENTINEL).
                            for kwonly_arg in kwonly_args:
                                default = kwonly_defaults.get(kwonly_arg, _SENTINEL)
                                if default is not _SENTINEL:
                                    args.append('%s=%s' % (kwonly_arg, default))
                                else:
                                    args.append(str(kwonly_arg))
                            args = '(%s)' % (', '.join(args))
                        except TypeError:
                            # ok, let's see if we can get the arguments from the doc
                            args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None))

                        retType = TYPE_FUNCTION

                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS

                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT

                    else:
                        retType = TYPE_ATTR

                    # add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))

                except:  # just ignore and get it without additional info
                    ret.append((d, '', args, TYPE_BUILTIN))

            else:  # get_complete_info == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION

                elif inspect.isclass(obj):
                    retType = TYPE_CLASS

                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT

                else:
                    retType = TYPE_ATTR

                # ok, no complete info, let's try to do this as fast and clean as possible
                # so, no docs for this kind of information, only the signatures
                ret.append((d, '', str(args), retType))

    return ret
def signature_from_docstring(doc, obj_name):
    '''Try to extract a "(arg, arg)" signature string from a docstring.

    @param doc: the docstring to parse (first line usually holds "name(args)").
    @param obj_name: the function name — used on IronPython to pick the longest
        matching overload line.
    @return: tuple (args, doc) where args is e.g. '(a, b)' — '()' when nothing
        could be parsed — and doc is the (possibly reformatted) docstring.
    '''
    args = '()'
    try:
        found = False
        if len(doc) > 0:
            if IS_IPY:
                # Handle case where we have the situation below
                # sort(self, object cmp, object key)
                # sort(self, object cmp, object key, bool reverse)
                # sort(self)
                # sort(self, object cmp)

                # Or: sort(self: list, cmp: object, key: object)
                # sort(self: list, cmp: object, key: object, reverse: bool)
                # sort(self: list)
                # sort(self: list, cmp: object)
                if obj_name:
                    name = obj_name + '('

                    # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                    lines = doc.splitlines()
                    if len(lines) == 1:
                        c = doc.count(name)
                        if c > 1:
                            doc = ('\n' + name).join(doc.split(name))

                    # Pick the longest overload line as the representative signature.
                    major = ''
                    for line in doc.splitlines():
                        if line.startswith(name) and line.endswith(')'):
                            if len(line) > len(major):
                                major = line
                    if major:
                        args = major[major.index('('):]
                        found = True

            if not found:
                # Generic case: take everything before '->' / '--' / first newline
                # and treat a trailing "(...)" as the signature.
                i = doc.find('->')
                if i < 0:
                    i = doc.find('--')
                    if i < 0:
                        i = doc.find('\n')
                        if i < 0:
                            i = doc.find('\r')

                if i > 0:
                    s = doc[0:i]
                    s = s.strip()

                    # let's see if we have a docstring in the first line
                    if s[-1] == ')':
                        start = s.find('(')
                        if start >= 0:
                            end = s.find('[')
                            if end <= 0:
                                end = s.find(')')
                                if end <= 0:
                                    end = len(s)

                            args = s[start:end]
                            if not args[-1] == ')':
                                args = args + ')'

                            # now, get rid of unwanted chars
                            l = len(args) - 1
                            r = []
                            for i in range(len(args)):
                                if i == 0 or i == l:
                                    r.append(args[i])
                                else:
                                    r.append(check_char(args[i]))

                            args = ''.join(r)

        if IS_IPY:
            if args.startswith('(self:'):
                i = args.find(',')
                if i >= 0:
                    args = '(self' + args[i:]
                else:
                    args = '(self)'
            # Truncate anything after the first closing parenthesis.
            i = args.find(')')
            if i > 0:
                args = args[:i + 1]

    except:
        pass
    return args, doc

View File

@ -0,0 +1,492 @@
import traceback
from io import StringIO
from java.lang import StringBuffer # @UnresolvedImport
from java.lang import String # @UnresolvedImport
import java.lang # @UnresolvedImport
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from org.python.core import PyReflectedFunction # @UnresolvedImport
from org.python import core # @UnresolvedImport
from org.python.core import PyClass # @UnresolvedImport
# Completion entry types sent to the IDE side (string codes are part of the
# wire protocol and must not change).
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
return _imp(sub)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
raise RuntimeError(s)
import java.util

# Remember where java.util came from: used as a fallback location for
# java.lang in Find() (java.lang.__file__ is None on Jython 2.7).
_java_rt_file = getattr(java.util, '__file__', None)
def Find(name):
    '''Resolve dotted *name* to (file, object, top-level module, attribute path).

    Jython variant: maps some __builtin__ names to their Java implementation
    classes, patches the missing __file__ of java.lang, and translates
    compiled-file names (.pyc / $py.class) back to their .py source.
    '''
    f = None
    if name.startswith('__builtin__'):
        if name == '__builtin__.str':
            name = 'org.python.core.PyString'

        elif name == '__builtin__.dict':
            name = 'org.python.core.PyDictionary'

    mod = _imp(name)
    parent = mod
    foundAs = ''

    try:
        f = getattr(mod, '__file__', None)
    except:
        f = None

    components = name.split('.')
    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # Tolerate exactly one repeated component (the shadowing case above).
            if old_comp != comp:
                raise

        if hasattr(mod, '__file__'):
            f = mod.__file__
        else:
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    if f is None and name.startswith('java.lang'):
        # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5).
        f = _java_rt_file

    if f is not None:
        if f.endswith('.pyc'):
            f = f[:-1]
        elif f.endswith('$py.class'):
            f = f[:-len('$py.class')] + '.py'

    return f, mod, parent, foundAs
def format_param_class_name(paramClassName):
    '''Convert a JVM/Jython parameter type name into a readable Java-like name.

    Strips a surrounding "<type '...'>" wrapper and translates JVM array
    descriptors ([C, [B, [I, [Lpkg.Cls;) into char[]/byte[]/int[]/pkg.Cls[].
    '''
    name = paramClassName
    prefix, suffix = "<type '", "'>"
    if name.startswith(prefix) and name.endswith(suffix):
        name = name[len(prefix):-len(suffix)]

    if name.startswith('['):
        primitive_arrays = {'[C': 'char[]', '[B': 'byte[]', '[I': 'int[]'}
        if name in primitive_arrays:
            name = primitive_arrays[name]
        elif name.startswith('[L') and name.endswith(';'):
            name = name[2:-1] + '[]'
    return name
def generate_tip(data, log=None):
    '''Compute completion tips for the dotted name in *data*.

    @param log: unused; kept for interface compatibility with the CPython
        version of this module.
    @return: tuple (source_file, tips)
    '''
    target = data.replace('\n', '')
    if target.endswith('.'):
        target = target.rstrip('.')
    f, mod, _parent, _found_as = Find(target)
    return f, generate_imports_tip_for_module(mod)
#=======================================================================================================================
# Info
#=======================================================================================================================
class Info:
    '''Holds the signature information gathered for one function/method.'''

    def __init__(self, name, **kwargs):
        self.name = name
        self.doc = kwargs.get('doc', None)
        self.args = kwargs.get('args', ())  # tuple of strings
        self.varargs = kwargs.get('varargs', None)  # string
        self.kwargs = kwargs.get('kwargs', None)  # string
        self.ret = kwargs.get('ret', None)  # string

    def basic_as_str(self):
        '''@returns this class information as a string (just basic format)
        '''
        return 'function:%s args=%s, varargs=%s, kwargs=%s, docs:%s' % \
            (self.name, self.args, self.varargs, self.kwargs, self.doc)

    def get_as_doc(self):
        '''Render the gathered information in the @tag format used by the IDE.'''
        parts = [str(self.name)]
        if self.doc:
            parts.append('\n@doc %s\n' % str(self.doc))

        if self.args:
            parts.append('\n@params ')
            for arg in self.args:
                parts.append(str(format_param_class_name(arg)))
                parts.append(' ')

        if self.varargs:
            parts.append('\n@varargs ')
            parts.append(str(self.varargs))

        if self.kwargs:
            parts.append('\n@kwargs ')
            parts.append(str(self.kwargs))

        if self.ret:
            parts.append('\n@return ')
            parts.append(str(format_param_class_name(str(self.ret))))

        return ''.join(parts)
def isclass(cls):
    # On Jython a "class" is either a Python class (PyClass) or a Java class
    # (an instance of java.lang.Class).
    return isinstance(cls, core.PyClass) or type(cls) == java.lang.Class
def ismethod(func):
    '''this function should return the information gathered on a function
    @param func: this is the function we want to get info on
    @return a tuple where:
    0 = indicates whether the parameter passed is a method or not
    1 = a list of classes 'Info', with the info gathered from the function
    this is a list because when we have methods from java with the same name and different signatures,
    we actually have many methods, each with its own set of arguments
    '''
    try:
        if isinstance(func, core.PyFunction):
            # ok, this is from python, created by jython

            def getargs(func_code):
                """Get information about the arguments accepted by a code object.
                Three things are returned: (args, varargs, varkw), where 'args' is
                a list of argument names (possibly containing nested lists), and
                'varargs' and 'varkw' are the names of the * and ** arguments or None."""

                nargs = func_code.co_argcount
                names = func_code.co_varnames
                args = list(names[:nargs])
                step = 0

                # Older/newer Jython differ on where the CO_* flags live.
                if not hasattr(func_code, 'CO_VARARGS'):
                    from org.python.core import CodeFlag  # @UnresolvedImport
                    co_varargs_flag = CodeFlag.CO_VARARGS.flag
                    co_varkeywords_flag = CodeFlag.CO_VARKEYWORDS.flag
                else:
                    co_varargs_flag = func_code.CO_VARARGS
                    co_varkeywords_flag = func_code.CO_VARKEYWORDS

                varargs = None
                if func_code.co_flags & co_varargs_flag:
                    varargs = func_code.co_varnames[nargs]
                    nargs = nargs + 1
                varkw = None
                if func_code.co_flags & co_varkeywords_flag:
                    varkw = func_code.co_varnames[nargs]
                return args, varargs, varkw

            args = getargs(func.func_code)
            return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]

        if isinstance(func, core.PyMethod):
            # this is something from java itself, and jython just wrapped it...

            # things to play in func:
            # ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
            # 'im_func', 'im_self', 'toString']

            # that's the PyReflectedFunction... keep going to get it
            func = func.im_func

        if isinstance(func, PyReflectedFunction):
            # this is something from java itself, and jython just wrapped it...
            infos = []
            # One entry per Java overload of the same method name.
            for i in range(len(func.argslist)):
                # things to play in func.argslist[i]:
                # 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
                # 'flags', 'isStatic', 'matches', 'precedence']
                # func.argslist[i].data.__class__ == java.lang.reflect.Method
                if func.argslist[i]:
                    met = func.argslist[i].data
                    name = met.getName()
                    try:
                        ret = met.getReturnType()
                    except AttributeError:
                        ret = ''
                    parameterTypes = met.getParameterTypes()

                    args = []
                    for j in range(len(parameterTypes)):
                        paramTypesClass = parameterTypes[j]
                        try:
                            try:
                                paramClassName = paramTypesClass.getName()
                            except:
                                paramClassName = paramTypesClass.getName(paramTypesClass)
                        except AttributeError:
                            try:
                                paramClassName = repr(paramTypesClass)  # should be something like <type 'object'>
                                paramClassName = paramClassName.split('\'')[1]
                            except:
                                paramClassName = repr(paramTypesClass)  # just in case something else happens... it will at least be visible
                        # if the parameter equals [C, it means it is a char array, so, let's change it
                        a = format_param_class_name(paramClassName)
                        args.append(a)  # so we don't leave invalid code

                    info = Info(name, args=args, ret=ret)
                    infos.append(info)

            return 1, infos
    except Exception:
        # Never let introspection errors escape — surface them as a fake Info.
        s = StringIO()
        traceback.print_exc(file=s)
        return 1, [Info(str('ERROR'), doc=s.getvalue())]

    return 0, None
def ismodule(mod):
    '''Return a truthy value when *mod* is a (Python or Java) module.'''
    # java modules... do we have other way to know that?
    if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
       and hasattr(mod, '__name__'):
        return 1

    return isinstance(mod, core.PyModule)
def dir_obj(obj):
    '''Jython-aware dir(): for java.lang.Class instances, walks superclasses
    and interfaces collecting declared methods and fields, then merges in the
    plain dir() results (a plain dir() misses members inherited from
    interfaces — e.g. String.charAt).
    '''
    ret = []
    found = java.util.HashMap()
    original = obj
    if hasattr(obj, '__class__'):
        if obj.__class__ == java.lang.Class:
            # get info about superclasses
            classes = []
            classes.append(obj)
            try:
                c = obj.getSuperclass()
            except TypeError:
                # may happen on jython when getting the java.lang.Class class
                c = obj.getSuperclass(obj)

            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                try:
                    interfs.extend(obj.getInterfaces())
                except TypeError:
                    interfs.extend(obj.getInterfaces(obj))
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                try:
                    declaredMethods = obj.getDeclaredMethods()
                except TypeError:
                    declaredMethods = obj.getDeclaredMethods(obj)

                try:
                    declaredFields = obj.getDeclaredFields()
                except TypeError:
                    declaredFields = obj.getDeclaredFields(obj)

                for i in range(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret.append(name)
                    found.put(name, 1)

                for i in range(len(declaredFields)):
                    name = declaredFields[i].getName()
                    ret.append(name)
                    found.put(name, 1)

        elif isclass(obj.__class__):
            d = dir(obj.__class__)
            for name in d:
                ret.append(name)
                found.put(name, 1)

    # this simple dir does not always get all the info, that's why we have the part before
    # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
    # charAt don't appear)
    d = dir(original)
    for name in d:
        if found.get(name) != 1:
            ret.append(name)

    return ret
def format_arg(arg):
    '''Format a Java parameter type for display: keep only the simple class
    name, drop JVM ';' terminators, spell arrays as "...Array" and lowercase
    the first character.
    '''
    text = str(arg)
    # Keep only the part after the last '.' (the simple name).
    text = text.rsplit('.', 1)[-1]
    text = text.replace(';', '').replace('[]', 'Array')
    if text:
        text = text[0].lower() + text[1:]
    return text
def search_definition(data):
    '''@return file, line, col
    '''
    target = data.replace('\n', '')
    if target.endswith('.'):
        target = target.rstrip('.')
    f, mod, parent, foundAs = Find(target)
    try:
        return do_find(f, mod), foundAs
    except:
        # Fall back to the top-level module when the inner object fails.
        return do_find(f, parent), foundAs
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []

    if dir_comps is None:
        dir_comps = dir_obj(obj_to_complete)

    for d in dir_comps:
        if d is None:
            continue

        if not filter(d):
            continue

        args = ''
        doc = ''
        retType = TYPE_BUILTIN

        try:
            obj = getattr(obj_to_complete, d)
        except (AttributeError, java.lang.NoClassDefFoundError):
            # jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
            # we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
            # for that we need a custom classloader... we have references from it in the below places:
            #
            # http://mindprod.com/jgloss/classloader.html
            # http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
            # http://freshmeat.net/articles/view/1643/
            #
            # note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
            # before the run, everything goes fine.
            #
            # The code below illustrates what I mean...
            #
            # import sys
            # sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
            #
            # import junit.framework
            # print_ dir(junit.framework) #shows the TestCase class here
            #
            # import junit.framework.TestCase
            #
            # raises the error:
            # Traceback (innermost last):
            #     File "<console>", line 1, in ?
            # ImportError: No module named TestCase
            #
            # whereas if we had added the jar to the classpath before, everything would be fine by now...
            ret.append((d, '', '', retType))
            # that's ok, private things cannot be gotten...
            continue
        else:
            isMet = ismethod(obj)
            if isMet[0] and isMet[1]:
                # Use the first overload's Info to build the displayed signature.
                info = isMet[1][0]
                try:
                    args, vargs, kwargs = info.args, info.varargs, info.kwargs
                    doc = info.get_as_doc()
                    r = ''
                    for a in (args):
                        if len(r) > 0:
                            r += ', '
                        r += format_arg(a)
                    args = '(%s)' % (r)
                except TypeError:
                    traceback.print_exc()
                    args = '()'

                retType = TYPE_FUNCTION

            elif isclass(obj):
                retType = TYPE_CLASS

            elif ismodule(obj):
                retType = TYPE_IMPORT

        # add token and doc to return - assure only strings.
        ret.append((d, doc, args, retType))

    return ret
if __name__ == "__main__":
    # Manual smoke test (developer-machine path; never executed when imported).
    sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar')
    sys.stdout.write('%s\n' % Find('junit.framework.TestCase'))

View File

@ -0,0 +1,24 @@
import traceback
import sys
from io import StringIO
class Log:
    '''Collects log messages (and formatted exceptions) in memory.'''

    def __init__(self):
        self._contents = []

    def add_content(self, *content):
        # Each call becomes one entry; the parts are separated by single spaces.
        self._contents.append(' '.join(content))

    def add_exception(self):
        # Capture the traceback of the exception currently being handled.
        buf = StringIO()
        etype, value, tb = sys.exc_info()
        traceback.print_exception(etype, value, tb, limit=None, file=buf)
        self._contents.append(buf.getvalue())

    def get_contents(self):
        return '\n'.join(self._contents)

    def clear_log(self):
        del self._contents[:]

View File

@ -0,0 +1,110 @@
import sys
import os
def find_in_pythonpath(module_name):
    '''Return every sys.path location where *module_name* could be imported from.

    For each sys.path entry, both a package directory and a .py file are
    considered. This is a simplistic check (it won't see zip imports or
    import hooks), but it covers the common shadowing cases.
    '''
    components = module_name.split('.')  # mod.name -> mod/name on disk
    matches = []
    for entry in sys.path:
        candidate = os.path.join(entry, *components)
        if os.path.isdir(candidate):
            matches.append(candidate)
        candidate_py = candidate + '.py'
        if os.path.exists(candidate_py):
            matches.append(candidate_py)
    return matches
class DebuggerInitializationError(Exception):
    # Raised when the debugger cannot initialize (e.g. a stdlib module is
    # shadowed by a user file); the message is meant to be shown to the user.
    pass
class VerifyShadowedImport(object):
    '''Context manager that turns an import failure caused by a user file
    shadowing a stdlib module into a clear DebuggerInitializationError.

    Usage:
        with VerifyShadowedImport('threading') as verify_shadowed:
            import threading; verify_shadowed.check(threading, ['Thread'])
    '''

    def __init__(self, import_name):
        self.import_name = import_name

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None:
            if exc_type == DebuggerInitializationError:
                return False  # It's already an error we generated.

            # We couldn't even import it...
            found_at = find_in_pythonpath(self.import_name)

            if len(found_at) <= 1:
                # It wasn't found anywhere or there was just 1 occurrence.
                # Let's just return to show the original error.
                return False

            # We found more than 1 occurrence of the same module in the PYTHONPATH
            # (the user module and the standard library module).
            # Let's notify the user as it seems that the module was shadowed.
            msg = self._generate_shadowed_import_message(found_at)
            raise DebuggerInitializationError(msg)

    def _generate_shadowed_import_message(self, found_at):
        # NOTE: this text is shown verbatim to the user.
        msg = '''It was not possible to initialize the debugger due to a module name conflict.

i.e.: the module "%(import_name)s" could not be imported because it is shadowed by:
%(found_at)s
Please rename this file/folder so that the original module from the standard library can be imported.''' % {
            'import_name': self.import_name, 'found_at': found_at[0]}

        return msg

    def check(self, module, expected_attributes):
        # If the imported module lacks the expected stdlib attributes, it is
        # most likely a user file that shadowed the real module.
        msg = ''
        for expected_attribute in expected_attributes:
            try:
                getattr(module, expected_attribute)
            except:
                msg = self._generate_shadowed_import_message([module.__file__])
                break

        if msg:
            raise DebuggerInitializationError(msg)
# Eagerly import the stdlib modules the debugger depends on, failing with a
# clear, user-facing error when any of them is shadowed by a same-named file
# on sys.path.
with VerifyShadowedImport('threading') as verify_shadowed:
    import threading; verify_shadowed.check(threading, ['Thread', 'settrace', 'setprofile', 'Lock', 'RLock', 'current_thread'])

with VerifyShadowedImport('time') as verify_shadowed:
    import time; verify_shadowed.check(time, ['sleep', 'time', 'mktime'])

with VerifyShadowedImport('socket') as verify_shadowed:
    import socket; verify_shadowed.check(socket, ['socket', 'gethostname', 'getaddrinfo'])

with VerifyShadowedImport('select') as verify_shadowed:
    import select; verify_shadowed.check(select, ['select'])

with VerifyShadowedImport('code') as verify_shadowed:
    import code as _code; verify_shadowed.check(_code, ['compile_command', 'InteractiveInterpreter'])

with VerifyShadowedImport('_thread') as verify_shadowed:
    import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock'])

with VerifyShadowedImport('queue') as verify_shadowed:
    import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])

with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
    import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])

with VerifyShadowedImport('xmlrpc.server') as verify_shadowed:
    import xmlrpc.server as xmlrpcserver; verify_shadowed.check(xmlrpcserver, ['SimpleXMLRPCServer'])

with VerifyShadowedImport('http.server') as verify_shadowed:
    import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])

# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads.
# Note: as it can't be set during execution, don't import the name (import the module and access it through its name).
pydevd_saved_threading_enumerate = None

View File

@ -0,0 +1,73 @@
import sys
def patch_sys_module():
    '''Replace sys.exc_info with a wrapper that, for ImportError, hides the
    extra traceback frame added by the debugger's plugin_import call.

    The original function is preserved in sys.system_exc_info (only on the
    first call, so re-patching never loses the real implementation).
    '''

    def _make_patched_exc_info(original_exc_info):

        def pydev_debugger_exc_info():
            exc_type, value, tb = original_exc_info()
            # For ImportError, drop the frame the debugger itself added.
            if exc_type == ImportError and tb is not None and hasattr(tb, "tb_next"):
                return exc_type, value, tb.tb_next
            return exc_type, value, tb

        return pydev_debugger_exc_info

    original = sys.exc_info
    sys.exc_info = _make_patched_exc_info(original)
    if not hasattr(sys, "system_exc_info"):
        sys.system_exc_info = original
def patched_reload(orig_reload):
    '''Wrap *orig_reload* so that reloading the sys module re-applies the
    sys.exc_info patch (a reload would otherwise undo it).'''

    def pydev_debugger_reload(module):
        orig_reload(module)
        # Reloading sys restores the unpatched exc_info; patch it again.
        if module.__name__ == "sys":
            patch_sys_module()

    return pydev_debugger_reload
def patch_reload():
    '''Wrap builtins.reload / imp.reload / importlib.reload with the debugger
    version (see patched_reload); originals are stashed on sys so that
    cancel_patches_in_sys_module can restore them.
    '''
    import builtins  # Py3

    if hasattr(builtins, "reload"):
        sys.builtin_orig_reload = builtins.reload
        builtins.reload = patched_reload(sys.builtin_orig_reload)  # @UndefinedVariable
        try:
            import imp
            sys.imp_orig_reload = imp.reload
            imp.reload = patched_reload(sys.imp_orig_reload)  # @UndefinedVariable
        except:
            pass
    else:
        try:
            import importlib
            sys.importlib_orig_reload = importlib.reload  # @UndefinedVariable
            importlib.reload = patched_reload(sys.importlib_orig_reload)  # @UndefinedVariable
        except:
            pass

    del builtins
def cancel_patches_in_sys_module():
    '''Undo patch_sys_module/patch_reload: restore the original sys.exc_info
    and any reload functions previously stashed on sys.
    '''
    sys.exc_info = sys.system_exc_info  # @UndefinedVariable
    import builtins  # Py3

    if hasattr(sys, "builtin_orig_reload"):
        builtins.reload = sys.builtin_orig_reload

    if hasattr(sys, "imp_orig_reload"):
        import imp
        imp.reload = sys.imp_orig_reload

    if hasattr(sys, "importlib_orig_reload"):
        import importlib
        importlib.reload = sys.importlib_orig_reload

    del builtins

View File

@ -0,0 +1,52 @@
import inspect
import re
def do_find(f, mod):
    '''Return (file, line, col) locating *mod* inside file *f*.

    *mod* may be a module, class, function, method, traceback, frame or code
    object; methods/functions/frames/tracebacks are unwrapped down to their
    code object. Raises RuntimeError for anything else.
    '''
    import linecache
    if inspect.ismodule(mod):
        return f, 0, 0

    lines = linecache.getlines(f)

    if inspect.isclass(mod):
        name = mod.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]):
                return f, i, 0

        return f, 0, 0

    if inspect.ismethod(mod):
        mod = mod.im_func
    if inspect.isfunction(mod):
        try:
            mod = mod.func_code
        except AttributeError:
            mod = mod.__code__  # python 3k

    if inspect.istraceback(mod):
        mod = mod.tb_frame
    if inspect.isframe(mod):
        mod = mod.f_code

    if inspect.iscode(mod):
        if not hasattr(mod, 'co_filename'):
            return None, 0, 0
        if not hasattr(mod, 'co_firstlineno'):
            return mod.co_filename, 0, 0

        lnum = mod.co_firstlineno
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        while lnum > 0:
            # NOTE(review): co_firstlineno is 1-based while `lines` is 0-based,
            # so lines[lnum] is the line *after* the def; presumably kept for
            # compatibility with the IDE-side consumer — confirm. Could also
            # IndexError when the def is the file's last line (swallowed by
            # the callers' bare except).
            if pat.match(lines[lnum]):
                break
            lnum -= 1

        return f, lnum, 0

    raise RuntimeError('Do not know about: ' + f + ' ' + str(mod))

View File

@ -0,0 +1,353 @@
'''
Sample usage to track changes in a thread.
import threading
import time
watcher = fsnotify.Watcher()
watcher.accepted_file_extensions = {'.py', '.pyw'}
# Configure target values to compute throttling.
# Note: internal sleep times will be updated based on
# profiling the actual application runtime to match
# those values.
watcher.target_time_for_single_scan = 2.
watcher.target_time_for_notification = 4.
watcher.set_tracked_paths([target_dir])
def start_watching(): # Called from thread
for change_enum, change_path in watcher.iter_changes():
if change_enum == fsnotify.Change.added:
print('Added: ', change_path)
elif change_enum == fsnotify.Change.modified:
print('Modified: ', change_path)
elif change_enum == fsnotify.Change.deleted:
print('Deleted: ', change_path)
t = threading.Thread(target=start_watching)
t.daemon = True
t.start()
try:
...
finally:
watcher.dispose()
Note: changes are only reported for files (added/modified/deleted), not directories.
'''
import threading
import sys
from os.path import basename
from _pydev_bundle import pydev_log
from os import scandir
try:
from enum import IntEnum
except:
class IntEnum(object):
pass
import time
__author__ = 'Fabio Zadrozny'
__email__ = 'fabiofz@gmail.com'
__version__ = '0.1.5' # Version here and in setup.py
class Change(IntEnum):
    # Kind of filesystem change reported by Watcher.iter_changes().
    added = 1
    modified = 2
    deleted = 3
class _SingleVisitInfo(object):
    # State accumulated during one full scan over all tracked paths.

    def __init__(self):
        self.count = 0  # number of entries seen (drives throttling)
        self.visited_dirs = set()  # directories already visited in this scan
        self.file_to_mtime = {}  # path -> (st_mtime_ns, st_size) snapshot
        self.last_sleep_time = time.time()  # last time the scan throttled
class _PathWatcher(object):
    '''
    Helper to watch a single path.

    Recursively snapshots files under a root directory and reports
    added/modified entries against a previous snapshot (deletions are derived
    by the caller from what remains in old_file_to_mtime).
    '''

    def __init__(self, root_path, accept_directory, accept_file, single_visit_info, max_recursion_level, sleep_time=.0):
        '''
        :type root_path: str
        :type accept_directory: Callback[str, bool]
        :type accept_file: Callback[str, bool]
        :type max_recursion_level: int
        :type sleep_time: float
        '''
        self.accept_directory = accept_directory
        self.accept_file = accept_file
        self._max_recursion_level = max_recursion_level

        self._root_path = root_path

        # Initial sleep value for throttling, it'll be auto-updated based on the
        # Watcher.target_time_for_single_scan.
        self.sleep_time = sleep_time

        self.sleep_at_elapsed = 1. / 30.

        # When created, do the initial snapshot right away!
        old_file_to_mtime = {}
        self._check(single_visit_info, lambda _change: None, old_file_to_mtime)

    def __eq__(self, o):
        # Watchers are identified by their root path (see __hash__).
        if isinstance(o, _PathWatcher):
            return self._root_path == o._root_path

        return False

    def __ne__(self, o):
        return not self == o

    def __hash__(self):
        return hash(self._root_path)

    def _check_dir(self, dir_path, single_visit_info, append_change, old_file_to_mtime, level):
        # This is the actual poll loop
        if dir_path in single_visit_info.visited_dirs or level > self._max_recursion_level:
            return
        single_visit_info.visited_dirs.add(dir_path)
        try:
            if isinstance(dir_path, bytes):
                try:
                    dir_path = dir_path.decode(sys.getfilesystemencoding())
                except UnicodeDecodeError:
                    try:
                        dir_path = dir_path.decode('utf-8')
                    except UnicodeDecodeError:
                        return  # Ignore if we can't deal with the path.

            new_files = single_visit_info.file_to_mtime

            for entry in scandir(dir_path):
                single_visit_info.count += 1

                # Throttle if needed inside the loop
                # to avoid consuming too much CPU.
                if single_visit_info.count % 300 == 0:
                    if self.sleep_time > 0:
                        t = time.time()
                        diff = t - single_visit_info.last_sleep_time
                        if diff > self.sleep_at_elapsed:
                            time.sleep(self.sleep_time)
                            single_visit_info.last_sleep_time = time.time()

                if entry.is_dir():
                    if self.accept_directory(entry.path):
                        self._check_dir(entry.path, single_visit_info, append_change, old_file_to_mtime, level + 1)

                elif self.accept_file(entry.path):
                    # Files are compared by (mtime_ns, size); a change in either
                    # is reported as modified.
                    stat = entry.stat()
                    mtime = (stat.st_mtime_ns, stat.st_size)
                    path = entry.path
                    new_files[path] = mtime

                    old_mtime = old_file_to_mtime.pop(path, None)
                    if not old_mtime:
                        append_change((Change.added, path))
                    elif old_mtime != mtime:
                        append_change((Change.modified, path))

        except OSError:
            pass  # Directory was removed in the meanwhile.

    def _check(self, single_visit_info, append_change, old_file_to_mtime):
        # Entry point for one scan of this watcher's root.
        self._check_dir(self._root_path, single_visit_info, append_change, old_file_to_mtime, 0)
class Watcher(object):
# By default (if accept_directory is not specified), these will be the
# ignored directories.
ignored_dirs = {u'.git', u'__pycache__', u'.idea', u'node_modules', u'.metadata'}
# By default (if accept_file is not specified), these will be the
# accepted files.
accepted_file_extensions = ()
# Set to the target value for doing full scan of all files (adds a sleep inside the poll loop
# which processes files to reach the target time).
# Lower values will consume more CPU
# Set to 0.0 to have no sleeps (which will result in a higher cpu load).
target_time_for_single_scan = 2.0
# Set the target value from the start of one scan to the start of another scan (adds a
# sleep after a full poll is done to reach the target time).
# Lower values will consume more CPU.
# Set to 0.0 to have a new scan start right away without any sleeps.
target_time_for_notification = 4.0
# Set to True to print the time for a single poll through all the paths.
print_poll_time = False
# This is the maximum recursion level.
max_recursion_level = 10
def __init__(self, accept_directory=None, accept_file=None):
'''
:param Callable[str, bool] accept_directory:
Callable that returns whether a directory should be watched.
Note: if passed it'll override the `ignored_dirs`
:param Callable[str, bool] accept_file:
Callable that returns whether a file should be watched.
Note: if passed it'll override the `accepted_file_extensions`.
'''
self._path_watchers = set()
self._disposed = threading.Event()
if accept_directory is None:
accept_directory = lambda dir_path: basename(dir_path) not in self.ignored_dirs
if accept_file is None:
accept_file = lambda path_name: \
not self.accepted_file_extensions or path_name.endswith(self.accepted_file_extensions)
self.accept_file = accept_file
self.accept_directory = accept_directory
self._single_visit_info = _SingleVisitInfo()
@property
def accept_directory(self):
return self._accept_directory
@accept_directory.setter
def accept_directory(self, accept_directory):
self._accept_directory = accept_directory
for path_watcher in self._path_watchers:
path_watcher.accept_directory = accept_directory
@property
def accept_file(self):
return self._accept_file
@accept_file.setter
def accept_file(self, accept_file):
self._accept_file = accept_file
for path_watcher in self._path_watchers:
path_watcher.accept_file = accept_file
def dispose(self):
self._disposed.set()
@property
def path_watchers(self):
return tuple(self._path_watchers)
def set_tracked_paths(self, paths):
"""
Note: always resets all path trackers to track the passed paths.
"""
if not isinstance(paths, (list, tuple, set)):
paths = (paths,)
# Sort by the path len so that the bigger paths come first (so,
# if there's any nesting we want the nested paths to be visited
# before the parent paths so that the max_recursion_level is correct).
paths = sorted(set(paths), key=lambda path:-len(path))
path_watchers = set()
self._single_visit_info = _SingleVisitInfo()
initial_time = time.time()
for path in paths:
sleep_time = 0. # When collecting the first time, sleep_time should be 0!
path_watcher = _PathWatcher(
path,
self.accept_directory,
self.accept_file,
self._single_visit_info,
max_recursion_level=self.max_recursion_level,
sleep_time=sleep_time,
)
path_watchers.add(path_watcher)
actual_time = (time.time() - initial_time)
pydev_log.debug('Tracking the following paths for changes: %s', paths)
pydev_log.debug('Time to track: %.2fs', actual_time)
pydev_log.debug('Folders found: %s', len(self._single_visit_info.visited_dirs))
pydev_log.debug('Files found: %s', len(self._single_visit_info.file_to_mtime))
self._path_watchers = path_watchers
def iter_changes(self):
    '''
    Continuously provides changes (until dispose() is called).

    Changes provided are tuples with the Change enum and filesystem path.

    :rtype: Iterable[Tuple[Change, str]]
    '''
    while not self._disposed.is_set():
        initial_time = time.time()

        # Keep the previous scan's mtimes; entries still present are popped
        # during the new scan, so whatever remains afterwards was deleted.
        old_visit_info = self._single_visit_info
        old_file_to_mtime = old_visit_info.file_to_mtime
        changes = []
        append_change = changes.append  # hoisted bound method for the hot loop

        self._single_visit_info = single_visit_info = _SingleVisitInfo()
        for path_watcher in self._path_watchers:
            path_watcher._check(single_visit_info, append_change, old_file_to_mtime)

        # Note that we pop entries while visiting, so, what remained is what's deleted.
        for entry in old_file_to_mtime:
            append_change((Change.deleted, entry))

        for change in changes:
            yield change

        actual_time = (time.time() - initial_time)
        if self.print_poll_time:
            print('--- Total poll time: %.3fs' % actual_time)

        if actual_time > 0:
            if self.target_time_for_single_scan <= 0.0:
                # No throttling requested: scan as fast as possible.
                for path_watcher in self._path_watchers:
                    path_watcher.sleep_time = 0.0
            else:
                # Adjust each watcher's per-entry sleep so that a full scan
                # approaches target_time_for_single_scan.
                perc = self.target_time_for_single_scan / actual_time

                # Prevent from changing the values too much (go slowly into the right
                # direction).
                # (to prevent from cases where the user puts the machine on sleep and
                # values become too skewed).
                if perc > 2.:
                    perc = 2.
                elif perc < 0.5:
                    perc = 0.5

                for path_watcher in self._path_watchers:
                    if path_watcher.sleep_time <= 0.0:
                        path_watcher.sleep_time = 0.001
                    new_sleep_time = path_watcher.sleep_time * perc

                    # Prevent from changing the values too much (go slowly into the right
                    # direction).
                    # (to prevent from cases where the user puts the machine on sleep and
                    # values become too skewed).
                    diff_sleep_time = new_sleep_time - path_watcher.sleep_time
                    path_watcher.sleep_time += (diff_sleep_time / (3.0 * len(self._path_watchers)))

                    if actual_time > 0:
                        self._disposed.wait(actual_time)

                    if path_watcher.sleep_time < 0.001:
                        path_watcher.sleep_time = 0.001

        # print('new sleep time: %s' % path_watcher.sleep_time)

        # Wait out the remainder of the notification budget before re-polling.
        diff = self.target_time_for_notification - actual_time
        if diff > 0.:
            self._disposed.wait(diff)

View File

@ -0,0 +1,639 @@
import os
import sys
import traceback
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
from _pydev_bundle._pydev_calltip_util import get_description
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger,
silence_warnings_decorator)
from contextlib import contextmanager
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_utils import interrupt_main_thread
from io import StringIO
# =======================================================================================================================
# BaseStdIn
# =======================================================================================================================
class BaseStdIn:
    """Stream-interface stub used in place of stdin.

    Reads return an empty line instead of blocking; writes are no-ops.
    Unknown attributes are delegated to the wrapped original stdin.
    """

    def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
        try:
            self.encoding = sys.stdin.encoding
        except:
            # Not every stream exposes .encoding.
            pass

        self.original_stdin = original_stdin

        try:
            self.errors = sys.stdin.errors  # Who knew? sys streams have an errors attribute!
        except:
            # Not every stream exposes .errors either.
            pass

    def readline(self, *args, **kwargs):
        # Reading is not really supported here: answer with an empty line so
        # callers such as input() don't raise EOFError (and no message is
        # printed, which would pollute the console output).
        return '\n'

    def write(self, *args, **kwargs):
        pass  # not writable, but expected in the stream interface

    def flush(self, *args, **kwargs):
        pass  # nothing buffered, but expected in the stream interface

    def read(self, *args, **kwargs):
        # In the interactive interpreter a read behaves the same as a readline.
        return self.readline()

    def close(self, *args, **kwargs):
        pass  # expected in the stream interface

    def __iter__(self):
        # Needed explicitly so this object is considered Iterable in Python 3.
        return self.original_stdin.__iter__()

    def __getattr__(self, item):
        # Called only when normal lookup fails: delegate to the real stdin.
        delegate = self.original_stdin
        if not hasattr(delegate, item):
            raise AttributeError("%s has no attribute %s" % (delegate, item))
        return getattr(delegate, item)
# =======================================================================================================================
# StdIn
# =======================================================================================================================
class StdIn(BaseStdIn):
    '''
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
    '''

    def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
        BaseStdIn.__init__(self, original_stdin)
        self.interpreter = interpreter
        self.client_port = client_port
        self.host = host

    def readline(self, *args, **kwargs):
        # Callback into the client (over XML-RPC) to obtain the next input line.
        try:
            proxy = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
            requested_input = proxy.RequestInput()
            if not requested_input:
                # A readline must return something, otherwise input() raises EOFError.
                return '\n'
            # readline should end with '\n' (not doing so makes IPython 5 remove
            # the last *valid* character).
            return requested_input + '\n'
        except KeyboardInterrupt:
            raise  # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
        except:
            return '\n'

    def close(self, *args, **kwargs):
        pass  # expected in the stream interface
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    '''
    Stdin used while the debug console is active: real reads are delegated to
    the original stdin, and the IDE is notified when input starts/stops being
    requested.
    '''

    def __init__(self, py_db, original_stdin):
        '''
        :param py_db:
            If None, get_global_debugger() is used.
        '''
        BaseStdIn.__init__(self, original_stdin)
        self._py_db = py_db
        self._in_notification = 0

    def _send_input_requested_message(self, is_started):
        # Best-effort: notification failures must never break the actual read.
        try:
            py_db = self._py_db
            if py_db is None:
                py_db = get_global_debugger()
            if py_db is None:
                return
            py_db.writer.add_command(
                py_db.cmd_factory.make_input_requested_message(is_started))
        except Exception:
            pydev_log.exception()

    @contextmanager
    def notify_input_requested(self):
        # Re-entrant: only the outermost enter/exit actually notifies the IDE.
        self._in_notification += 1
        if self._in_notification == 1:
            self._send_input_requested_message(True)
        try:
            yield
        finally:
            self._in_notification -= 1
            if self._in_notification == 0:
                self._send_input_requested_message(False)

    def readline(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.readline(*args, **kwargs)

    def read(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.read(*args, **kwargs)
class CodeFragment:
    """Accumulates console input and tracks whether it is still a single line."""

    def __init__(self, text, is_single_line=True):
        self.text = text
        self.is_single_line = is_single_line

    def append(self, code_fragment):
        """Concatenate another fragment (newline-separated) onto this one."""
        self.text = '\n'.join((self.text, code_fragment.text))
        # Once any appended piece spans multiple lines, the whole fragment does.
        if not code_fragment.is_single_line:
            self.is_single_line = False
# =======================================================================================================================
# BaseInterpreterInterface
# =======================================================================================================================
class BaseInterpreterInterface:
    """Base class for console back-ends exposed over XML-RPC.

    Subclasses must implement do_add_exec() and get_namespace(); subclasses
    that talk back to the IDE also set self.host / self.client_port (used by
    get_server / create_std_in) and self._input_error_printed.
    """

    def __init__(self, mainThread, connect_status_queue=None):
        self.mainThread = mainThread  # thread used as target for interrupt()
        self.interruptable = False  # True only while user code runs (start_exec/finish_exec)
        self.exec_queue = _queue.Queue(0)  # code fragments / callables consumed by the main loop
        self.buffer = None  # pending CodeFragment while more input is needed
        self.banner_shown = False
        self.connect_status_queue = connect_status_queue
        self.mpl_modules_for_patching = {}
        self.init_mpl_modules_for_patching()

    def build_banner(self):
        # Source code that, when executed in the console, prints the greeting.
        return 'print({0})\n'.format(repr(self.get_greeting_msg()))

    def get_greeting_msg(self):
        return 'PyDev console: starting.\n'

    def init_mpl_modules_for_patching(self):
        # Map module name -> activation function to run right after that module
        # is first imported (enables matplotlib GUI event-loop integration).
        from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot
        self.mpl_modules_for_patching = {
            "matplotlib": lambda: activate_matplotlib(self.enableGui),
            "matplotlib.pyplot": activate_pyplot,
            "pylab": activate_pylab
        }

    def need_more_for_code(self, source):
        """Return True if `source` is an incomplete statement needing more input."""
        # PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
        # Strangely even the IPython console is_complete said it was complete
        # even with a continuation char at the end.
        if source.endswith('\\'):
            return True

        # NOTE(review): self.interpreter is provided by subclasses -- confirm
        # for any new subclass.
        if hasattr(self.interpreter, 'is_complete'):
            return not self.interpreter.is_complete(source)
        try:
            # At this point, it should always be single.
            # If we don't do this, things as:
            #
            # for i in range(10): print(i)
            #
            # (in a single line) don't work.
            # Note that it won't give an error and code will be None (so, it'll
            # use execMultipleLines in the next call in this case).
            symbol = 'single'
            code = self.interpreter.compile(source, '<input>', symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1: invalid source -- do not ask for more input.
            return False
        if code is None:
            # Case 2: valid but incomplete -- more input needed.
            return True

        # Case 3: complete statement.
        return False

    def need_more(self, code_fragment):
        # Accumulate into self.buffer until a complete statement is formed.
        if self.buffer is None:
            self.buffer = code_fragment
        else:
            self.buffer.append(code_fragment)

        return self.need_more_for_code(self.buffer.text)

    def create_std_in(self, debugger=None, original_std_in=None):
        # Without a debugger, input is requested from the IDE over XML-RPC;
        # with one, reads go to the original stdin with IDE notifications.
        if debugger is None:
            return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
        else:
            return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in)

    def add_exec(self, code_fragment, debugger=None):
        """Execute a code fragment, temporarily swapping stdin (and pydoc.help's
        input) so that input()/help() interact with the IDE.

        Returns `more`: True if the fragment is incomplete and further input
        is expected.
        """
        # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
        # (showtraceback does it on python 3.5 onwards)
        sys.excepthook = sys.__excepthook__
        try:
            original_in = sys.stdin
            try:
                help = None
                if 'pydoc' in sys.modules:
                    pydoc = sys.modules['pydoc']  # Don't import it if it still is not there.

                    if hasattr(pydoc, 'help'):
                        # You never know how will the API be changed, so, let's code defensively here
                        help = pydoc.help
                        if not hasattr(help, 'input'):
                            help = None
            except:
                # Just ignore any error here
                pass

            more = False
            try:
                sys.stdin = self.create_std_in(debugger, original_in)
                try:
                    if help is not None:
                        # This will enable the help() function to work.
                        try:
                            try:
                                help.input = sys.stdin
                            except AttributeError:
                                help._input = sys.stdin
                        except:
                            help = None
                            if not self._input_error_printed:
                                self._input_error_printed = True
                                sys.stderr.write('\nError when trying to update pydoc.help.input\n')
                                sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n')
                                traceback.print_exc()

                    try:
                        self.start_exec()
                        if hasattr(self, 'debugger'):
                            self.debugger.enable_tracing()

                        more = self.do_add_exec(code_fragment)

                        if hasattr(self, 'debugger'):
                            self.debugger.disable_tracing()

                        self.finish_exec(more)
                    finally:
                        # Restore pydoc.help's input even if execution failed.
                        if help is not None:
                            try:
                                try:
                                    help.input = original_in
                                except AttributeError:
                                    help._input = original_in
                            except:
                                pass
                finally:
                    sys.stdin = original_in
            except SystemExit:
                raise
            except:
                traceback.print_exc()
        finally:
            sys.__excepthook__ = sys.excepthook

        return more

    def do_add_exec(self, codeFragment):
        '''
        Subclasses should override.

        @return: more (True if more input is needed to complete the statement and False if the statement is complete).
        '''
        raise NotImplementedError()

    def get_namespace(self):
        '''
        Subclasses should override.

        @return: dict with namespace.
        '''
        raise NotImplementedError()

    def __resolve_reference__(self, text):
        """Resolve a (possibly dotted) name against the namespace / builtins.

        :type text: str
        """
        obj = None
        if '.' not in text:
            try:
                obj = self.get_namespace()[text]
            except KeyError:
                pass

            if obj is None:
                # __builtins__ may be a dict...
                try:
                    obj = self.get_namespace()['__builtins__'][text]
                except:
                    pass

            if obj is None:
                # ... or a module.
                try:
                    obj = getattr(self.get_namespace()['__builtins__'], text, None)
                except:
                    pass

        else:
            try:
                # Evaluate everything before the last dot, then getattr the rest.
                last_dot = text.rindex('.')
                parent_context = text[0:last_dot]
                res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
                obj = getattr(res, text[last_dot + 1:])
            except:
                pass
        return obj

    def getDescription(self, text):
        # Returns a docstring-like description for the given name ('' on failure).
        try:
            obj = self.__resolve_reference__(text)
            if obj is None:
                return ''
            return get_description(obj)
        except:
            return ''

    def do_exec_code(self, code, is_single_line):
        # Queue the accumulated fragment for execution once it is complete.
        try:
            code_fragment = CodeFragment(code, is_single_line)
            more = self.need_more(code_fragment)
            if not more:
                code_fragment = self.buffer
                self.buffer = None
                self.exec_queue.put(code_fragment)

            return more
        except:
            traceback.print_exc()
            return False

    def execLine(self, line):
        return self.do_exec_code(line, True)

    def execMultipleLines(self, lines):
        if IS_JYTHON:
            # Jython can't compile multi-line blocks at once: feed line by line.
            more = False
            for line in lines.split('\n'):
                more = self.do_exec_code(line, True)
            return more
        else:
            return self.do_exec_code(lines, False)

    def interrupt(self):
        self.buffer = None  # Also clear the buffer when it's interrupted.
        try:
            if self.interruptable:
                # Fix for #PyDev-500: Console interrupt can't interrupt on sleep
                interrupt_main_thread(self.mainThread)

            self.finish_exec(False)
            return True
        except:
            traceback.print_exc()
            return False

    def close(self):
        sys.exit(0)

    def start_exec(self):
        self.interruptable = True

    def get_server(self):
        # XML-RPC proxy to the IDE, or None when host was never configured.
        if getattr(self, 'host', None) is not None:
            return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
        else:
            return None

    server = property(get_server)

    def ShowConsole(self):
        server = self.get_server()
        if server is not None:
            server.ShowConsole()

    def finish_exec(self, more):
        self.interruptable = False

        server = self.get_server()

        if server is not None:
            return server.NotifyFinished(more)
        else:
            return True

    def getFrame(self):
        # XML with the variables of the console namespace (hidden ones excluded).
        xml = StringIO()
        hidden_ns = self.get_ipython_hidden_vars_dict()
        xml.write("<xml>")
        xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
        xml.write("</xml>")

        return xml.getvalue()

    @silence_warnings_decorator
    def getVariable(self, attributes):
        # XML with the children of a compound variable (attributes is the path).
        xml = StringIO()
        xml.write("<xml>")
        val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
        if val_dict is None:
            val_dict = {}

        for k, val in val_dict.items():
            val = val_dict[k]
            evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
            xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))

        xml.write("</xml>")

        return xml.getvalue()

    def getArray(self, attr, roffset, coffset, rows, cols, format):
        # The actual variable name is the last tab-separated component.
        name = attr.split("\t")[-1]
        array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
        return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)

    def evaluate(self, expression):
        # XML with the result of evaluating `expression` in the namespace.
        xml = StringIO()
        xml.write("<xml>")
        result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
        xml.write(pydevd_vars.var_to_xml(result, expression))
        xml.write("</xml>")
        return xml.getvalue()

    @silence_warnings_decorator
    def loadFullValue(self, seq, scope_attrs):
        """
        Evaluate full value for async Console variables in a separate thread and send results to IDE side
        :param seq: id of command
        :param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR
        (i.e.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\attr1\tattr2)
        :return:
        """
        frame_variables = self.get_namespace()
        var_objects = []
        vars = scope_attrs.split(NEXT_VALUE_SEPARATOR)
        for var_attrs in vars:
            if '\t' in var_attrs:
                name, attrs = var_attrs.split('\t', 1)

            else:
                name = var_attrs
                attrs = None
            if name in frame_variables:
                var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs)
                var_objects.append((var_object, name))
            else:
                # Not a plain variable: evaluate it as an expression.
                var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables)
                var_objects.append((var_object, name))

        from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole
        py_db = getattr(self, 'debugger', None)

        if py_db is None:
            py_db = get_global_debugger()

        if py_db is None:
            from pydevd import PyDB
            py_db = PyDB()

        t = GetValueAsyncThreadConsole(py_db, self.get_server(), seq, var_objects)
        t.start()

    def changeVariable(self, attr, value):

        def do_change_variable():
            Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace())

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_change_variable)

    def connectToDebugger(self, debuggerPort, debugger_options=None):
        '''
        Used to show console with variables connection.
        Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.
        '''

        if debugger_options is None:
            debugger_options = {}
        env_key = "PYDEVD_EXTRA_ENVS"
        if env_key in debugger_options:
            for (env_name, value) in debugger_options[env_key].items():
                existing_value = os.environ.get(env_name, None)
                if existing_value:
                    os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)
                else:
                    os.environ[env_name] = value
                if env_name == "PYTHONPATH":
                    sys.path.append(value)

            del debugger_options[env_key]

        def do_connect_to_debugger():
            try:
                # Try to import the packages needed to attach the debugger
                import pydevd
                from _pydev_bundle._pydev_saved_modules import threading
            except:
                # This happens on Jython embedded in host eclipse
                traceback.print_exc()
                sys.stderr.write('pydevd is not available, cannot connect\n')

            from _pydevd_bundle.pydevd_constants import set_thread_id
            from _pydev_bundle import pydev_localhost
            set_thread_id(threading.current_thread(), "console_main")

            VIRTUAL_FRAME_ID = "1"  # matches PyStackFrameConsole.java
            VIRTUAL_CONSOLE_ID = "console_main"  # matches PyThreadConsole.java
            f = FakeFrame()
            f.f_back = None
            f.f_globals = {}  # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
            f.f_locals = self.get_namespace()

            self.debugger = pydevd.PyDB()
            self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f)
            try:
                pydevd.apply_debugger_options(debugger_options)
                self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
                self.debugger.prepare_to_run()
                self.debugger.disable_tracing()
            except:
                traceback.print_exc()
                sys.stderr.write('Failed to connect to target debugger.\n')

            # Register to process commands when idle
            self.debugrunning = False
            try:
                import pydevconsole
                pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
            except:
                traceback.print_exc()
                sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n')

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_connect_to_debugger)

        return ('connect complete',)

    def handshake(self):
        if self.connect_status_queue is not None:
            self.connect_status_queue.put(True)
        return "PyCharm"

    def get_connect_status_queue(self):
        return self.connect_status_queue

    def hello(self, input_str):
        # Don't care what the input string is
        return ("Hello eclipse",)

    def enableGui(self, guiname):
        ''' Enable the GUI specified in guiname (see inputhook for list).
            As with IPython, enabling multiple GUIs isn't an error, but
            only the last one's main loop runs and it may not work
        '''

        def do_enable_gui():
            from _pydev_bundle.pydev_versioncheck import versionok_for_gui
            if versionok_for_gui():
                try:
                    from pydev_ipython.inputhook import enable_gui
                    enable_gui(guiname)
                except:
                    sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
                    traceback.print_exc()
            elif guiname not in ['none', '', None]:
                # Only print a warning if the guiname was going to do something
                sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
            # Return value does not matter, so return back what was sent
            return guiname

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_enable_gui)

    def get_ipython_hidden_vars_dict(self):
        # Overridden by IPython-based subclasses; None means no hidden vars.
        return None
# =======================================================================================================================
# FakeFrame
# =======================================================================================================================
class FakeFrame:
    '''
    Used to show console with variables connection.

    A class to be used as a mock of a frame.
    '''
    # Frame attributes (f_back, f_globals, f_locals) are assigned by the code
    # that creates the fake frame (see connectToDebugger).

View File

@ -0,0 +1,40 @@
import sys
import traceback
from types import ModuleType
from _pydevd_bundle.pydevd_constants import DebugInfoHolder
import builtins
class ImportHookManager(ModuleType):
    """Module-like wrapper around __import__ that runs one-shot activation
    callbacks the first time selected modules are imported."""

    def __init__(self, name, system_import):
        ModuleType.__init__(self, name)
        self._system_import = system_import
        self._modules_to_patch = {}

    def add_module_name(self, module_name, activate_function):
        """Register activate_function to run right after module_name is imported."""
        self._modules_to_patch[module_name] = activate_function

    def do_import(self, name, *args, **kwargs):
        imported_module = self._system_import(name, *args, **kwargs)
        try:
            activate_func = self._modules_to_patch.pop(name, None)
            if activate_func:
                activate_func()  # call activate function
        except:
            if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                traceback.print_exc()

        if not self._modules_to_patch:
            # Nothing left to patch: restore the normal system importer to
            # avoid the per-import overhead of going through this hook.
            builtins.__import__ = self._system_import

        return imported_module
# Install the hook: replace the builtin importer with the manager and expose
# the manager itself as an importable module (<package>.import_hook).
import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__)
builtins.__import__ = import_hook_manager.do_import
sys.modules[import_hook_manager.__name__] = import_hook_manager

View File

@ -0,0 +1,13 @@
from _pydev_bundle._pydev_saved_modules import xmlrpclib
from _pydev_bundle._pydev_saved_modules import xmlrpcserver
SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer
from _pydev_bundle._pydev_execfile import execfile
from _pydev_bundle._pydev_saved_modules import _queue
from _pydevd_bundle.pydevd_exec2 import Exec
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport

View File

@ -0,0 +1,97 @@
import sys
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
import traceback
# Uncomment to force PyDev standard shell.
# raise ImportError()
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
#=======================================================================================================================
# InterpreterInterface
#=======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.
    '''

    def __init__(self, host, client_port, main_thread, show_banner=True, connect_status_queue=None):
        BaseInterpreterInterface.__init__(self, main_thread, connect_status_queue)
        self.host = host
        self.client_port = client_port
        self.interpreter = get_pydev_frontend(host, client_port)
        self._input_error_printed = False
        self.show_banner = show_banner
        # Bookkeeping for notifying the IDE about magic commands (bounded retries).
        self.notification_succeeded = False
        self.notification_tries = 0
        self.notification_max_tries = 3
        self.notify_about_magic()

    def get_greeting_msg(self):
        if self.show_banner:
            self.interpreter.show_banner()
        return self.interpreter.get_greeting_msg()

    def do_add_exec(self, code_fragment):
        self.notify_about_magic()
        # A trailing '??' means the user asked for IPython's detailed help output.
        wants_ipython_help = code_fragment.text.rstrip().endswith('??')
        if wants_ipython_help:
            print('IPython-->')
        try:
            res = bool(self.interpreter.add_exec(code_fragment.text))
        finally:
            if wants_ipython_help:
                print('<--IPython')

        return res

    def get_namespace(self):
        return self.interpreter.get_namespace()

    def getCompletions(self, text, act_tok):
        return self.interpreter.getCompletions(text, act_tok)

    def close(self):
        sys.exit(0)

    def notify_about_magic(self):
        if self.notification_succeeded:
            return
        self.notification_tries += 1
        if self.notification_tries > self.notification_max_tries:
            return
        magic_commands = [completion[0] for completion in self.getCompletions("%", "%")]

        server = self.get_server()
        if server is not None:
            try:
                server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
                self.notification_succeeded = True
            except:
                self.notification_succeeded = False

    def get_ipython_hidden_vars_dict(self):
        try:
            shell = self.interpreter
            if hasattr(shell, 'ipython') and hasattr(shell.ipython, 'user_ns_hidden'):
                user_ns_hidden = shell.ipython.user_ns_hidden
                if isinstance(user_ns_hidden, dict):
                    # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
                    user_hidden_dict = user_ns_hidden.copy()
                else:
                    # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
                    user_hidden_dict = dict((key, val) for key, val in shell.ipython.user_ns.items()
                                            if key in user_ns_hidden)

                # while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden`
                for underscore_name in ('_', '__', '___'):
                    user_hidden_dict.setdefault(underscore_name, '')
                return user_hidden_dict
        except:
            # Getting IPython variables shouldn't break loading frame variables
            traceback.print_exc()

View File

@ -0,0 +1,516 @@
# TODO that would make IPython integration better
# - show output other times then when enter was pressed
# - support proper exit to allow IPython to cleanup (e.g. temp files created with %edit)
# - support Ctrl-D (Ctrl-Z on Windows)
# - use IPython (numbered) prompts in PyDev
# - better integration of IPython and PyDev completions
# - some of the semantics on handling the code completion are not correct:
# eg: Start a line with % and then type c should give %cd as a completion by it doesn't
# however type %c and request completions and %cd is given as an option
# eg: Completing a magic when user typed it without the leading % causes the % to be inserted
# to the left of what should be the first colon.
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
for IPython 0.11 to 1.0+.
"""
from __future__ import print_function
import os
import sys
import codeop
import traceback
from IPython.core.error import UsageError
from IPython.core.completer import IPCompleter
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.core.usage import default_banner_parts
from IPython.utils.strdispatch import StrDispatch
import IPython.core.release as IPythonRelease
from IPython.terminal.interactiveshell import TerminalInteractiveShell
try:
from traitlets import CBool, Unicode
except ImportError:
from IPython.utils.traitlets import CBool, Unicode
from IPython.core import release
from _pydev_bundle.pydev_imports import xmlrpclib
# Console banner: reuse IPython's default banner parts verbatim.
default_pydev_banner_parts = default_banner_parts

default_pydev_banner = ''.join(default_pydev_banner_parts)
def show_in_pager(self, strng, *args, **kwargs):
    """ Run a string through pager """
    # On PyDev we just print the string: the console has scrollbars to handle
    # "paging" (same behaviour as when TERM==dumb, see IPython's page.py).
    # For compatibility with the mime-bundle form, unwrap dict payloads.
    text = strng.get('text/plain', strng) if isinstance(strng, dict) else strng
    print(text)
def create_editor_hook(pydev_host, pydev_client_port):
    """Return an editor hook that asks the PyDev IDE to open a file."""

    def call_editor(filename, line=0, wait=True):
        """ Open an editor in PyDev """
        if line is None:
            line = 0

        # Send an absolute path: unlike most editor hooks no process is
        # launched here -- the IDE opens the editor itself (this mirrors what
        # happens in the zmqshell).
        filename = os.path.abspath(filename)

        # Tell PyDev to open the editor via XML-RPC.
        server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port))
        server.IPythonEditor(filename, str(line))

        if wait:
            input("Press Enter when done editing:")

    return call_editor
class PyDevIPCompleter(IPCompleter):

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)
        # Use PyDev for python matches, see getCompletions below
        if self.python_matches in self.matchers:
            # `self.python_matches` matches attributes or global python names
            self.matchers.remove(self.python_matches)
class PyDevIPCompleter6(IPCompleter):
    # Variant of PyDevIPCompleter used where `matchers` is a property on the
    # IPCompleter superclass (see `_new_completer_600`), so python_matches is
    # excluded by overriding the property instead of mutating a list.

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
        in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)

    @property
    def matchers(self):
        """All active matcher routines for completion"""
        # To remove python_matches we now have to override it as it's now a property in the superclass.
        return [
            self.file_matches,
            self.magic_matches,
            self.python_func_kw_matches,
            self.dict_key_matches,
        ]

    @matchers.setter
    def matchers(self, value):
        # To stop the init in IPCompleter raising an AttributeError we now have to specify a setter as it's now a property in the superclass.
        return
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
banner1 = Unicode(default_pydev_banner, config=True,
help="""The part of the banner to be printed before the profile"""
)
# TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
# for now, just disable term_title
term_title = CBool(False)
# Note in version 0.11 there is no guard in the IPython code about displaying a
# warning, so with 0.11 you get:
# WARNING: Readline services not available or not loaded.
# WARNING: The auto-indent feature requires the readline library
# Disable readline, readline type code is all handled by PyDev (on Java side)
readline_use = CBool(False)
# autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
# and attempting to enable it will print a warning in the absence of readline.
autoindent = CBool(False)
# Force console to not give warning about color scheme choice and default to NoColor.
# TODO It would be nice to enable colors in PyDev but:
# - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
# effect isn't as nice anyway at the command line
# - If done, the color scheme should default to LightBG, but actually be dependent on
# any settings the user has (such as if a dark theme is in use, then Linux is probably
# a better theme).
colors_force = CBool(True)
colors = Unicode("NoColor")
# Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
# the `simple_prompt` flag is needed
simple_prompt = CBool(True)
# In the PyDev Console, GUI control is done via hookable XML-RPC server
@staticmethod
def enable_gui(gui=None, app=None):
"""Switch amongst GUI input hooks by name.
"""
# Deferred import
from pydev_ipython.inputhook import enable_gui as real_enable_gui
try:
return real_enable_gui(gui, app)
except ValueError as e:
raise UsageError("%s" % e)
#-------------------------------------------------------------------------
# Things related to hooks
#-------------------------------------------------------------------------
def init_history(self):
# Disable history so that we don't have an additional thread for that
# (and we don't use the history anyways).
self.config.HistoryManager.enabled = False
super(PyDevTerminalInteractiveShell, self).init_history()
def init_hooks(self):
super(PyDevTerminalInteractiveShell, self).init_hooks()
self.set_hook('show_in_pager', show_in_pager)
#-------------------------------------------------------------------------
# Things related to exceptions
#-------------------------------------------------------------------------
def showtraceback(self, exc_tuple=None, *args, **kwargs):
# IPython does a lot of clever stuff with Exceptions. However mostly
# it is related to IPython running in a terminal instead of an IDE.
# (e.g. it prints out snippets of code around the stack trace)
# PyDev does a lot of clever stuff too, so leave exception handling
# with default print_exc that PyDev can parse and do its clever stuff
# with (e.g. it puts links back to the original source code)
try:
if exc_tuple is None:
etype, value, tb = sys.exc_info()
else:
etype, value, tb = exc_tuple
except ValueError:
return
if tb is not None:
traceback.print_exception(etype, value, tb)
#-------------------------------------------------------------------------
# Things related to text completion
#-------------------------------------------------------------------------
# The way to construct an IPCompleter changed in most versions,
# so we have a custom, per version implementation of the construction
def _new_completer_100(self):
    """Build a PyDevIPCompleter for IPython 1.x.

    IPython 1.x still takes an ``alias_table`` argument; later versions
    dropped it, hence the per-version constructors.
    """
    completer = PyDevIPCompleter(shell=self,
                                 namespace=self.user_ns,
                                 global_namespace=self.user_global_ns,
                                 alias_table=self.alias_manager.alias_table,
                                 use_readline=self.has_readline,
                                 parent=self,
                                 )
    return completer
def _new_completer_234(self):
    # correct for IPython versions 2.x, 3.x, 4.x
    """Build a PyDevIPCompleter for IPython 2.x/3.x/4.x (no alias_table)."""
    completer = PyDevIPCompleter(shell=self,
                                 namespace=self.user_ns,
                                 global_namespace=self.user_global_ns,
                                 use_readline=self.has_readline,
                                 parent=self,
                                 )
    return completer
def _new_completer_500(self):
    """Build a PyDevIPCompleter for IPython 5.x.

    IPython 5 dropped terminal readline support, so use_readline is
    hard-coded to False here.
    """
    completer = PyDevIPCompleter(shell=self,
                                 namespace=self.user_ns,
                                 global_namespace=self.user_global_ns,
                                 use_readline=False,
                                 parent=self
                                 )
    return completer
def _new_completer_600(self):
    """Build a PyDevIPCompleter6 (the IPython 6+ flavor) for IPython 6.x+."""
    completer = PyDevIPCompleter6(shell=self,
                                  namespace=self.user_ns,
                                  global_namespace=self.user_global_ns,
                                  use_readline=False,
                                  parent=self
                                  )
    return completer
def add_completer_hooks(self):
    """Register per-command custom completers on top of the base completer.

    Wires IPython's stock completers for ``import``/``from``, ``%run``,
    ``%cd`` and (when available) ``%reset`` into the string dispatcher
    used by the Completer.
    """
    from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer
    try:
        from IPython.core.completerlib import reset_completer
    except ImportError:
        # reset_completer was added for rel-0.13
        reset_completer = None
    self.configurables.append(self.Completer)
    # Add custom completers to the basic ones built into IPCompleter
    # (StrDispatch is presumably imported at the top of this file — TODO confirm).
    sdisp = self.strdispatchers.get('complete_command', StrDispatch())
    self.strdispatchers['complete_command'] = sdisp
    self.Completer.custom_completers = sdisp
    self.set_hook('complete_command', module_completer, str_key='import')
    self.set_hook('complete_command', module_completer, str_key='from')
    self.set_hook('complete_command', magic_run_completer, str_key='%run')
    self.set_hook('complete_command', cd_completer, str_key='%cd')
    if reset_completer:
        self.set_hook('complete_command', reset_completer, str_key='%reset')
def init_completer(self):
    """Initialize the completion machinery.
    This creates a completer that provides the completions that are
    IPython specific. We use this to supplement PyDev's core code
    completions.
    """
    # PyDev uses its own completer and custom hooks so that it uses
    # most completions from PyDev's core completer which provides
    # extra information.
    # See getCompletions for where the two sets of results are merged
    # Pick the constructor variant matching the installed IPython major version.
    if IPythonRelease._version_major >= 6:
        self.Completer = self._new_completer_600()
    elif IPythonRelease._version_major >= 5:
        self.Completer = self._new_completer_500()
    elif IPythonRelease._version_major >= 2:
        self.Completer = self._new_completer_234()
    elif IPythonRelease._version_major >= 1:
        self.Completer = self._new_completer_100()
    # Jedi completions conflict with PyDev's own merge logic; disable when present.
    if hasattr(self.Completer, 'use_jedi'):
        self.Completer.use_jedi = False
    self.add_completer_hooks()
    if IPythonRelease._version_major <= 3:
        # Only configure readline if we truly are using readline. IPython can
        # do tab-completion over the network, in GUIs, etc, where readline
        # itself may be absent
        if self.has_readline:
            self.set_readline_completer()
#-------------------------------------------------------------------------
# Things related to aliases
#-------------------------------------------------------------------------
def init_alias(self):
    # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
    # ones we don't. So don't use super and instead go right to InteractiveShell
    InteractiveShell.init_alias(self)
#-------------------------------------------------------------------------
# Things related to exiting
#-------------------------------------------------------------------------
def ask_exit(self):
    """ Ask the shell to exit. Can be overiden and used as a callback. """
    # TODO PyDev's console does not have support from the Python side to exit
    # the console. If user forces the exit (with sys.exit()) then the console
    # simply reports errors. e.g.:
    # >>> import sys
    # >>> sys.exit()
    # Failed to create input stream: Connection refused
    # >>>
    # Console already exited with value: 0 while waiting for an answer.
    # Error stream:
    # Output stream:
    # >>>
    #
    # Alternatively if you use the non-IPython shell this is what happens
    # >>> exit()
    # <type 'exceptions.SystemExit'>:None
    # >>>
    # <type 'exceptions.SystemExit'>:None
    # >>>
    #
    super(PyDevTerminalInteractiveShell, self).ask_exit()
    # Tell the user the only supported way out, since the Python side can't exit.
    print('To exit the PyDev Console, terminate the console within IDE.')
#-------------------------------------------------------------------------
# Things related to magics
#-------------------------------------------------------------------------
def init_magics(self):
    """Install the standard IPython magics (no PyDev-specific ones yet)."""
    super(PyDevTerminalInteractiveShell, self).init_magics()
    # TODO Any additional magics for PyDev?
# Register with IPython's ABC so isinstance checks against InteractiveShellABC
# accept PyDevTerminalInteractiveShell.
InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
#=======================================================================================================================
# _PyDevFrontEnd
#=======================================================================================================================
class _PyDevFrontEnd:
    """Facade between the PyDev/PyCharm console protocol and an IPython shell.

    Wraps a (singleton) PyDevTerminalInteractiveShell and exposes the
    small API the console server needs: executing lines, completing,
    and syncing namespaces.
    """

    # IPython version string (release is imported at the top of this file).
    version = release.__version__

    def __init__(self):
        # Create and initialize our IPython instance.
        if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None:
            self.ipython = PyDevTerminalInteractiveShell._instance
        else:
            self.ipython = PyDevTerminalInteractiveShell.instance()
        # _curr_exec_line: count of executed cells; _curr_exec_lines: buffered
        # lines of an incomplete multi-line statement.
        self._curr_exec_line = 0
        self._curr_exec_lines = []

    def show_banner(self):
        """Print IPython's startup banner to the console."""
        self.ipython.show_banner()

    def update(self, globals, locals):
        """Swap the shell's namespaces to the given globals/locals.

        Existing entries in the shell's user namespace are merged into
        *locals* first so previously-defined names survive the swap.
        (Parameter names intentionally shadow the builtins to match the
        console protocol.)
        """
        ns = self.ipython.user_ns
        for key, value in list(ns.items()):
            if key not in locals:
                locals[key] = value
        self.ipython.user_global_ns.clear()
        self.ipython.user_global_ns.update(globals)
        self.ipython.user_ns = locals
        if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'):
            self.ipython.history_manager.save_thread.pydev_do_not_trace = True  # don't trace ipython history saving thread

    def complete(self, string):
        """Return IPython's completion for *string*.

        Returns whatever ``InteractiveShell.complete`` returns (a
        (matched_text, matches) pair), or implicitly None if completion
        raised — getCompletions guards against that with a broad except.
        """
        try:
            if string:
                return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
            else:
                return self.ipython.complete(string, string, 0)
        except:
            # Silence completer exceptions
            pass

    def is_complete(self, string):
        """Return a truthy value when *string* is a complete statement.

        Note: on success this returns ``codeop.compile_command``'s result
        (a code object, or None when more input is needed) rather than a
        strict bool — callers only use it for truthiness.
        """
        # Based on IPython 0.10.1
        if string in ('', '\n'):
            # Prefiltering, eg through ipython0, may return an empty
            # string although some operations have been accomplished. We
            # thus want to consider an empty string as a complete
            # statement.
            return True
        else:
            try:
                # Add line returns here, to make sure that the statement is
                # complete (except if '\' was used).
                # This should probably be done in a different place (like
                # maybe 'prefilter_input' method? For now, this works.
                clean_string = string.rstrip('\n')
                if not clean_string.endswith('\\'):
                    clean_string += '\n\n'
                is_complete = codeop.compile_command(
                    clean_string,
                    "<string>",
                    "exec"
                )
            except Exception:
                # XXX: Hack: return True so that the
                # code gets executed and the error captured.
                is_complete = True
            return is_complete

    def getCompletions(self, text, act_tok):
        """Merge IPython completions with PyDev's detailed completions.

        Returns a list of (name, doc, args, type) tuples; on any failure
        returns an empty list.
        """
        # Get completions from IPython and from PyDev and merge the results
        # IPython only gives context free list of completions, while PyDev
        # gives detailed information about completions.
        try:
            # Completion type codes understood by the IDE side.
            TYPE_IPYTHON = '11'
            TYPE_IPYTHON_MAGIC = '12'
            _line, ipython_completions = self.complete(text)
            from _pydev_bundle._pydev_completer import Completer
            completer = Completer(self.get_namespace(), None)
            ret = completer.complete(act_tok)
            append = ret.append
            ip = self.ipython
            pydev_completions = set([f[0] for f in ret])
            for ipython_completion in ipython_completions:
                # PyCharm was not expecting completions with '%'...
                # Could be fixed in the backend, but it's probably better
                # fixing it at PyCharm.
                # if ipython_completion.startswith('%'):
                #     ipython_completion = ipython_completion[1:]
                if ipython_completion not in pydev_completions:
                    pydev_completions.add(ipython_completion)
                    inf = ip.object_inspect(ipython_completion)
                    if inf['type_name'] == 'Magic function':
                        pydev_type = TYPE_IPYTHON_MAGIC
                    else:
                        pydev_type = TYPE_IPYTHON
                    pydev_doc = inf['docstring']
                    if pydev_doc is None:
                        pydev_doc = ''
                    append((ipython_completion, pydev_doc, '', pydev_type))
            return ret
        except:
            import traceback;traceback.print_exc()
            return []

    def get_namespace(self):
        """Return the shell's user namespace dict."""
        return self.ipython.user_ns

    def clear_buffer(self):
        """Discard any buffered (incomplete) input lines."""
        del self._curr_exec_lines[:]

    def add_exec(self, line):
        """Feed one input line; return True if more input is needed.

        Buffers lines until a complete statement is formed, then runs it
        via ``run_cell``. NOTE(review): the buffered branch runs without
        ``store_history=True`` while the single-line branch passes it —
        looks inconsistent, confirm whether intentional.
        """
        if self._curr_exec_lines:
            self._curr_exec_lines.append(line)
            buf = '\n'.join(self._curr_exec_lines)
            if self.is_complete(buf):
                self._curr_exec_line += 1
                self.ipython.run_cell(buf)
                del self._curr_exec_lines[:]
                return False  # execute complete (no more)
            return True  # needs more
        else:
            if not self.is_complete(line):
                # Did not execute
                self._curr_exec_lines.append(line)
                return True  # needs more
            else:
                self._curr_exec_line += 1
                self.ipython.run_cell(line, store_history=True)
                # hist = self.ipython.history_manager.output_hist_reprs
                # rep = hist.get(self._curr_exec_line, None)
                # if rep is not None:
                #     print(rep)
                return False  # execute complete (no more)

    def is_automagic(self):
        """Return True when IPython automagic is enabled."""
        return self.ipython.automagic

    def get_greeting_msg(self):
        """Return the banner line shown when the console connects."""
        return 'PyDev console: using IPython %s\n' % self.version
class _PyDevFrontEndContainer:
    """Module-level holder for the singleton front end and its endpoint."""
    # The singleton _PyDevFrontEnd instance (created lazily by get_pydev_frontend).
    _instance = None
    # The (host, port) pair the editor hook was last wired to.
    _last_host_port = None
def get_pydev_frontend(pydev_host, pydev_client_port):
    """Return the process-wide _PyDevFrontEnd, creating it on first use.

    Re-wires the 'editor' hook whenever the (host, port) endpoint changes.
    """
    if _PyDevFrontEndContainer._instance is None:
        _PyDevFrontEndContainer._instance = _PyDevFrontEnd()
    if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port):
        _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port
        # Back channel to PyDev to open editors (in the future other
        # info may go back this way. This is the same channel that is
        # used to get stdin, see StdIn in pydev_console_utils)
        _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port)
        # Note: setting the callback directly because setting it with set_hook would actually create a chain instead
        # of ovewriting at each new call).
        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))
    return _PyDevFrontEndContainer._instance

View File

@ -0,0 +1,23 @@
from _pydev_bundle._pydev_saved_modules import threading
# Hack for https://www.brainwy.com/tracker/PyDev/363 (i.e.: calling is_alive() can throw AssertionError under some
# circumstances).
# It is required to debug threads started by start_new_thread in Python 3.4
# Probe a throwaway Thread once at import time to pick the cheapest/safest
# liveness check for this interpreter.
_temp = threading.Thread()
if hasattr(_temp, '_is_stopped'):  # Python 3.x has this
    def is_thread_alive(t):
        # Return True while the thread has not been marked stopped.
        return not t._is_stopped
elif hasattr(_temp, '_Thread__stopped'):  # Python 2.x has this
    def is_thread_alive(t):
        return not t._Thread__stopped
else:
    # Jython wraps a native java thread and thus only obeys the public API.
    def is_thread_alive(t):
        return t.is_alive()
del _temp

View File

@ -0,0 +1,67 @@
from _pydev_bundle._pydev_saved_modules import socket
import sys
# True when running on Jython (platform string contains 'java').
IS_JYTHON = sys.platform.find('java') != -1
# Memoized result of get_localhost(); resolved once per process.
_cache = None
def get_localhost():
    '''
    Return the address to use for the local host, preferring the literal
    '127.0.0.1' over the name 'localhost'.

    On some Windows setups resolving 'localhost' is unreliable and slow,
    so when the resolver maps 'localhost' to 127.0.0.1 we hand out the
    IP directly. The result is computed once and memoized in _cache.
    '''
    # TODO: Needs better investigation!
    global _cache
    if _cache is None:
        resolved = 'localhost'
        try:
            address_infos = socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP)
        except:
            # Some Python builds lack getaddrinfo or SOL_TCP; assume the
            # conventional loopback address in that case.
            resolved = '127.0.0.1'
        else:
            for info in address_infos:
                if info[4][0] == '127.0.0.1':
                    resolved = '127.0.0.1'
                    break
        _cache = resolved
    return _cache
def get_socket_names(n_sockets, close=False):
    """Bind *n_sockets* sockets to ephemeral local ports and return their names.

    Each name is a (host, port) tuple. When *close* is True the sockets
    are closed before returning (so only the names are kept); otherwise
    they stay bound, reserving the ports.
    """
    names = []
    bound_sockets = []
    for _ in range(n_sockets):
        if IS_JYTHON:
            # Although the option which would be pure java *should* work for Jython, the socket being returned is still 0
            # (i.e.: it doesn't give the local port bound, only the original port, which was 0).
            from java.net import ServerSocket
            server_sock = ServerSocket(0)
            bound_sockets.append(server_sock)
            names.append((get_localhost(), server_sock.getLocalPort()))
        else:
            plain_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            plain_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            plain_sock.bind((get_localhost(), 0))
            bound_sockets.append(plain_sock)
            names.append(plain_sock.getsockname())
    if close:
        for s in bound_sockets:
            s.close()
    return names
def get_socket_name(close=False):
    """Convenience wrapper: return a single (host, port) local socket name."""
    (name,) = get_socket_names(1, close)
    return name


if __name__ == '__main__':
    # Quick manual check: print one freshly-bound local socket name.
    print(get_socket_name())

View File

@ -0,0 +1,276 @@
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, SHOW_COMPILE_CYTHON_COMMAND_LINE, NULL, LOG_TIME, \
ForkSafeLock
from contextlib import contextmanager
import traceback
import os
import sys
class _LoggingGlobals(object):
    """Mutable module-level state for the pydevd logging machinery."""
    # Messages already emitted by error_once/exception_once (message -> True).
    _warn_once_map = {}
    # Filename backing _debug_stream when logging to a file (else None).
    _debug_stream_filename = None
    # Stream log messages are written to; NULL (a no-op object) until initialized.
    _debug_stream = NULL
    # Whether initialize_debug_stream has run in this process.
    _debug_stream_initialized = False
    # Serializes (re)initialization across threads and forks.
    _initialize_lock = ForkSafeLock()
def initialize_debug_stream(reinitialize=False):
    '''
    :param bool reinitialize:
        Reinitialize is used to update the debug stream after a fork (thus, if it wasn't
        initialized, we don't need to do anything, just wait for the first regular log call
        to initialize).
    '''
    if reinitialize:
        if not _LoggingGlobals._debug_stream_initialized:
            return
    else:
        if _LoggingGlobals._debug_stream_initialized:
            return

    with _LoggingGlobals._initialize_lock:
        # Initialization is done lazilly, so, it's possible that multiple threads try to initialize
        # logging.

        # Check initial conditions again after obtaining the lock.
        if reinitialize:
            if not _LoggingGlobals._debug_stream_initialized:
                return
        else:
            if _LoggingGlobals._debug_stream_initialized:
                return

        _LoggingGlobals._debug_stream_initialized = True

        # Note: we cannot initialize with sys.stderr because when forking we may end up logging things in 'os' calls.
        _LoggingGlobals._debug_stream = NULL
        _LoggingGlobals._debug_stream_filename = None

        if not DebugInfoHolder.PYDEVD_DEBUG_FILE:
            # No log file configured: log to stderr.
            _LoggingGlobals._debug_stream = sys.stderr
        else:
            # Add pid to the filename.
            try:
                target_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
                debug_file = _compute_filename_with_pid(target_file)
                _LoggingGlobals._debug_stream = open(debug_file, 'w')
                _LoggingGlobals._debug_stream_filename = debug_file
            except Exception:
                _LoggingGlobals._debug_stream = sys.stderr
                # Don't fail when trying to setup logging, just show the exception.
                traceback.print_exc()
def _compute_filename_with_pid(target_file, pid=None):
# Note: used in tests.
dirname = os.path.dirname(target_file)
basename = os.path.basename(target_file)
try:
os.makedirs(dirname)
except Exception:
pass # Ignore error if it already exists.
name, ext = os.path.splitext(basename)
if pid is None:
pid = os.getpid()
return os.path.join(dirname, '%s.%s%s' % (name, pid, ext))
def log_to(log_file:str, log_level:int=3) -> None:
    """Direct pydevd logging to *log_file* at *log_level*.

    Resets the lazily-initialized stream only when the target file
    actually changed, so a subprocess inheriting the same env value
    does not reopen the stream needlessly.
    """
    with _LoggingGlobals._initialize_lock:
        # Can be set directly.
        DebugInfoHolder.DEBUG_TRACE_LEVEL = log_level

        if DebugInfoHolder.PYDEVD_DEBUG_FILE != log_file:
            # Note that we don't need to reset it unless it actually changed
            # (would be the case where it's set as an env var in a new process
            # and a subprocess initializes logging to the same value).
            _LoggingGlobals._debug_stream = NULL
            _LoggingGlobals._debug_stream_filename = None

            DebugInfoHolder.PYDEVD_DEBUG_FILE = log_file
            _LoggingGlobals._debug_stream_initialized = False
def list_log_files(pydevd_debug_file):
    """Return the paths of all per-pid log files derived from *pydevd_debug_file*.

    Matches files in the same directory whose name starts with the base
    name's stem and ends with its extension (the naming scheme produced
    by _compute_filename_with_pid). Returns [] when the directory does
    not exist.
    """
    found = []
    directory, base = os.path.split(pydevd_debug_file)
    if os.path.isdir(directory):
        stem, extension = os.path.splitext(base)
        for entry in os.listdir(directory):
            if entry.startswith(stem) and entry.endswith(extension):
                found.append(os.path.join(directory, entry))
    return found
@contextmanager
def log_context(trace_level, stream):
    '''
    To be used to temporarily change the logging settings.
    '''
    # Snapshot all logging globals, swap in the temporary settings for the
    # duration of the with-block, then restore everything.
    with _LoggingGlobals._initialize_lock:
        original_trace_level = DebugInfoHolder.DEBUG_TRACE_LEVEL
        original_debug_stream = _LoggingGlobals._debug_stream
        original_pydevd_debug_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
        original_debug_stream_filename = _LoggingGlobals._debug_stream_filename
        original_initialized = _LoggingGlobals._debug_stream_initialized

        DebugInfoHolder.DEBUG_TRACE_LEVEL = trace_level
        _LoggingGlobals._debug_stream = stream
        _LoggingGlobals._debug_stream_initialized = True
    try:
        yield
    finally:
        with _LoggingGlobals._initialize_lock:
            DebugInfoHolder.DEBUG_TRACE_LEVEL = original_trace_level
            _LoggingGlobals._debug_stream = original_debug_stream
            DebugInfoHolder.PYDEVD_DEBUG_FILE = original_pydevd_debug_file
            _LoggingGlobals._debug_stream_filename = original_debug_stream_filename
            _LoggingGlobals._debug_stream_initialized = original_initialized
import time

# Timestamp of the previous log call; used to print per-message time deltas when LOG_TIME is on.
_last_log_time = time.time()

# Set to True to show pid in each logged message (usually the file has it, but sometimes it's handy).
_LOG_PID = False
def _pydevd_log(level, msg, *args):
    '''
    Levels are:
    0 most serious warnings/errors (always printed)
    1 warnings/significant events
    2 informational trace
    3 verbose mode
    '''
    if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL:
        # yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something)
        try:
            try:
                if args:
                    msg = msg % args
            except:
                # Bad format string/args: fall back to a lossless representation.
                msg = '%s - %s' % (msg, args)

            if LOG_TIME:
                global _last_log_time
                new_log_time = time.time()
                time_diff = new_log_time - _last_log_time
                _last_log_time = new_log_time
                msg = '%.2fs - %s\n' % (time_diff, msg,)
            else:
                msg = '%s\n' % (msg,)

            if _LOG_PID:
                msg = '<%s> - %s\n' % (os.getpid(), msg,)

            try:
                try:
                    initialize_debug_stream()  # Do it as late as possible
                    _LoggingGlobals._debug_stream.write(msg)
                except TypeError:
                    if isinstance(msg, bytes):
                        # Depending on the StringIO flavor, it may only accept unicode.
                        msg = msg.decode('utf-8', 'replace')
                        _LoggingGlobals._debug_stream.write(msg)
            except UnicodeEncodeError:
                # When writing to the stream it's possible that the string can't be represented
                # in the encoding expected (in this case, convert it to the stream encoding
                # or ascii if we can't find one suitable using a suitable replace).
                encoding = getattr(_LoggingGlobals._debug_stream, 'encoding', 'ascii')
                msg = msg.encode(encoding, 'backslashreplace')
                msg = msg.decode(encoding)
                _LoggingGlobals._debug_stream.write(msg)

            _LoggingGlobals._debug_stream.flush()
        except:
            # Logging must never propagate errors into the debugged program.
            pass
    return True
def _pydevd_log_exception(msg='', *args):
    """Log *msg* at the critical level, then append the current traceback.

    NOTE(review): the bare ``except: raise`` re-raises stream failures to
    the caller — the public ``exception()`` wrapper swallows them.
    """
    if msg or args:
        _pydevd_log(0, msg, *args)
    try:
        initialize_debug_stream()  # Do it as late as possible
        traceback.print_exc(file=_LoggingGlobals._debug_stream)
        _LoggingGlobals._debug_stream.flush()
    except:
        raise
def verbose(msg, *args):
    """Log at level 3 (verbose)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
        _pydevd_log(3, msg, *args)
def debug(msg, *args):
    """Log at level 2 (informational trace)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
        _pydevd_log(2, msg, *args)
def info(msg, *args):
    """Log at level 1 (warnings / significant events)."""
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1:
        _pydevd_log(1, msg, *args)


# 'warn' is an alias of 'info' (same level).
warn = info
def critical(msg, *args):
    """Log at level 0 (always printed)."""
    _pydevd_log(0, msg, *args)
def exception(msg='', *args):
    """Log *msg* plus the current traceback; never raises."""
    try:
        _pydevd_log_exception(msg, *args)
    except:
        pass  # Should never fail (even at interpreter shutdown).


# 'error' is an alias of 'exception'.
error = exception
def error_once(msg, *args):
    """Log *msg* at the critical level, but only the first time this exact
    formatted message is seen in this process."""
    try:
        message = msg % args if args else str(msg)
    except:
        # Formatting failed: keep both pieces so nothing is lost.
        message = '%s - %s' % (msg, args)

    if message not in _LoggingGlobals._warn_once_map:
        _LoggingGlobals._warn_once_map[message] = True
        critical(message)
def exception_once(msg, *args):
    """Log *msg* plus the current traceback, but only the first time this
    exact formatted message is seen in this process."""
    try:
        message = msg % args if args else str(msg)
    except:
        # Formatting failed: keep both pieces so nothing is lost.
        message = '%s - %s' % (msg, args)

    if message not in _LoggingGlobals._warn_once_map:
        _LoggingGlobals._warn_once_map[message] = True
        exception(message)
def debug_once(msg, *args):
    """Log once, gated on verbose level.

    NOTE(review): gated on level >= 3 but delegates to error_once, which
    prints at level 0 — looks intentional (once-only dedup reuse) but confirm.
    """
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
        error_once(msg, *args)
def show_compile_cython_command_line():
    """Print (once) the command line to build the cython speedups, when enabled."""
    if SHOW_COMPILE_CYTHON_COMMAND_LINE:
        dirname = os.path.dirname(os.path.dirname(__file__))
        error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build.",
                   sys.executable, os.path.join(dirname, 'setup_pydevd_cython.py'))

View File

@ -0,0 +1,216 @@
from __future__ import nested_scopes
from _pydev_bundle._pydev_saved_modules import threading
import os
from _pydev_bundle import pydev_log
def set_trace_in_qt():
    """Enable pydevd tracing for the current (Qt-spawned) thread, if a debugger is active."""
    from _pydevd_bundle.pydevd_comm import get_global_debugger
    py_db = get_global_debugger()
    if py_db is not None:
        threading.current_thread()  # Create the dummy thread for qt.
        py_db.enable_tracing()
# Guard so Qt is only monkeypatched once per process.
_patched_qt = False
def patch_qt(qt_support_mode):
    '''
    This method patches qt (PySide2, PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.

    :param qt_support_mode: one of 'auto', 'pyside2', 'pyside', 'pyqt5',
        'pyqt4', True/'True' (treated as 'auto'), or a falsy value to
        disable Qt support entirely.
    '''
    if not qt_support_mode:
        return

    if qt_support_mode is True or qt_support_mode == 'True':
        # do not break backward compatibility
        qt_support_mode = 'auto'

    if qt_support_mode == 'auto':
        qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto')

    # Avoid patching more than once
    global _patched_qt
    if _patched_qt:
        return

    pydev_log.debug('Qt support mode: %s', qt_support_mode)

    _patched_qt = True

    if qt_support_mode == 'auto':
        patch_qt_on_import = None
        # Probe the bindings in preference order; first importable wins.
        try:
            import PySide2  # @UnresolvedImport @UnusedImport
            qt_support_mode = 'pyside2'
        except:
            try:
                # BUGFIX: was 'import Pyside' (wrong case), which can never
                # succeed — the module is named 'PySide', so PySide was
                # silently skipped during auto-detection.
                import PySide  # @UnresolvedImport @UnusedImport
                qt_support_mode = 'pyside'
            except:
                try:
                    import PyQt5  # @UnresolvedImport @UnusedImport
                    qt_support_mode = 'pyqt5'
                except:
                    try:
                        import PyQt4  # @UnresolvedImport @UnusedImport
                        qt_support_mode = 'pyqt4'
                    except:
                        # No Qt binding available: nothing to patch.
                        return

    if qt_support_mode == 'pyside2':
        try:
            import PySide2.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide2.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyside':
        try:
            import PySide.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyqt5':
        try:
            import PyQt5.QtCore  # @UnresolvedImport
            _internal_patch_qt(PyQt5.QtCore)
        except:
            return

    elif qt_support_mode == 'pyqt4':
        # Ok, we have an issue here:
        # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
        # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
        # Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
        # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
        # as they only implements v2).
        patch_qt_on_import = 'PyQt4'

        def get_qt_core_module():
            import PyQt4.QtCore  # @UnresolvedImport
            return PyQt4.QtCore

        _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module)

    else:
        raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,))
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
    # I don't like this approach very much as we have to patch __import__, but I like even less
    # asking the user to configure something in the client side...
    # So, our approach is to patch PyQt4 right before the user tries to import it (at which
    # point he should've set the sip api version properly already anyways).

    pydev_log.debug('Setting up Qt post-import monkeypatch.')

    dotted = patch_qt_on_import + '.'
    original_import = __import__

    from _pydev_bundle._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module

    patch_sys_module()
    patch_reload()

    def patched_import(name, *args, **kwargs):
        # First import of the target package (or a submodule): undo all the
        # import patching and apply the actual Qt patch, exactly once.
        if patch_qt_on_import == name or name.startswith(dotted):
            # 'builtins' is a closure variable from the enclosing import below.
            builtins.__import__ = original_import
            cancel_patches_in_sys_module()
            _internal_patch_qt(get_qt_core_module())  # Patch it only when the user would import the qt module
        return original_import(name, *args, **kwargs)

    import builtins  # Py3
    builtins.__import__ = patched_import
def _internal_patch_qt(QtCore, qt_support_mode='auto'):
    """Replace QtCore.QThread/QRunnable with wrappers that enable pydevd tracing.

    The wrappers intercept run()/started so that any thread Qt spawns
    calls set_trace_in_qt() before user code executes.
    """
    pydev_log.debug('Patching Qt: %s', QtCore)

    _original_thread_init = QtCore.QThread.__init__
    _original_runnable_init = QtCore.QRunnable.__init__
    _original_QThread = QtCore.QThread

    class FuncWrapper:
        # Wraps a user slot so tracing is enabled before it runs.

        def __init__(self, original):
            self._original = original

        def __call__(self, *args, **kwargs):
            set_trace_in_qt()
            return self._original(*args, **kwargs)

    class StartedSignalWrapper(QtCore.QObject):  # Wrapper for the QThread.started signal

        try:
            _signal = QtCore.Signal()  # @UndefinedVariable
        except:
            # PyQt names it pyqtSignal instead of Signal.
            _signal = QtCore.pyqtSignal()  # @UndefinedVariable

        def __init__(self, thread, original_started):
            QtCore.QObject.__init__(self)
            self.thread = thread
            self.original_started = original_started
            if qt_support_mode in ('pyside', 'pyside2'):
                # On PySide we keep the original signal and wrap slots in connect().
                self._signal = original_started
            else:
                self._signal.connect(self._on_call)
                self.original_started.connect(self._signal)

        def connect(self, func, *args, **kwargs):
            if qt_support_mode in ('pyside', 'pyside2'):
                return self._signal.connect(FuncWrapper(func), *args, **kwargs)
            else:
                return self._signal.connect(func, *args, **kwargs)

        def disconnect(self, *args, **kwargs):
            return self._signal.disconnect(*args, **kwargs)

        def emit(self, *args, **kwargs):
            return self._signal.emit(*args, **kwargs)

        def _on_call(self, *args, **kwargs):
            set_trace_in_qt()

    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread

        def __init__(self, *args, **kwargs):
            _original_thread_init(self, *args, **kwargs)

            # In PyQt5 the program hangs when we try to call original run method of QThread class.
            # So we need to distinguish instances of QThread class and instances of QThread inheritors.
            if self.__class__.run == _original_QThread.run:
                self.run = self._exec_run
            else:
                self._original_run = self.run
                self.run = self._new_run
            self._original_started = self.started
            self.started = StartedSignalWrapper(self, self.started)

        def _exec_run(self):
            set_trace_in_qt()
            self.exec_()
            return None

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable

        def __init__(self, *args, **kwargs):
            _original_runnable_init(self, *args, **kwargs)
            self._original_run = self.run
            self.run = self._new_run

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    QtCore.QThread = ThreadWrapper
    QtCore.QRunnable = RunnableWrapper

View File

@ -0,0 +1,35 @@
def overrides(method):
    '''
    Decorator factory marking a method as an override of *method*.

    Meant to be used as

    class B:
        @overrides(A.m1)
        def m1(self):
            pass

    Raises AssertionError at definition time when the names do not match,
    and copies the overridden method's docstring when the new one has none.
    '''

    def wrapper(func):
        if func.__name__ != method.__name__:
            raise AssertionError(
                "Wrong @override: %r expected, but overwriting %r." % (func.__name__, method.__name__))
        if func.__doc__ is None:
            func.__doc__ = method.__doc__
        return func

    return wrapper
def implements(method):
    '''
    Decorator factory marking a function as the implementation of *method*.

    Raises AssertionError at definition time when the names do not match,
    and copies the interface method's docstring when the new one has none.
    '''

    def wrapper(func):
        if func.__name__ != method.__name__:
            raise AssertionError(
                "Wrong @implements: %r expected, but implementing %r." % (func.__name__, method.__name__))
        if func.__doc__ is None:
            func.__doc__ = method.__doc__
        return func

    return wrapper

View File

@ -0,0 +1,180 @@
"""
The UserModuleDeleter and runfile methods are copied from
Spyder and carry their own license agreement.
http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py
Spyder License Agreement (MIT License)
--------------------------------------
Copyright (c) 2009-2012 Pierre Raybaut
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
import os
from _pydev_bundle._pydev_execfile import execfile
# The following classes and functions are mainly intended to be used from
# an interactive Python session
class UserModuleDeleter:
    """
    User Module Deleter (UMD) aims at deleting user modules
    to force Python to deeply reload them during import

    pathlist [list]: ignore list in terms of module path
    namelist [list]: ignore list in terms of module name
    """

    def __init__(self, namelist=None, pathlist=None):
        if namelist is None:
            namelist = []
        self.namelist = namelist
        if pathlist is None:
            pathlist = []
        self.pathlist = pathlist
        try:
            # ignore all files in org.python.pydev/pysrc
            # (removed unused 'inspect' import that was piggybacked here)
            import pydev_pysrc
            self.pathlist.append(os.path.dirname(pydev_pysrc.__file__))
        except:
            pass
        # Modules present at construction time are never deleted by run().
        self.previous_modules = list(sys.modules.keys())

    def is_module_ignored(self, modname, modpath):
        """Return a truthy value when the module should NOT be deleted.

        Returns True for modules under sys.prefix or any ignored path;
        otherwise returns the (possibly empty) set of name components that
        intersect the ignore name list — callers rely only on truthiness.
        """
        for path in [sys.prefix] + self.pathlist:
            if modpath.startswith(path):
                return True
        return set(modname.split('.')) & set(self.namelist)

    def run(self, verbose=False):
        """
        Del user modules to force Python to deeply reload them

        Do not del modules which are considered as system modules, i.e.
        modules installed in subdirectories of Python interpreter's binary
        Do not del C modules
        """
        log = []
        modules_copy = dict(sys.modules)
        for modname, module in modules_copy.items():
            # (removed leftover debug print for a module literally named 'aaaaa')
            if modname not in self.previous_modules:
                modpath = getattr(module, '__file__', None)
                if modpath is None:
                    # *module* is a C module that is statically linked into the
                    # interpreter. There is no way to know its path, so we
                    # choose to ignore it.
                    continue
                if not self.is_module_ignored(modname, modpath):
                    log.append(modname)
                    del sys.modules[modname]
        if verbose and log:
            print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted",
                                                     ": " + ", ".join(log)))
# Lazily-created singleton UserModuleDeleter used by runfile().
__umd__ = None

# Optional callback returning the interpreter's globals namespace (see _set_globals_function).
_get_globals_callback = None
def _set_globals_function(get_globals):
    """Register *get_globals* as the provider of the interpreter's globals namespace."""
    global _get_globals_callback
    _get_globals_callback = get_globals
def _get_globals():
    """Return current Python interpreter globals namespace"""
    if _get_globals_callback is not None:
        return _get_globals_callback()
    else:
        try:
            from __main__ import __dict__ as namespace
        except ImportError:
            try:
                # The import fails on IronPython
                import __main__
                namespace = __main__.__dict__
            except:
                # NOTE(review): bare 'namespace' expression — if reached with
                # 'namespace' unbound this raises NameError; looks like it was
                # meant to be an assignment. Kept as-is; confirm upstream intent.
                namespace
        shell = namespace.get('__ipythonshell__')
        if shell is not None and hasattr(shell, 'user_ns'):
            # IPython 0.12+ kernel
            return shell.user_ns
        else:
            # Python interpreter
            return namespace
    return namespace
def runfile(filename, args=None, wdir=None, namespace=None):
    """
    Run filename
    args: command line arguments (string)
    wdir: working directory

    Side effects: mutates sys.argv, may chdir to *wdir*, and temporarily
    rebinds __file__ in the target namespace. When PYDEV_UMD_ENABLED is
    'true', previously imported user modules are deleted first so they
    are deeply reloaded.
    """
    try:
        if hasattr(filename, 'decode'):
            filename = filename.decode('utf-8')
    except (UnicodeError, TypeError):
        pass
    global __umd__
    if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true":
        if __umd__ is None:
            namelist = os.environ.get("PYDEV_UMD_NAMELIST", None)
            if namelist is not None:
                namelist = namelist.split(',')
            __umd__ = UserModuleDeleter(namelist=namelist)
        else:
            verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true"
            __umd__.run(verbose=verbose)
    if args is not None and not isinstance(args, (bytes, str)):
        raise TypeError("expected a character buffer object")
    if namespace is None:
        namespace = _get_globals()
    # Remember the caller's __file__ so it can be restored afterwards.
    if '__file__' in namespace:
        old_file = namespace['__file__']
    else:
        old_file = None
    namespace['__file__'] = filename
    sys.argv = [filename]
    if args is not None:
        for arg in args.split():
            sys.argv.append(arg)
    if wdir is not None:
        try:
            if hasattr(wdir, 'decode'):
                wdir = wdir.decode('utf-8')
        except (UnicodeError, TypeError):
            pass
        os.chdir(wdir)
    # execfile comes from _pydev_bundle._pydev_execfile (imported at file top).
    execfile(filename, namespace)
    sys.argv = ['']
    if old_file is None:
        del namespace['__file__']
    else:
        namespace['__file__'] = old_file

View File

@ -0,0 +1,16 @@
import sys
def versionok_for_gui():
    ''' Return True if running Python is suitable for GUI Event Integration and deeper IPython integration '''
    # We require Python 2.6+, excluding 3.0/3.1 ...
    if sys.hexversion < 0x02060000:
        return False
    if 0x03000000 <= sys.hexversion < 0x03020000:
        return False
    # Not supported under Jython nor IronPython
    if sys.platform.startswith(("java", 'cli')):
        return False
    return True

View File

@ -0,0 +1,857 @@
from __future__ import nested_scopes
import fnmatch
import os.path
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
import re
import time
#=======================================================================================================================
# Configuration
#=======================================================================================================================
class Configuration:
    """Value object holding every option accepted by the pydev test runner."""

    def __init__(
        self,
        files_or_dirs='',
        verbosity=2,
        include_tests=None,
        tests=None,
        port=None,
        files_to_tests=None,
        jobs=1,
        split_jobs='tests',
        coverage_output_dir=None,
        coverage_include=None,
        coverage_output_file=None,
        exclude_files=None,
        exclude_tests=None,
        include_files=None,
        django=False,
    ):
        self.files_or_dirs = files_or_dirs
        self.verbosity = verbosity
        self.include_tests = include_tests
        self.tests = tests
        self.port = port
        self.files_to_tests = files_to_tests
        self.jobs = jobs
        self.split_jobs = split_jobs
        self.django = django

        # The pattern options must be sequences (they are iterated with fnmatch later).
        if include_tests:
            assert isinstance(include_tests, (list, tuple))

        if exclude_files:
            assert isinstance(exclude_files, (list, tuple))

        if exclude_tests:
            assert isinstance(exclude_tests, (list, tuple))

        if include_files:
            # Consistency fix: include_files was the only pattern option not validated.
            assert isinstance(include_files, (list, tuple))

        self.exclude_files = exclude_files
        self.include_files = include_files
        self.exclude_tests = exclude_tests

        self.coverage_output_dir = coverage_output_dir
        self.coverage_include = coverage_include
        self.coverage_output_file = coverage_output_file

    def __str__(self):
        """Readable dump of every option (used when verbosity > 5)."""
        return '''Configuration
 - files_or_dirs: %s
 - verbosity: %s
 - tests: %s
 - port: %s
 - files_to_tests: %s
 - jobs: %s
 - split_jobs: %s

 - include_files: %s
 - include_tests: %s

 - exclude_files: %s
 - exclude_tests: %s

 - coverage_output_dir: %s
 - coverage_include_dir: %s
 - coverage_output_file: %s

 - django: %s
''' % (
            self.files_or_dirs,
            self.verbosity,
            self.tests,
            self.port,
            self.files_to_tests,
            self.jobs,
            self.split_jobs,
            self.include_files,
            self.include_tests,
            self.exclude_files,
            self.exclude_tests,
            self.coverage_output_dir,
            self.coverage_include,
            self.coverage_output_file,
            self.django,
        )
#=======================================================================================================================
# parse_cmdline
#=======================================================================================================================
def parse_cmdline(argv=None):
    """
    Parses the command line and returns a Configuration with test directories, verbosity, test filter and test suites.

    usage:
        runfiles.py -v|--verbosity <level> -t|--tests <Test.test1,Test2> dirs|files

    Multiprocessing options:
        jobs=number (with the number of jobs to be used to run the tests)
        split_jobs='module'|'tests'
            if == module, a given job will always receive all the tests from a module
            if == tests, the tests will be split independently of their originating module (default)

    --exclude_files = comma-separated list of patterns with files to exclude (fnmatch style)
    --include_files = comma-separated list of patterns with files to include (fnmatch style)
    --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style)

    Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored!
    """
    if argv is None:
        argv = sys.argv

    verbosity = 2
    include_tests = None
    tests = None
    port = None
    jobs = 1
    split_jobs = 'tests'
    files_to_tests = {}
    coverage_output_dir = None
    coverage_include = None
    exclude_files = None
    exclude_tests = None
    include_files = None
    django = False

    from _pydev_bundle._pydev_getopt import gnu_getopt
    optlist, dirs = gnu_getopt(
        argv[1:], "",
        [
            "verbosity=",
            "tests=",
            "port=",
            "config_file=",
            "jobs=",
            "split_jobs=",
            "include_tests=",
            "include_files=",
            "exclude_files=",
            "exclude_tests=",
            "coverage_output_dir=",
            "coverage_include=",
            "django="
        ]
    )

    for opt, value in optlist:
        if opt in ("-v", "--verbosity"):
            verbosity = value  # converted to int after the loop

        elif opt in ("-p", "--port"):
            port = int(value)

        elif opt in ("-j", "--jobs"):
            jobs = int(value)

        elif opt in ("-s", "--split_jobs"):
            split_jobs = value
            if split_jobs not in ('module', 'tests'):
                raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,))

        elif opt in ("-d", "--coverage_output_dir",):
            coverage_output_dir = value.strip()

        elif opt in ("-i", "--coverage_include",):
            coverage_include = value.strip()

        elif opt in ("-I", "--include_tests"):
            include_tests = value.split(',')

        elif opt in ("-E", "--exclude_files"):
            exclude_files = value.split(',')

        elif opt in ("-F", "--include_files"):
            include_files = value.split(',')

        elif opt in ("-e", "--exclude_tests"):
            exclude_tests = value.split(',')

        elif opt in ("-t", "--tests"):
            tests = value.split(',')

        elif opt in ("--django",):
            django = value.strip() in ['true', 'True', '1']

        elif opt in ("-c", "--config_file"):
            # Config file: each line is "<file>|<TestClass.test_name>".
            config_file = value.strip()
            if os.path.exists(config_file):
                # Fix: use a context manager instead of manual open/close.
                with open(config_file, 'r') as f:
                    config_file_contents = f.read()

                if config_file_contents:
                    config_file_contents = config_file_contents.strip()

                if config_file_contents:
                    for line in config_file_contents.splitlines():
                        file_and_test = line.split('|')
                        if len(file_and_test) == 2:
                            file, test = file_and_test
                            if file in files_to_tests:
                                files_to_tests[file].append(test)
                            else:
                                files_to_tests[file] = [test]

            else:
                sys.stderr.write('Could not find config file: %s\n' % (config_file,))

    # Idiom fix: isinstance instead of comparing type objects.
    if not isinstance(dirs, list):
        dirs = [dirs]

    ret_dirs = []
    for d in dirs:
        if '|' in d:
            # paths may come from the ide separated by |
            ret_dirs.extend(d.split('|'))
        else:
            ret_dirs.append(d)

    verbosity = int(verbosity)

    if tests:
        if verbosity > 4:
            sys.stdout.write('--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n')
        exclude_files = exclude_tests = include_files = None

    config = Configuration(
        ret_dirs,
        verbosity,
        include_tests,
        tests,
        port,
        files_to_tests,
        jobs,
        split_jobs,
        coverage_output_dir,
        coverage_include,
        exclude_files=exclude_files,
        exclude_tests=exclude_tests,
        include_files=include_files,
        django=django,
    )

    if verbosity > 5:
        sys.stdout.write(str(config) + '\n')

    return config
#=======================================================================================================================
# PydevTestRunner
#=======================================================================================================================
class PydevTestRunner(object):
    """ finds and runs a file or directory of files as a unit test """

    # Patterns recognized as python source files.
    __py_extensions = ["*.py", "*.pyw"]
    # Patterns never collected as test files.
    __exclude_files = ["__init__.*"]

    # Sanity check: only these attributes may be written on instances.
    __slots__ = [
        'verbosity',  # Always used
        'files_to_tests',  # If this one is given, the ones below are not used
        'files_or_dirs',  # Files or directories received in the command line
        'include_tests',  # The filter used to collect the tests
        'tests',  # Strings with the tests to be run
        'jobs',  # Integer with the number of jobs that should be used to run the test cases
        'split_jobs',  # String with 'tests' or 'module' (how should the jobs be split)
        'configuration',
        'coverage',
    ]
def __init__(self, configuration):
self.verbosity = configuration.verbosity
self.jobs = configuration.jobs
self.split_jobs = configuration.split_jobs
files_to_tests = configuration.files_to_tests
if files_to_tests:
self.files_to_tests = files_to_tests
self.files_or_dirs = list(files_to_tests.keys())
self.tests = None
else:
self.files_to_tests = {}
self.files_or_dirs = configuration.files_or_dirs
self.tests = configuration.tests
self.configuration = configuration
self.__adjust_path()
def __adjust_path(self):
""" add the current file or directory to the python path """
path_to_append = None
for n in range(len(self.files_or_dirs)):
dir_name = self.__unixify(self.files_or_dirs[n])
if os.path.isdir(dir_name):
if not dir_name.endswith("/"):
self.files_or_dirs[n] = dir_name + "/"
path_to_append = os.path.normpath(dir_name)
elif os.path.isfile(dir_name):
path_to_append = os.path.dirname(dir_name)
else:
if not os.path.exists(dir_name):
block_line = '*' * 120
sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, dir_name, block_line))
return
msg = ("unknown type. \n%s\nshould be file or a directory.\n" % (dir_name))
raise RuntimeError(msg)
if path_to_append is not None:
# Add it as the last one (so, first things are resolved against the default dirs and
# if none resolves, then we try a relative import).
sys.path.append(path_to_append)
def __is_valid_py_file(self, fname):
""" tests that a particular file contains the proper file extension
and is not in the list of files to exclude """
is_valid_fname = 0
for invalid_fname in self.__class__.__exclude_files:
is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname))
if_valid_ext = 0
for ext in self.__class__.__py_extensions:
if_valid_ext += int(fnmatch.fnmatch(fname, ext))
return is_valid_fname > 0 and if_valid_ext > 0
def __unixify(self, s):
""" stupid windows. converts the backslash to forwardslash for consistency """
return os.path.normpath(s).replace(os.sep, "/")
def __importify(self, s, dir=False):
""" turns directory separators into dots and removes the ".py*" extension
so the string can be used as import statement """
if not dir:
dirname, fname = os.path.split(s)
if fname.count('.') > 1:
# if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it...
return
imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]]
if len(imp_stmt_pieces[0]) == 0:
imp_stmt_pieces = imp_stmt_pieces[1:]
return ".".join(imp_stmt_pieces)
else: # handle dir
return s.replace("\\", "/").replace("/", ".")
def __add_files(self, pyfiles, root, files):
""" if files match, appends them to pyfiles. used by os.path.walk fcn """
for fname in files:
if self.__is_valid_py_file(fname):
name_without_base_dir = self.__unixify(os.path.join(root, fname))
pyfiles.append(name_without_base_dir)
def find_import_files(self):
    """ return a list of files to import """
    if self.files_to_tests:
        # Explicit mapping given: the files are already known.
        # NOTE(review): this is a dict_keys view, not a list -- callers appear to only iterate it; confirm.
        pyfiles = self.files_to_tests.keys()
    else:
        pyfiles = []

        for base_dir in self.files_or_dirs:
            if os.path.isdir(base_dir):
                for root, dirs, files in os.walk(base_dir):
                    # Note: handling directories that should be excluded from the search because
                    # they don't have __init__.py
                    exclude = {}
                    for d in dirs:
                        for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']:
                            if os.path.exists(os.path.join(root, d, init).replace('\\', '/')):
                                break
                        else:
                            # No __init__ variant found: not a package, exclude it.
                            exclude[d] = 1

                    if exclude:
                        new = []
                        for d in dirs:
                            if d not in exclude:
                                new.append(d)
                        # Prune in place so os.walk does not descend into excluded dirs.
                        dirs[:] = new

                    self.__add_files(pyfiles, root, files)

            elif os.path.isfile(base_dir):
                pyfiles.append(base_dir)

    if self.configuration.exclude_files or self.configuration.include_files:
        ret = []
        for f in pyfiles:
            add = True
            basename = os.path.basename(f)
            if self.configuration.include_files:
                # include_files is a whitelist: the basename must match at least one pattern.
                add = False
                for pat in self.configuration.include_files:
                    if fnmatch.fnmatchcase(basename, pat):
                        add = True
                        break

            if not add:
                if self.verbosity > 3:
                    sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files))

            elif self.configuration.exclude_files:
                # exclude_files is a blacklist applied after the whitelist.
                for pat in self.configuration.exclude_files:
                    if fnmatch.fnmatchcase(basename, pat):
                        if self.verbosity > 3:
                            sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat))

                        elif self.verbosity > 2:
                            sys.stdout.write('Skipped file: %s\n' % (f,))

                        add = False
                        break

            if add:
                if self.verbosity > 3:
                    sys.stdout.write('Adding file: %s for test discovery.\n' % (f,))
                ret.append(f)

        pyfiles = ret

    return pyfiles
def __get_module_from_str(self, modname, print_exception, pyfile):
    """ Import the module in the given import path.
    * Returns the "final" module, so importing "coilib40.subject.visu"
    returns the "visu" module, not the "coilib40" as returned by __import__

    On failure returns None; when print_exception is true the import error is
    captured and reported to the IDE as a test error for *pyfile*. """
    try:
        mod = __import__(modname)
        # __import__ returns the top-level package: walk down to the leaf module.
        for part in modname.split('.')[1:]:
            mod = getattr(mod, part)
        return mod
    except:
        if print_exception:
            from _pydev_runfiles import pydev_runfiles_xml_rpc
            from _pydevd_bundle import pydevd_io
            # Capture stdout/stderr while printing the traceback so the text can
            # be forwarded to the IDE instead of just being lost in the console.
            buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
            buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
            try:
                import traceback;traceback.print_exc()
                sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile))
            finally:
                pydevd_io.end_redirect('stderr')
                pydevd_io.end_redirect('stdout')

            pydev_runfiles_xml_rpc.notifyTest(
                'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0)

        return None
def remove_duplicates_keeping_order(self, seq):
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
def find_modules_from_files(self, pyfiles):
    """ returns a list of (pyfile, module, import_string) tuples given a list of files """
    # let's make sure that the paths we want are in the pythonpath...
    imports = [(s, self.__importify(s)) for s in pyfiles]

    sys_path = [os.path.normpath(path) for path in sys.path]
    sys_path = self.remove_duplicates_keeping_order(sys_path)

    # Dotted versions of every sys.path entry, used to strip the root prefix below.
    system_paths = []
    for s in sys_path:
        system_paths.append(self.__importify(s, True))

    ret = []
    for pyfile, imp in imports:
        if imp is None:
            continue  # can happen if a file is not a valid module
        choices = []
        for s in system_paths:
            # NOTE(review): plain string prefix match -- a root "a.b" also matches "a.bc"; confirm acceptable.
            if imp.startswith(s):
                add = imp[len(s) + 1:]
                if add:
                    choices.append(add)
                # sys.stdout.write(' ' + add + ' ')

        if not choices:
            sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp)
        else:
            for i, import_str in enumerate(choices):
                # Only the last candidate reports its import error to the IDE.
                print_exception = i == len(choices) - 1
                mod = self.__get_module_from_str(import_str, print_exception, pyfile)
                if mod is not None:
                    ret.append((pyfile, mod, import_str))
                    break

    return ret
#===================================================================================================================
# GetTestCaseNames
#===================================================================================================================
class GetTestCaseNames:
"""Yes, we need a class for that (cannot use outer context on jython 2.1)"""
def __init__(self, accepted_classes, accepted_methods):
self.accepted_classes = accepted_classes
self.accepted_methods = accepted_methods
def __call__(self, testCaseClass):
"""Return a sorted sequence of method names found within testCaseClass"""
testFnNames = []
className = testCaseClass.__name__
if className in self.accepted_classes:
for attrname in dir(testCaseClass):
# If a class is chosen, we select all the 'test' methods'
if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'):
testFnNames.append(attrname)
else:
for attrname in dir(testCaseClass):
# If we have the class+method name, we must do a full check and have an exact match.
if className + '.' + attrname in self.accepted_methods:
if hasattr(getattr(testCaseClass, attrname), '__call__'):
testFnNames.append(attrname)
# sorted() is not available in jython 2.1
testFnNames.sort()
return testFnNames
def _decorate_test_suite(self, suite, pyfile, module_name):
import unittest
if isinstance(suite, unittest.TestSuite):
add = False
suite.__pydev_pyfile__ = pyfile
suite.__pydev_module_name__ = module_name
for t in suite._tests:
t.__pydev_pyfile__ = pyfile
t.__pydev_module_name__ = module_name
if self._decorate_test_suite(t, pyfile, module_name):
add = True
return add
elif isinstance(suite, unittest.TestCase):
return True
else:
return False
def find_tests_from_modules(self, file_and_modules_and_module_name):
    """ returns the unittests given a list of (pyfile, module, import_string) tuples """
    # Use our own suite!
    from _pydev_runfiles import pydev_runfiles_unittest
    import unittest
    unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite
    loader = unittest.TestLoader()

    ret = []
    if self.files_to_tests:
        # Explicit file -> test-names mapping: restrict the loader per file.
        for pyfile, m, module_name in file_and_modules_and_module_name:
            accepted_classes = {}
            accepted_methods = {}
            tests = self.files_to_tests[pyfile]
            for t in tests:
                accepted_methods[t] = t

            loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

            suite = loader.loadTestsFromModule(m)
            if self._decorate_test_suite(suite, pyfile, module_name):
                ret.append(suite)
        return ret

    if self.tests:
        # --tests filter: "Class" accepts the whole class, "Class.method" a single test.
        accepted_classes = {}
        accepted_methods = {}

        for t in self.tests:
            splitted = t.split('.')
            if len(splitted) == 1:
                # a test class
                accepted_classes[t] = t

            elif len(splitted) == 2:
                # a test method
                accepted_methods[t] = t

        loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

    for pyfile, m, module_name in file_and_modules_and_module_name:
        suite = loader.loadTestsFromModule(m)
        if self._decorate_test_suite(suite, pyfile, module_name):
            ret.append(suite)

    return ret
def filter_tests(self, test_objs, internal_call=False):
    """ based on a filter name, only return those tests that have
    the test case names that match (applies --tests, include_tests and exclude_tests) """
    import unittest
    if not internal_call:
        if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests:
            # No need to filter if we have nothing to filter!
            return test_objs

        if self.verbosity > 1:
            if self.configuration.include_tests:
                sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,))

            if self.tests:
                sys.stdout.write('Tests to run: %s\n' % (self.tests,))

            if self.configuration.exclude_tests:
                sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,))

    test_suite = []
    for test_obj in test_objs:

        if isinstance(test_obj, unittest.TestSuite):
            # Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests).
            if test_obj._tests:
                test_obj._tests = self.filter_tests(test_obj._tests, True)
                if test_obj._tests:  # Only add the suite if we still have tests there.
                    test_suite.append(test_obj)

        elif isinstance(test_obj, unittest.TestCase):
            try:
                testMethodName = test_obj._TestCase__testMethodName
            except AttributeError:
                # changed in python 2.5
                testMethodName = test_obj._testMethodName

            # Filter order: exclude_tests first, then --tests, then include_tests.
            add = True
            if self.configuration.exclude_tests:
                for pat in self.configuration.exclude_tests:
                    if fnmatch.fnmatchcase(testMethodName, pat):
                        if self.verbosity > 3:
                            sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (testMethodName, pat))

                        elif self.verbosity > 2:
                            sys.stdout.write('Skipped test: %s\n' % (testMethodName,))

                        add = False
                        break

            if add:
                if self.__match_tests(self.tests, test_obj, testMethodName):
                    include = True
                    if self.configuration.include_tests:
                        include = False
                        for pat in self.configuration.include_tests:
                            if fnmatch.fnmatchcase(testMethodName, pat):
                                include = True
                                break
                    if include:
                        test_suite.append(test_obj)
                    else:
                        if self.verbosity > 3:
                            sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % (
                                testMethodName, self.configuration.include_tests,))
    return test_suite
def iter_tests(self, test_objs):
# Note: not using yield because of Jython 2.1.
import unittest
tests = []
for test_obj in test_objs:
if isinstance(test_obj, unittest.TestSuite):
tests.extend(self.iter_tests(test_obj._tests))
elif isinstance(test_obj, unittest.TestCase):
tests.append(test_obj)
return tests
def list_test_names(self, test_objs):
names = []
for tc in self.iter_tests(test_objs):
try:
testMethodName = tc._TestCase__testMethodName
except AttributeError:
# changed in python 2.5
testMethodName = tc._testMethodName
names.append(testMethodName)
return names
def __match_tests(self, tests, test_case, test_method_name):
if not tests:
return 1
for t in tests:
class_and_method = t.split('.')
if len(class_and_method) == 1:
# only class name
if class_and_method[0] == test_case.__class__.__name__:
return 1
elif len(class_and_method) == 2:
if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name:
return 1
return 0
def __match(self, filter_list, name):
""" returns whether a test name matches the test filter """
if filter_list is None:
return 1
for f in filter_list:
if re.match(f, name):
return 1
return 0
def run_tests(self, handle_coverage=True):
    """Find, filter and run all tests, reporting progress/results to the IDE.

    handle_coverage: when True, coverage support is started/stopped around the run.
    """
    sys.stdout.write("Finding files... ")
    files = self.find_import_files()
    if self.verbosity > 3:
        sys.stdout.write('%s ... done.\n' % (self.files_or_dirs))
    else:
        sys.stdout.write('done.\n')
    sys.stdout.write("Importing test modules ... ")

    # Fix: coverage_files must always be bound -- the parallel branch in the
    # nested run_tests below references it even when handle_coverage is False
    # (previously a NameError with handle_coverage=False and jobs > 1).
    coverage_files, coverage = [], None
    if handle_coverage:
        coverage_files, coverage = start_coverage_support(self.configuration)

    file_and_modules_and_module_name = self.find_modules_from_files(files)
    sys.stdout.write("done.\n")

    all_tests = self.find_tests_from_modules(file_and_modules_and_module_name)
    all_tests = self.filter_tests(all_tests)

    from _pydev_runfiles import pydev_runfiles_unittest
    test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests)
    from _pydev_runfiles import pydev_runfiles_xml_rpc
    pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases())

    start_time = time.time()

    def run_tests():
        """Run the collected suite, in parallel when more than one job was requested."""
        executed_in_parallel = False
        if self.jobs > 1:
            from _pydev_runfiles import pydev_runfiles_parallel

            # What may happen is that the number of jobs needed is lower than the number of jobs requested
            # (e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will
            # return False and won't run any tests.
            executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel(
                all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include)

        if not executed_in_parallel:
            # If in coverage, we don't need to pass anything here (coverage is already enabled for this execution).
            runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity)
            sys.stdout.write('\n')
            runner.run(test_suite)

    if self.configuration.django:
        # Let django set up/tear down its test environment around our run.
        get_django_test_suite_runner()(run_tests).run_tests([])
    else:
        run_tests()

    if handle_coverage:
        coverage.stop()
        coverage.save()

    total_time = 'Finished in: %.2f secs.' % (time.time() - start_time,)
    pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time)
# Cache for the lazily-built django runner class (built on first use).
DJANGO_TEST_SUITE_RUNNER = None

def get_django_test_suite_runner():
    """Return (and cache) a django runner class whose run_suite calls our callback instead of django's own suite."""
    global DJANGO_TEST_SUITE_RUNNER
    if DJANGO_TEST_SUITE_RUNNER:
        return DJANGO_TEST_SUITE_RUNNER
    try:
        # django >= 1.8
        import django
        from django.test.runner import DiscoverRunner

        class MyDjangoTestSuiteRunner(DiscoverRunner):
            # Suite building/result handling are disabled: pydev collects and
            # reports the tests itself; django only provides environment setup.

            def __init__(self, on_run_suite):
                django.setup()
                DiscoverRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    except:
        # django < 1.8
        try:
            from django.test.simple import DjangoTestSuiteRunner
        except:
            # No django importable at all: fail loudly only when actually asked to run.
            class DjangoTestSuiteRunner:

                def __init__(self):
                    pass

                def run_tests(self, *args, **kwargs):
                    raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.")

        class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner):

            def __init__(self, on_run_suite):
                DjangoTestSuiteRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner
    return DJANGO_TEST_SUITE_RUNNER
#=======================================================================================================================
# main
#=======================================================================================================================
def main(configuration):
    """Entry point: run every test described by *configuration*."""
    runner = PydevTestRunner(configuration)
    runner.run_tests()

View File

@ -0,0 +1,76 @@
import os.path
import sys
from _pydevd_bundle.pydevd_constants import Null
#=======================================================================================================================
# get_coverage_files
#=======================================================================================================================
def get_coverage_files(coverage_output_dir, number_of_files):
    """Return *number_of_files* distinct, not-yet-existing '.coverage.N' paths inside *coverage_output_dir*."""
    ret = []
    candidate_index = 0
    while len(ret) < number_of_files:
        # Skip indexes whose file already exists (left over from a previous run).
        candidate = os.path.join(coverage_output_dir, '.coverage.%s' % candidate_index)
        candidate_index += 1
        if not os.path.exists(candidate):
            ret.append(candidate)
    return ret
#=======================================================================================================================
# start_coverage_support
#=======================================================================================================================
def start_coverage_support(configuration):
    """Convenience wrapper: start coverage support using the fields of *configuration*."""
    return start_coverage_support_from_params(
        coverage_output_dir=configuration.coverage_output_dir,
        coverage_output_file=configuration.coverage_output_file,
        jobs=configuration.jobs,
        coverage_include=configuration.coverage_include,
    )
#=======================================================================================================================
# start_coverage_support_from_params
#=======================================================================================================================
def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include):
    """Start coverage tracking when requested.

    Returns (coverage_files, coverage_instance):
    - coverage_files: extra '.coverage.N' paths reserved for child (parallel) processes
    - coverage_instance: a started coverage object, or a Null no-op when coverage is off/unavailable
    """
    coverage_files = []
    coverage_instance = Null()
    if coverage_output_dir or coverage_output_file:
        try:
            import coverage  # @UnresolvedImport
        except:
            sys.stderr.write('Error: coverage module could not be imported\n')
            sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n')
            sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,))

            import traceback;traceback.print_exc()
        else:
            if coverage_output_dir:
                if not os.path.exists(coverage_output_dir):
                    sys.stderr.write('Error: directory for coverage output (%s) does not exist.\n' % (coverage_output_dir,))

                elif not os.path.isdir(coverage_output_dir):
                    sys.stderr.write('Error: expected (%s) to be a directory.\n' % (coverage_output_dir,))

                else:
                    n = jobs
                    if n <= 0:
                        n += 1
                    n += 1  # Add 1 more for the current process (which will do the initial import).
                    coverage_files = get_coverage_files(coverage_output_dir, n)
                    # The current process takes the first reserved file (COVERAGE_FILE is read by coverage).
                    os.environ['COVERAGE_FILE'] = coverage_files.pop(0)

                    coverage_instance = coverage.coverage(source=[coverage_include])
                    coverage_instance.start()

            elif coverage_output_file:
                # Client of parallel run: write to the single file assigned by the parent.
                os.environ['COVERAGE_FILE'] = coverage_output_file
                coverage_instance = coverage.coverage(source=[coverage_include])
                coverage_instance.start()

    return coverage_files, coverage_instance

View File

@ -0,0 +1,207 @@
from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport
from nose.plugins.base import Plugin # @UnresolvedImport
import sys
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
from contextlib import contextmanager
from io import StringIO
import traceback
#=======================================================================================================================
# PydevPlugin
#=======================================================================================================================
class PydevPlugin(Plugin):
    """nose plugin that reports test progress and results to the PyDev IDE over xml-rpc."""

    def __init__(self, configuration):
        # Kept for the coverage setup done in begin().
        self.configuration = configuration
        Plugin.__init__(self)
def begin(self):
    """nose hook: called before any test is run (always called, with multiprocess or not)."""
    self.start_time = time.time()
    # Starts coverage tracking when the configuration asks for it.
    self.coverage_files, self.coverage = start_coverage_support(self.configuration)
def finalize(self, result):
    """nose hook: called after all tests are run (always called, with multiprocess or not)."""
    self.coverage.stop()
    self.coverage.save()

    # Tell the IDE the whole run is over, including the total wall-clock time.
    pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,))
#===================================================================================================================
# Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate
# so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this
# process).
#===================================================================================================================
class Sentinel(object):
    # Unique marker: distinguishes "attribute was absent" from any real attribute value.
    pass
@contextmanager
def _without_user_address(self, test):
    # #PyDev-1095: Conflict between address in test and test.address() in PydevPlugin().report_cond()
    # Temporarily remove any user-defined 'address' attribute (instance- or class-level) so nose's
    # own test.address() is the one called inside the with-block; restore it afterwards.
    user_test_instance = test.test
    user_address = self.Sentinel
    user_class_address = self.Sentinel
    try:
        if 'address' in user_test_instance.__dict__:
            user_address = user_test_instance.__dict__.pop('address')
    except:
        # Just ignore anything here.
        pass

    try:
        user_class_address = user_test_instance.__class__.address
        del user_test_instance.__class__.address
    except:
        # Just ignore anything here.
        pass

    try:
        yield
    finally:
        # Put back whatever was removed (Sentinel means "was not set").
        if user_address is not self.Sentinel:
            user_test_instance.__dict__['address'] = user_address

        if user_class_address is not self.Sentinel:
            user_test_instance.__class__.address = user_class_address
def _get_test_address(self, test):
    """Return a (file, test_name) pair for *test*, coping with the several shapes nose hands us."""
    try:
        if hasattr(test, 'address'):
            with self._without_user_address(test):
                address = test.address()

            # test.address() is something as:
            # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1')
            #
            # and we must pass: location, test
            #    E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1']
            address = address[0], address[2]
        else:
            # multiprocess: an (addr_file, addr_test) pair was passed instead of a test object.
            try:
                address = test[0], test[1]
            except TypeError:
                # It may be an error at setup, in which case it's not really a test, but a Context object.
                f = test.context.__file__
                if f.endswith('.pyc'):
                    f = f[:-1]
                elif f.endswith('$py.class'):
                    f = f[:-len('$py.class')] + '.py'
                address = f, '?'
    except:
        # Never let address extraction break the reporting path.
        sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
        traceback.print_exc()
        sys.stderr.write("\n\n\n")
        address = '?', '?'
    return address
def report_cond(self, cond, test, captured_output, error=''):
    '''
    Send a single test result to the IDE.

    @param cond: fail, error, ok
    @param test: the nose test, or an address pair in the multiprocess path
    '''
    address = self._get_test_address(test)

    error_contents = self.get_io_from_error(error)
    try:
        time_str = '%.2f' % (time.time() - test._pydev_start_time)
    except:
        # Start time is unavailable (e.g. multiprocess address pairs).
        time_str = '?'

    pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str)
def startTest(self, test):
    """nose hook: record the start time and notify the IDE that *test* began."""
    test._pydev_start_time = time.time()
    # Renamed locals: the original reused 'test' (the parameter) and 'file' here.
    filename, test_name = self._get_test_address(test)
    pydev_runfiles_xml_rpc.notifyStartTest(filename, test_name)
def get_io_from_error(self, err):
    """Normalize *err* into a printable string.

    nose hands us either a plain string, an exc_info-style (type, value, traceback)
    triple, or -- in the multiprocess path -- a (type, formatted_traceback) pair.
    """
    if isinstance(err, tuple):  # idiom fix: was type(err) == type(())
        if len(err) == 2:
            return err[1]  # multiprocess: already-formatted traceback string
        if len(err) != 3:
            # Robustness fix: unexpected tuple shapes used to raise ValueError on
            # unpacking inside the error-reporting path; degrade gracefully instead.
            return str(err)
        etype, value, tb = err
        if isinstance(value, str):
            return value
        s = StringIO()
        traceback.print_exception(etype, value, tb, file=s)
        return s.getvalue()
    return err
def get_captured_output(self, test):
    """Return the output nose captured for *test*, or '' when there is none."""
    captured = getattr(test, 'capturedOutput', None)
    if captured:
        return captured
    return ''
def addError(self, test, err):
    """nose hook: report an errored test to the IDE."""
    self.report_cond('error', test, self.get_captured_output(test), err)

def addFailure(self, test, err):
    """nose hook: report a failed test to the IDE."""
    self.report_cond('fail', test, self.get_captured_output(test), err)

def addSuccess(self, test):
    """nose hook: report a passing test to the IDE."""
    self.report_cond('ok', test, self.get_captured_output(test), '')
# Module-level singleton so new_consolidate (below) can reach the active plugin.
PYDEV_NOSE_PLUGIN_SINGLETON = None

def start_pydev_nose_plugin_singleton(configuration):
    """Create (and remember) the PydevPlugin instance for this test run."""
    global PYDEV_NOSE_PLUGIN_SINGLETON
    PYDEV_NOSE_PLUGIN_SINGLETON = PydevPlugin(configuration)
    return PYDEV_NOSE_PLUGIN_SINGLETON
# Keep the original implementation so new_consolidate can delegate to it.
original = MultiProcessTestRunner.consolidate

#=======================================================================================================================
# new_consolidate
#=======================================================================================================================
def new_consolidate(self, result, batch_result):
    '''
    Used so that it can work with the multiprocess plugin.
    Monkeypatched because nose seems a bit unsupported at this time (ideally
    the plugin would have this support by default).
    '''
    ret = original(self, result, batch_result)

    # HACK: reach into the caller's frame to recover the address of the batch being consolidated.
    parent_frame = sys._getframe().f_back
    # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4
    # so, convert it to what report_cond expects
    addr = parent_frame.f_locals['addr']
    i = addr.rindex(':')
    addr = [addr[:i], addr[i + 1:]]

    output, testsRun, failures, errors, errorClasses = batch_result
    if failures or errors:
        for failure in failures:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure)

        for error in errors:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error)
    else:
        PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output)

    return ret

# Install the patched consolidation so per-batch results reach the IDE.
MultiProcessTestRunner.consolidate = new_consolidate

View File

@ -0,0 +1,267 @@
import unittest
from _pydev_bundle._pydev_saved_modules import thread
import queue as Queue
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
import os
import threading
import sys
#=======================================================================================================================
# flatten_test_suite
#=======================================================================================================================
def flatten_test_suite(test_suite, ret):
if isinstance(test_suite, unittest.TestSuite):
for t in test_suite._tests:
flatten_test_suite(t, ret)
elif isinstance(test_suite, unittest.TestCase):
ret.append(test_suite)
#=======================================================================================================================
# execute_tests_in_parallel
#=======================================================================================================================
def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include):
    '''
    @param tests: list(PydevTestSuite)
        A list with the suites to be run

    @param split: str
        Either 'module' or the number of tests that should be run in each batch

    @param coverage_files: list(file)
        A list with the files that should be used for giving coverage information (if empty, coverage information
        should not be gathered).

    @param coverage_include: str
        The pattern that should be included in the coverage.

    @return: bool
        Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1
        should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be
        run.

        It may also return False if in debug mode (in which case, multi-processes are not accepted)
    '''
    try:
        from _pydevd_bundle.pydevd_comm import get_global_debugger
        # Parallel runs are disabled while debugging.
        if get_global_debugger() is not None:
            return False
    except:
        pass  # Ignore any error here.

    # This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When
    # split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests
    # from a given module.
    tests_queue = []

    queue_elements = []
    if split == 'module':
        # Group the flattened test cases by their source module.
        module_to_tests = {}
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                key = (test.__pydev_pyfile__, test.__pydev_module_name__)
                module_to_tests.setdefault(key, []).append(test)

        for key, tests in module_to_tests.items():
            queue_elements.append(tests)

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    elif split == 'tests':
        # One queue entry per individual test case.
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                queue_elements.append([test])

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    else:
        raise AssertionError('Do not know how to handle: %s' % (split,))

    # Serialize each batch as 'filename|Class.testName' strings for the workers.
    for test_cases in queue_elements:
        test_queue_elements = []
        for test_case in test_cases:
            try:
                test_name = test_case.__class__.__name__ + "." + test_case._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                test_name = test_case.__class__.__name__ + "." + test_case._TestCase__testMethodName

            test_queue_elements.append(test_case.__pydev_pyfile__ + '|' + test_name)

        tests_queue.append(test_queue_elements)

    if jobs < 2:
        # Not worth spawning workers for a single batch.
        return False

    sys.stdout.write('Running tests in parallel with: %s jobs.\n' % (jobs,))

    queue = Queue.Queue()
    for item in tests_queue:
        queue.put(item, block=False)

    providers = []
    clients = []
    for i in range(jobs):
        # One xml-rpc provider thread plus one worker-process wrapper per job.
        test_cases_provider = CommunicationThread(queue)
        providers.append(test_cases_provider)

        test_cases_provider.start()
        port = test_cases_provider.port

        if coverage_files:
            clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include))
        else:
            clients.append(ClientThread(i, port, verbosity))

    for client in clients:
        client.start()

    client_alive = True
    while client_alive:
        client_alive = False
        for client in clients:
            # Wait for all the clients to exit.
            if not client.finished:
                client_alive = True
                time.sleep(.2)
                break

    for provider in providers:
        provider.shutdown()

    return True
#=======================================================================================================================
# CommunicationThread
#=======================================================================================================================
class CommunicationThread(threading.Thread):
    """Serves tests over xml-rpc to one worker process and relays its results back."""

    def __init__(self, tests_queue):
        threading.Thread.__init__(self)
        self.daemon = True
        # Queue of test batches shared with the other provider threads.
        self.queue = tests_queue
        self.finished = False
        from _pydev_bundle.pydev_imports import SimpleXMLRPCServer
        from _pydev_bundle import pydev_localhost

        # Create server
        server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False)
        server.register_function(self.GetTestsToRun)
        server.register_function(self.notifyStartTest)
        server.register_function(self.notifyTest)
        server.register_function(self.notifyCommands)
        # Port 0 above means "pick any free port"; remember the one chosen.
        self.port = server.socket.getsockname()[1]
        self.server = server

    def GetTestsToRun(self, job_id):
        '''
        @param job_id:

        @return: list(str)
            Each entry is a string in the format: filename|Test.testName
        '''
        try:
            ret = self.queue.get(block=False)
            return ret
        except:  # Any exception getting from the queue (empty or not) means we finished our work on providing the tests.
            self.finished = True
            return []

    def notifyCommands(self, job_id, commands):
        # Batch notification: each command is (method_name, args, kwargs).
        for command in commands:
            getattr(self, command[0])(job_id, *command[1], **command[2])

        return True

    def notifyStartTest(self, job_id, *args, **kwargs):
        # Forward to the pydev server; return True so xml-rpc has a result to marshal.
        pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs)
        return True

    def notifyTest(self, job_id, *args, **kwargs):
        pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs)
        return True

    def shutdown(self):
        # Older SimpleXMLRPCServer implementations lack shutdown(); fall back to a flag.
        if hasattr(self.server, 'shutdown'):
            self.server.shutdown()
        else:
            self._shutdown = True

    def run(self):
        if hasattr(self.server, 'shutdown'):
            self.server.serve_forever()
        else:
            # Manual request loop for servers without serve_forever/shutdown support.
            self._shutdown = False
            while not self._shutdown:
                self.server.handle_request()
#=======================================================================================================================
# Client
#=======================================================================================================================
class ClientThread(threading.Thread):
    """Wraps one worker subprocess which asks the server for tests and runs them."""

    def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.port = port
        self.job_id = job_id
        self.verbosity = verbosity
        self.finished = False
        self.coverage_output_file = coverage_output_file
        self.coverage_include = coverage_include

    def _reader_thread(self, pipe, target):
        # Forward the child's output to our own stream, one byte at a time.
        while True:
            target.write(pipe.read(1))

    def run(self):
        """Launch the parallel client as a subprocess and wait for it to finish."""
        try:
            from _pydev_runfiles import pydev_runfiles_parallel_client
            # TODO: Support Jython:
            #
            # For jython, instead of using sys.executable, we should use:
            # r'D:\bin\jdk_1_5_09\bin\java.exe',
            # '-classpath',
            # 'D:/bin/jython-2.2.1/jython.jar',
            # 'org.python.util.jython',
            args = [
                sys.executable,
                pydev_runfiles_parallel_client.__file__,
                str(self.job_id),
                str(self.port),
                str(self.verbosity),
            ]

            if self.coverage_output_file and self.coverage_include:
                args.append(self.coverage_output_file)
                args.append(self.coverage_include)

            import subprocess
            if False:
                # Debug-only path: capture the child's output through reader threads.
                proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

                # Bug fix: thread.start_new_thread only accepts positional
                # (function, args) -- the original passed target=/args= keyword
                # arguments for the stderr reader, which raises TypeError.
                thread.start_new_thread(self._reader_thread, (proc.stdout, sys.stdout))
                thread.start_new_thread(self._reader_thread, (proc.stderr, sys.stderr))
            else:
                proc = subprocess.Popen(args, env=os.environ, shell=False)

            proc.wait()
        finally:
            # Always flip the flag so the coordinator loop can stop waiting on us.
            self.finished = True

View File

@ -0,0 +1,214 @@
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
Queue = _queue.Queue
import traceback
import sys
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params
import threading
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    """Queued description of a remote call: method name plus its args/kwargs."""

    def __init__(self, method, args, kwargs):
        self.method = method
        self.args = args
        self.kwargs = kwargs

    def to_tuple(self):
        """Return the (method, args, kwargs) triple used for batched dispatch."""
        return (self.method, self.args, self.kwargs)
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    """Sentinel queued to tell the ServerComm thread to flush and stop."""
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    """Drains the notification queue and forwards batched commands to the xml-rpc server."""

    def __init__(self, job_id, server):
        self.notifications_queue = Queue()
        threading.Thread.__init__(self)
        # Wait for all the notifications to be passed before exiting!
        # (Thread.setDaemon is deprecated; assign the attribute instead.)
        self.daemon = False
        assert job_id is not None
        # Bug fix: the original also did `assert port is not None`, but `port`
        # is not a parameter of this class -- that line raised NameError.
        self.job_id = job_id
        self.finished = False
        self.server = server

    def run(self):
        """Send queued notifications in batches until a KillServer sentinel arrives."""
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                # Drain whatever is already queued so we can notify in one batch.
                while True:
                    command = self.notifications_queue.get(block=False)  # No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  # That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    # The xml-rpc proxy is shared with the main thread; serialize access.
                    self.server.lock.acquire()
                    try:
                        self.server.notifyCommands(self.job_id, commands)
                    finally:
                        self.server.lock.release()
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    """Stand-in for the real server: turns calls into queued ParallelNotification objects."""

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def _enqueue(self, method, args, kwargs):
        self.notifications_queue.put_nowait(ParallelNotification(method, args, kwargs))

    def notifyTestsCollected(self, *args, **kwargs):
        # This notification won't be passed.
        pass

    def notifyTestRunFinished(self, *args, **kwargs):
        # This notification won't be passed.
        pass

    def notifyStartTest(self, *args, **kwargs):
        self._enqueue('notifyStartTest', args, kwargs)

    def notifyTest(self, *args, **kwargs):
        self._enqueue('notifyTest', args, kwargs)
#=======================================================================================================================
# run_client
#=======================================================================================================================
def run_client(job_id, port, verbosity, coverage_output_file, coverage_include):
    """Worker entry point: pull test batches from the server at *port* and run them.

    Loops until the server has no more tests to hand out; results are sent back
    asynchronously through a ServerComm thread, which is told to stop at the end.
    """
    job_id = int(job_id)

    from _pydev_bundle import pydev_localhost
    server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port))
    # Lock shared with ServerComm so only one thread talks to the proxy at a time.
    server.lock = threading.Lock()

    server_comm = ServerComm(job_id, server)
    server_comm.start()

    try:
        server_facade = ServerFacade(server_comm.notifications_queue)
        from _pydev_runfiles import pydev_runfiles
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.set_server(server_facade)

        # Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated
        # before receiving any test -- which would mean a different process got all the tests to run).
        coverage = None

        try:
            tests_to_run = [1]
            while tests_to_run:
                # Investigate: is it dangerous to use the same xmlrpclib server from different threads?
                # It seems it should be, as it creates a new connection for each request...
                server.lock.acquire()
                try:
                    tests_to_run = server.GetTestsToRun(job_id)
                finally:
                    server.lock.release()

                if not tests_to_run:
                    break

                if coverage is None:
                    _coverage_files, coverage = start_coverage_support_from_params(
                        None, coverage_output_file, 1, coverage_include)

                # Each entry has the shape 'filename|Class.testName'.
                files_to_tests = {}
                for test in tests_to_run:
                    filename_and_test = test.split('|')
                    if len(filename_and_test) == 2:
                        files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1])

                configuration = pydev_runfiles.Configuration(
                    '',
                    verbosity,
                    None,
                    None,
                    None,
                    files_to_tests,
                    1,  # Always single job here
                    None,

                    # The coverage is handled in this loop.
                    coverage_output_file=None,
                    coverage_include=None,
                )
                test_runner = pydev_runfiles.PydevTestRunner(configuration)
                sys.stdout.flush()
                test_runner.run_tests(handle_coverage=False)
        finally:
            if coverage is not None:
                coverage.stop()
                coverage.save()

    except:
        traceback.print_exc()

    # Always tell the comm thread to flush pending notifications and exit.
    server_comm.notifications_queue.put_nowait(KillServer())
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Expected argv: job_id port verbosity [coverage_output_file coverage_include]
    if len(sys.argv) - 1 == 3:
        job_id, port, verbosity = sys.argv[1:]
        coverage_output_file, coverage_include = None, None

    elif len(sys.argv) - 1 == 5:
        job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:]

    else:
        # Bug fix: the original concatenated a str with a list
        # ('...' + sys.argv[1:]), which raised TypeError instead of the
        # intended AssertionError with the offending parameters.
        raise AssertionError('Could not find out how to handle the parameters: %s' % (sys.argv[1:],))

    job_id = int(job_id)
    port = int(port)
    verbosity = int(verbosity)

    run_client(job_id, port, verbosity, coverage_output_file, coverage_include)

View File

@ -0,0 +1,306 @@
from _pydev_runfiles import pydev_runfiles_xml_rpc
import pickle
import zlib
import base64
import os
from pydevd_file_utils import canonical_normalized_path
import pytest
import sys
import time
from pathlib import Path
#=========================================================================
# Load filters with tests we should skip
#=========================================================================
# Lazily-loaded mapping of filename -> list of accepted test names (None until loaded).
py_test_accept_filter = None


def _load_filters():
    """Load the accept-filter from the PYDEV_PYTEST_SKIP environment variable (once).

    The variable holds a base64-encoded, zlib-compressed pickle produced by the
    pydev IDE side. NOTE(review): pickle.loads on env-provided data is only safe
    because the variable is set by the IDE itself, not by untrusted input.
    """
    global py_test_accept_filter
    if py_test_accept_filter is None:
        py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP')
        if py_test_accept_filter:
            py_test_accept_filter = pickle.loads(
                zlib.decompress(base64.b64decode(py_test_accept_filter)))

            # Newer versions of pytest resolve symlinks, so, we
            # may need to filter with a resolved path too.
            new_dct = {}
            for filename, value in py_test_accept_filter.items():
                new_dct[canonical_normalized_path(str(Path(filename).resolve()))] = value

            py_test_accept_filter.update(new_dct)

        else:
            py_test_accept_filter = {}
def is_in_xdist_node():
    """Return True when this process is an xdist worker (PID differs from the main one)."""
    main_pid = os.environ.get('PYDEV_MAIN_PID')
    return bool(main_pid) and main_pid != str(os.getpid())
# Module-level guard so we only attempt the connection once per process.
connected = False


def connect_to_server_for_communication_to_xml_rpc_on_xdist():
    """On an xdist worker, connect (once) to the main process' xml-rpc server."""
    global connected
    if connected:
        return
    connected = True
    if is_in_xdist_node():
        port = os.environ.get('PYDEV_PYTEST_SERVER')
        if not port:
            sys.stderr.write(
                'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n')
        else:
            pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
# Python 2/3 flags (kept for compatibility with older callers of this module).
PY2 = sys.version_info[0] <= 2
PY3 = not PY2


class State:
    # Wall-clock time at module import; used to report the total run duration.
    start_time = time.time()

    # Redirection buffers for stderr/stdout (None until start_redirect is called).
    buf_err = None
    buf_out = None
def start_redirect():
    """Begin capturing stdout/stderr into the State buffers (idempotent)."""
    if State.buf_out is not None:
        return

    from _pydevd_bundle import pydevd_io
    State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
    State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
def get_curr_output():
    """Return (stdout_text, stderr_text) captured so far; '' for buffers not yet started."""
    def _value(buf):
        return buf.getvalue() if buf is not None else ''

    return _value(State.buf_out), _value(State.buf_err)
def pytest_unconfigure():
    """pytest hook: report total run time when the session ends (main node only)."""
    if is_in_xdist_node():
        return
    # Only report that it finished when on the main node (we don't want to report
    # the finish on each separate node).
    pydev_runfiles_xml_rpc.notifyTestRunFinished(
        'Finished in: %.2f secs.' % (time.time() - State.start_time,))
def pytest_collection_modifyitems(session, config, items):
    """pytest hook: filter collected *items* down to the tests pydev asked for.

    When no filter is configured, just reports the collected count and returns.
    """
    # A note: in xdist, this is not called on the main process, only in the
    # secondary nodes, so, we'll actually make the filter and report it multiple
    # times.
    connect_to_server_for_communication_to_xml_rpc_on_xdist()

    _load_filters()
    if not py_test_accept_filter:
        pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
        return  # Keep on going (nothing to filter)

    new_items = []
    for item in items:
        f = canonical_normalized_path(str(item.parent.fspath))
        name = item.name

        if f not in py_test_accept_filter:
            # print('Skip file: %s' % (f,))
            continue  # Skip the file

        # Parametrized tests look like 'test_x[param]'; also match on the bare name.
        i = name.find('[')
        name_without_parametrize = None
        if i > 0:
            name_without_parametrize = name[:i]

        accept_tests = py_test_accept_filter[f]

        if item.cls is not None:
            class_name = item.cls.__name__
        else:
            class_name = None
        for test in accept_tests:
            if test == name:
                # Direct match of the test (just go on with the default
                # loading)
                new_items.append(item)
                break

            if name_without_parametrize is not None and test == name_without_parametrize:
                # This happens when parameterizing pytest tests on older versions
                # of pytest where the test name doesn't include the fixture name
                # in it.
                new_items.append(item)
                break

            if class_name is not None:
                if test == class_name + '.' + name:
                    new_items.append(item)
                    break

                if name_without_parametrize is not None and test == class_name + '.' + name_without_parametrize:
                    new_items.append(item)
                    break

            if class_name == test:
                new_items.append(item)
                break

        else:
            pass
            # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))

    # Modify the original list
    items[:] = new_items
    pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
try:
"""
pytest > 5.4 uses own version of TerminalWriter based on py.io.TerminalWriter
and assumes there is a specific method TerminalWriter._write_source
so try load pytest version first or fallback to default one
"""
from _pytest._io import TerminalWriter
except ImportError:
from py.io import TerminalWriter
def _get_error_contents_from_report(report):
    """Render a pytest report's longrepr to plain text; return '' when absent."""
    if report.longrepr is None:
        return ''

    try:
        # Newer TerminalWriter accepts stringio=True and exposes .stringio.
        writer = TerminalWriter(stringio=True)
        out = writer.stringio
    except TypeError:
        # Older signature wants an explicit file object instead.
        import io
        out = io.StringIO()
        writer = TerminalWriter(file=out)
    writer.hasmarkup = False
    report.toterminal(writer)
    return out.getvalue().strip()
def pytest_collectreport(report):
    """pytest hook: surface collection-time errors as a synthetic failed test."""
    contents = _get_error_contents_from_report(report)
    if not contents:
        return
    report_test('fail', '<collect errors>', '<collect errors>', '', contents, 0.0)
def append_strings(s1, s2):
    """Concatenate two str/bytes values; mixed types are coerced to str (utf-8, replace)."""
    if s1.__class__ != s2.__class__:
        # Prefer str: decode whichever side is bytes.
        s1 = s1.decode('utf-8', 'replace') if isinstance(s1, bytes) else s1
        s2 = s2.decode('utf-8', 'replace') if isinstance(s2, bytes) else s2
    return s1 + s2
def pytest_runtest_logreport(report):
    """pytest hook: forward a test report (status, output, errors) to the pydev server.

    Skipped entirely on xdist worker nodes -- the main process does the reporting.
    """
    if is_in_xdist_node():
        # When running with xdist, we don't want the report to be called from the node, only
        # from the main process.
        return

    report_duration = report.duration
    report_when = report.when
    report_outcome = report.outcome

    if hasattr(report, 'wasxfail'):
        # xfail tests report as passed unless they were skipped.
        if report_outcome != 'skipped':
            report_outcome = 'passed'

    if report_outcome == 'passed':
        # passed on setup/teardown: no need to report if in setup or teardown
        # (only on the actual test if it passed).
        if report_when in ('setup', 'teardown'):
            return

        status = 'ok'

    elif report_outcome == 'skipped':
        status = 'skip'

    else:
        # It has only passed, skipped and failed (no error), so, let's consider
        # error if not on call.
        if report_when in ('setup', 'teardown'):
            status = 'error'

        else:
            # any error in the call (not in setup or teardown) is considered a
            # regular failure.
            status = 'fail'

    # This will work if pytest is not capturing it, if it is, nothing will
    # come from here...
    captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '')
    for type_section, value in report.sections:
        if value:
            if type_section in ('err', 'stderr', 'Captured stderr call'):
                error_contents = append_strings(error_contents, value)
            else:
                # Bug fix: stdout sections must be appended to captured_output;
                # the original appended them to error_contents and assigned the
                # result here, discarding previously captured output.
                captured_output = append_strings(captured_output, value)

    filename = getattr(report, 'pydev_fspath_strpath', '<unable to get>')
    test = report.location[2]

    if report_outcome != 'skipped':
        # On skipped, we'll have a traceback for the skip, which is not what we
        # want.
        exc = _get_error_contents_from_report(report)
        if exc:
            if error_contents:
                error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n')

            error_contents = append_strings(error_contents, exc)

    report_test(status, filename, test, captured_output, error_contents, report_duration)
def report_test(status, filename, test, captured_output, error_contents, duration):
    '''
    Send one test result to the pydev xml-rpc server.

    @param filename: 'D:\\src\\mod1\\hello.py'
    @param test: 'TestCase.testMet1'
    @param status: fail, error, ok
    '''
    # Duration is transmitted as a formatted string, not a float.
    time_str = '%.2f' % (duration,)
    pydev_runfiles_xml_rpc.notifyTest(
        status, captured_output, error_contents, filename, test, time_str)
# Sanity check: this plugin relies on pytest.hookimpl below; bail out early
# with a clear message on pytest versions that predate it.
if not hasattr(pytest, 'hookimpl'):
    raise AssertionError('Please upgrade pytest (the current version of pytest: %s is unsupported)' % (pytest.__version__,))
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # Hook wrapper: let pytest build the report, then attach the extra data our
    # logreport hook needs (source path plus our own redirected output).
    outcome = yield
    report = outcome.get_result()
    report.pydev_fspath_strpath = item.fspath.strpath
    report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
@pytest.mark.tryfirst
def pytest_runtest_setup(item):
    '''
    Start output redirection and tell the server a test is starting.

    Note: with xdist will be on a secondary process.
    '''
    # We have our own redirection: if xdist does its redirection, we'll have
    # nothing in our contents (which is OK), but if it does, we'll get nothing
    # from pytest but will get our own here.
    start_redirect()
    filename = item.fspath.strpath
    test = item.location[2]

    pydev_runfiles_xml_rpc.notifyStartTest(filename, test)

View File

@ -0,0 +1,150 @@
import unittest as python_unittest
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
from _pydevd_bundle import pydevd_io
import traceback
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
from io import StringIO
#=======================================================================================================================
# PydevTextTestRunner
#=======================================================================================================================
class PydevTextTestRunner(python_unittest.TextTestRunner):
    # Produce our result class so test events are forwarded over xml-rpc.
    def _makeResult(self):
        return PydevTestResult(self.stream, self.descriptions, self.verbosity)


# The concrete TextTestResult class of the running stdlib, grabbed from a
# throwaway runner so PydevTestResult subclasses whatever this Python provides.
_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__
#=======================================================================================================================
# PydevTestResult
#=======================================================================================================================
class PydevTestResult(_PythonTextTestResult):
    """TextTestResult subclass that forwards every test event to the pydev xml-rpc server."""

    def addSubTest(self, test, subtest, err):
        """Called at the end of a subtest.
        'err' is None if the subtest ended successfully, otherwise it's a
        tuple of values as returned by sys.exc_info().
        """
        _PythonTextTestResult.addSubTest(self, test, subtest, err)
        if err is not None:
            subdesc = subtest._subDescription()
            error = (test, self._exc_info_to_string(err, test))
            self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc))

    def startTest(self, test):
        # Capture stdout/stderr for the test's duration and notify the server.
        _PythonTextTestResult.startTest(self, test)
        self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both')
        self.start_time = time.time()
        self._current_errors_stack = []
        self._current_failures_stack = []

        try:
            test_name = test.__class__.__name__ + "." + test._testMethodName
        except AttributeError:
            # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
            test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName

        pydev_runfiles_xml_rpc.notifyStartTest(
            test.__pydev_pyfile__, test_name)

    def get_test_name(self, test):
        """Best-effort 'Class.method' name for *test* (handles jython and _ErrorHolder)."""
        try:
            try:
                test_name = test.__class__.__name__ + "." + test._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                try:
                    test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName
                # Support for class/module exceptions (test is instance of _ErrorHolder)
                except:
                    test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>'
        except:
            traceback.print_exc()
            return '<unable to get test name>'

        return test_name

    def stopTest(self, test):
        # Stop capturing, compute elapsed time, and report the final status.
        end_time = time.time()
        pydevd_io.end_redirect(std='both')

        _PythonTextTestResult.stopTest(self, test)

        captured_output = self.buf.getvalue()
        del self.buf
        error_contents = ''
        test_name = self.get_test_name(test)

        diff_time = '%.2f' % (end_time - self.start_time)

        skipped = False
        outcome = getattr(test, '_outcome', None)
        if outcome is not None:
            skipped = bool(getattr(outcome, 'skipped', None))

        if skipped:
            pydev_runfiles_xml_rpc.notifyTest(
                'skip', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        elif not self._current_errors_stack and not self._current_failures_stack:
            pydev_runfiles_xml_rpc.notifyTest(
                'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        else:
            self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name)

    def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''):
        # Join all tracebacks and notify with the proper status
        # ('error' wins when both errors and failures are present).
        error_contents = []
        for test, s in errors + failures:
            if type(s) == type((1,)):  # If it's a tuple (for jython 2.1)
                sio = StringIO()
                traceback.print_exception(s[0], s[1], s[2], file=sio)
                s = sio.getvalue()
            error_contents.append(s)

        sep = '\n' + self.separator1
        error_contents = sep.join(error_contents)

        if errors and not failures:
            try:
                pydev_runfiles_xml_rpc.notifyTest(
                    'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
            except:
                # Fall back to parsing the filename out of the traceback text
                # when the test object has no __pydev_pyfile__.
                file_start = error_contents.find('File "')
                file_end = error_contents.find('", ', file_start)
                if file_start != -1 and file_end != -1:
                    file = error_contents[file_start + 6:file_end]
                else:
                    file = '<unable to get file>'
                pydev_runfiles_xml_rpc.notifyTest(
                    'error', captured_output, error_contents, file, test_name, diff_time)

        elif failures and not errors:
            pydev_runfiles_xml_rpc.notifyTest(
                'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

        else:  # Ok, we got both, errors and failures. Let's mark it as an error in the end.
            pydev_runfiles_xml_rpc.notifyTest(
                'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

    def addError(self, test, err):
        _PythonTextTestResult.addError(self, test, err)
        # Support for class/module exceptions (test is instance of _ErrorHolder)
        if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder':
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test))
        else:
            self._current_errors_stack.append(self.errors[-1])

    def addFailure(self, test, err):
        _PythonTextTestResult.addFailure(self, test, err)
        if not hasattr(self, '_current_failures_stack'):
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test))
        else:
            self._current_failures_stack.append(self.failures[-1])
class PydevTestSuite(python_unittest.TestSuite):
    """Plain TestSuite subclass kept so pydev-built suites are distinguishable."""

View File

@ -0,0 +1,257 @@
import sys
import threading
import traceback
import warnings
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
from _pydevd_bundle.pydevd_constants import Null
Queue = _queue.Queue
# This may happen in IronPython (in Python it shouldn't happen as there are
# 'fast' replacements that are used in xmlrpclib.py)
warnings.filterwarnings(
'ignore', 'The xmllib module is obsolete.*', DeprecationWarning)
file_system_encoding = getfilesystemencoding()
#=======================================================================================================================
# _ServerHolder
#=======================================================================================================================
class _ServerHolder:
    '''
    Helper so that we don't have to use a global here.
    '''
    # The current server facade (None until initialize_server/set_server is called).
    SERVER = None
#=======================================================================================================================
# set_server
#=======================================================================================================================
def set_server(server):
    # Replace the process-wide server facade (used by parallel/xdist clients).
    _ServerHolder.SERVER = server
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    """Queued description of a server notification: method name plus positional args."""

    def __init__(self, method, args):
        self.method = method
        self.args = args

    def to_tuple(self):
        """Return the (method, args) pair used for batched xml-rpc dispatch."""
        return (self.method, self.args)
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    """Sentinel queued to tell the ServerComm thread to flush and stop."""
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    """Queues server notifications; ServerComm sends them later in batches."""

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def _enqueue(self, method, args):
        self.notifications_queue.put_nowait(ParallelNotification(method, args))

    def notifyTestsCollected(self, *args):
        self._enqueue('notifyTestsCollected', args)

    def notifyConnected(self, *args):
        self._enqueue('notifyConnected', args)

    def notifyTestRunFinished(self, *args):
        self._enqueue('notifyTestRunFinished', args)

    def notifyStartTest(self, *args):
        self._enqueue('notifyStartTest', args)

    def notifyTest(self, *args):
        # Encode each argument so it is xml-rpc safe before queuing.
        encoded = tuple(_encode_if_needed(arg) for arg in args)
        self._enqueue('notifyTest', encoded)
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    """Background thread that drains the notification queue and sends batches over xml-rpc."""

    def __init__(self, notifications_queue, port, daemon=False):
        threading.Thread.__init__(self)
        # If False, wait for all the notifications to be passed before exiting!
        # (Thread.setDaemon is deprecated since Python 3.10; assign the attribute.)
        self.daemon = daemon
        self.finished = False
        self.notifications_queue = notifications_queue

        from _pydev_bundle import pydev_localhost

        # It is necessary to specify an encoding, that matches
        # the encoding of all bytes-strings passed into an
        # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the
        # packet encoding.  Unicode strings are automatically converted,
        # where necessary."
        # Byte strings most likely come from file names.
        encoding = file_system_encoding
        if encoding == "mbcs":
            # Windos symbolic name for the system encoding CP_ACP.
            # We need to convert it into a encoding that is recognized by Java.
            # Unfortunately this is not always possible. You could use
            # GetCPInfoEx and get a name similar to "windows-1251". Then
            # you need a table to translate on a best effort basis. Much to complicated.
            # ISO-8859-1 is good enough.
            encoding = "ISO-8859-1"

        self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port),
                                       encoding=encoding)

    def run(self):
        """Batch queued notifications and send them until a KillServer sentinel arrives."""
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  # No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  # That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    self.server.notifyCommands(commands)
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
#=======================================================================================================================
# initialize_server
#=======================================================================================================================
def initialize_server(port, daemon=False):
    """Set up the global server facade/comm thread (idempotent).

    With a port, a real ServerFacade + ServerComm pair is created; without one,
    Null objects keep the notification API usable with no connection.
    """
    if _ServerHolder.SERVER is None:
        if port is None:
            # Create a null server, so that we keep the interface even without any connection.
            _ServerHolder.SERVER = Null()
            _ServerHolder.SERVER_COMM = Null()
        else:
            queue = Queue()
            _ServerHolder.SERVER = ServerFacade(queue)
            comm = ServerComm(queue, port, daemon)
            _ServerHolder.SERVER_COMM = comm
            comm.start()

    try:
        _ServerHolder.SERVER.notifyConnected()
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
def notifyTestsCollected(tests_count):
    """Notify the server of how many tests were collected for this run."""
    assert tests_count is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestsCollected(tests_count)
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyStartTest
#=======================================================================================================================
def notifyStartTest(file, test):
    '''
    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    '''
    assert file is not None
    # test may be None if we had an import error importing the module.
    test = '' if test is None else test

    try:
        _ServerHolder.SERVER.notifyStartTest(file, test)
    except:
        traceback.print_exc()
def _encode_if_needed(obj):
# In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
if isinstance(obj, str): # Unicode in py3
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
elif isinstance(obj, bytes):
try:
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
except:
return xmlrpclib.Binary(obj) # bytes already
return obj
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
def notifyTest(cond, captured_output, error_contents, file, test, time):
    '''
    @param cond: ok, fail, error
    @param captured_output: output captured from stdout
    @param error_contents: output captured from stderr
    @param file: the tests file (c:/temp/test.py)
    @param test: the test ran (i.e.: TestCase.test1)
    @param time: float with the number of seconds elapsed
    '''
    assert cond is not None
    assert captured_output is not None
    assert error_contents is not None
    assert file is not None
    assert time is not None
    # test may be None if we had an import error importing the module.
    test = '' if test is None else test

    try:
        _ServerHolder.SERVER.notifyTest(
            cond,
            _encode_if_needed(captured_output),
            _encode_if_needed(error_contents),
            file,
            test,
            time,
        )
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyTestRunFinished
#=======================================================================================================================
def notifyTestRunFinished(total_time):
    """Notify the server that the whole test run finished (total_time in seconds)."""
    assert total_time is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestRunFinished(total_time)
    except:
        traceback.print_exc()
#=======================================================================================================================
# force_server_kill
#=======================================================================================================================
def force_server_kill():
    """Queue a KillServer marker so the ServerComm thread shuts down."""
    queue = _ServerHolder.SERVER_COMM.notifications_queue
    queue.put_nowait(KillServer())

View File

@ -0,0 +1,592 @@
'''
Run this module to regenerate the `pydevd_schema.py` file.
Note that it'll generate it based on the current debugProtocol.json. Erase it and rerun
to download the latest version.
'''
def is_variable_to_translate(cls_name, var_name):
    """Return True if this property holds an id that must be translated
    between pydevd's internal ids and the DAP wire ids."""
    if var_name in ('variablesReference', 'frameId', 'threadId'):
        return True
    # 'id' means frameId on StackFrame and threadId on Thread; everywhere else
    # it is not translated.
    return (cls_name, var_name) in (('StackFrame', 'id'), ('Thread', 'id'))
def _get_noqa_for_var(prop_name):
return ' # noqa (assign to builtin)' if prop_name in ('type', 'format', 'id', 'hex', 'breakpoint', 'filter') else ''
class _OrderedSet(object):
# Not a good ordered set (just something to be small without adding any deps)
def __init__(self, initial_contents=None):
self._contents = []
self._contents_as_set = set()
if initial_contents is not None:
for x in initial_contents:
self.add(x)
def add(self, x):
if x not in self._contents_as_set:
self._contents_as_set.add(x)
self._contents.append(x)
def discard(self, x):
if x in self._contents_as_set:
self._contents_as_set.remove(x)
self._contents.remove(x)
def copy(self):
return _OrderedSet(self._contents)
def update(self, contents):
for x in contents:
self.add(x)
def __iter__(self):
return iter(self._contents)
def __contains__(self, item):
return item in self._contents_as_set
def __len__(self):
return len(self._contents)
def set_repr(self):
if len(self) == 0:
return 'set()'
lst = [repr(x) for x in self]
return 'set([' + ', '.join(lst) + '])'
class Ref(object):
    """A resolved '$ref' to another schema definition.

    Holds the target class name (ref) and its intermediate dict (ref_data);
    str() yields the target name so it can be interpolated into generated code.
    """

    def __init__(self, ref, ref_data):
        self.ref = ref
        self.ref_data = ref_data

    def __str__(self):
        return self.ref
def load_schema_data():
    """Return the parsed debugProtocol.json, downloading and caching it next to
    this script on first use."""
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), 'debugProtocol.json')
    if not os.path.exists(json_file):
        # First run: fetch the schema and cache it on disk.
        import requests
        req = requests.get('https://raw.githubusercontent.com/microsoft/debug-adapter-protocol/gh-pages/debugAdapterProtocol.json')
        assert req.status_code == 200
        with open(json_file, 'wb') as stream:
            stream.write(req.content)

    with open(json_file, 'rb') as stream:
        return json.loads(stream.read())
def load_custom_schema_data():
    """Return the parsed debugProtocolCustom.json (pydevd-specific extensions),
    which must live next to this script."""
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), 'debugProtocolCustom.json')
    with open(json_file, 'rb') as stream:
        return json.loads(stream.read())
def create_classes_to_generate_structure(json_schema_data):
    """Convert json-schema 'definitions' into the intermediate per-class dicts
    (name, properties, base_definitions, description, required, enum info)
    consumed by the rest of the generator."""
    classes_to_generate = {}
    for name, definition in json_schema_data['definitions'].items():
        all_of = definition.get('allOf')
        description = definition.get('description')

        is_enum = definition.get('type') == 'string' and 'enum' in definition
        enum_values = definition['enum'] if is_enum else None

        properties = dict(definition.get('properties', {}))
        required = _OrderedSet(definition.get('required', _OrderedSet()))

        base_definitions = []
        if all_of is not None:
            # Each allOf entry is either a $ref to a base definition or an
            # inline extension carrying more properties/required entries.
            for part in all_of:
                ref = part.get('$ref')
                if ref is not None:
                    assert ref.startswith('#/definitions/')
                    base_definitions.append(ref[len('#/definitions/'):])
                else:
                    if not description:
                        description = part.get('description')
                    properties.update(part.get('properties', {}))
                    required.update(_OrderedSet(part.get('required', _OrderedSet())))

        if isinstance(description, (list, tuple)):
            description = '\n'.join(description)

        if name == 'ModulesRequest':  # Hack to accept modules request without arguments (ptvsd: 2050).
            required.discard('arguments')

        classes_to_generate[name] = dict(
            name=name,
            properties=properties,
            base_definitions=base_definitions,
            description=description,
            required=required,
            is_enum=is_enum,
            enum_values=enum_values,
        )
    return classes_to_generate
def collect_bases(curr_class, classes_to_generate, memo=None):
    """Return the transitive base-definition names of *curr_class*, depth-first
    and in declaration order, without duplicates.

    :param curr_class: intermediate class dict (must have 'base_definitions').
    :param classes_to_generate: mapping of definition name -> class dict.
    :param memo: names already collected, shared across recursive calls.

    Bug fix: the original checked ``base_definition not in memo`` but never
    added anything to memo, so the guard was a no-op -- a base shared by two
    branches (diamond) was collected once per path and a cyclic schema would
    recurse forever. Marking each base in memo before recursing restores the
    intended dedup/cycle protection; callers merge with dict/set update, so
    removing duplicates does not change the merged result.
    """
    ret = []
    if memo is None:
        memo = {}

    for base_definition in curr_class['base_definitions']:
        if base_definition not in memo:
            memo[base_definition] = True  # mark before recursing (also guards cycles)
            ret.append(base_definition)
            ret.extend(collect_bases(classes_to_generate[base_definition], classes_to_generate, memo))

    return ret
def fill_properties_and_required_from_base(classes_to_generate):
    """Merge each class' inherited properties/required entries from its bases.

    Bases are walked from most-distant to nearest so nearer definitions win
    and the resulting property order starts with the base classes' order.
    """
    for class_to_generate in classes_to_generate.values():
        merged_properties = {}
        merged_required = _OrderedSet()

        for base_name in reversed(collect_bases(class_to_generate, classes_to_generate)):
            base_class = classes_to_generate[base_name]
            merged_properties.update(base_class.get('properties', {}))
            merged_required.update(base_class.get('required', _OrderedSet()))

        merged_properties.update(class_to_generate['properties'])
        merged_required.update(class_to_generate['required'])

        class_to_generate['properties'] = merged_properties
        class_to_generate['required'] = merged_required

    # Mirrors the original's quirk of returning the last class processed
    # (the caller discards the value right away).
    return class_to_generate
def update_class_to_generate_description(class_to_generate):
    """Reflow the class description to at most 100 columns and indent every
    line with 4 spaces (ready to be embedded in the generated docstring)."""
    import textwrap

    wrapped_lines = []
    for raw_line in class_to_generate['description'].splitlines():
        wrapped_lines.extend(textwrap.wrap(raw_line.strip(), 100))
        wrapped_lines.append('')  # keep a separator after each original line

    # Drop the trailing blank lines left by the separator above.
    while wrapped_lines and not wrapped_lines[-1]:
        wrapped_lines.pop()

    class_to_generate['description'] = '    ' + '\n    '.join(wrapped_lines)
def update_class_to_generate_type(classes_to_generate, class_to_generate):
    """Resolve property types: '$ref' entries are replaced by Ref objects
    pointing at the referenced intermediate class."""
    for _prop_name, prop_val in class_to_generate.get('properties').items():
        if prop_val.get('type', ''):
            continue  # already has a plain type
        ref_target = prop_val.pop('$ref', '')
        if ref_target:
            assert ref_target.startswith('#/definitions/')
            target_name = ref_target[len('#/definitions/'):]
            prop_val['type'] = Ref(target_name, classes_to_generate[target_name])
def update_class_to_generate_register_dec(classes_to_generate, class_to_generate):
    """Compute the registration decorator lines for the generated class.

    Every class gets '@register'; request/response/event classes whose command
    (or event) name is a single-valued enum additionally get a line such as
    "@register_request('threads')".
    """
    # Defaults for classes that are not messages.
    class_to_generate['register_request'] = ''
    class_to_generate['register_dec'] = '@register'

    properties = class_to_generate.get('properties')
    enum_type = properties.get('type', {}).get('enum')
    if not enum_type or len(enum_type) != 1:
        return
    msg_type = next(iter(enum_type))
    if msg_type not in ('request', 'response', 'event'):
        return

    if msg_type == 'request':
        command = properties.get('command')
    elif msg_type == 'event':
        command = properties.get('event')
    else:
        # The actual command of a response is typed in the matching request.
        response_name = class_to_generate['name']
        request_name = response_name[:-len('Response')] + 'Request'
        if request_name in classes_to_generate:
            command = classes_to_generate[request_name]['properties'].get('command')
        elif response_name == 'ErrorResponse':
            command = {'enum': ['error']}
        else:
            raise AssertionError('Unhandled: %s' % (response_name,))

    if command:
        enum = command.get('enum')
        if enum and len(enum) == 1:
            class_to_generate['register_request'] = '@register_%s(%r)\n' % (msg_type, enum[0])
def extract_prop_name_and_prop(class_to_generate):
    """Return the (name, prop) pairs sorted so required properties come first.

    'seq', even when required, is placed after the other required keys because
    it gets a default of -1 in the generated __init__.
    """
    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))

    def sort_key(item):
        prop_name = item[0]
        if prop_name not in required:
            return 1
        return 0.5 if prop_name == 'seq' else 0

    # sorted() is stable, so original declaration order is kept within groups.
    return sorted(class_to_generate.get('properties').items(), key=sort_key)
def update_class_to_generate_to_json(class_to_generate):
    """Generate the source of the class' to_dict() plus the optional
    update_dict_ids_from_dap()/update_dict_ids_to_dap() classmethods, storing
    the (indented) code under the 'to_dict', 'update_dict_ids_from_dap' and
    'update_dict_ids_to_dap' keys of class_to_generate."""
    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    to_dict_body = ['def to_dict(self, update_ids_to_dap=False):  # noqa (update_ids_to_dap may be unused)']

    # Properties holding ids that must be translated between internal and DAP ids.
    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate['name'], prop_name):
            translate_prop_names.append(prop_name)

    # Load each property into a local (converting lists of schema objects to
    # lists of dicts where needed).
    for prop_name, prop in prop_name_and_prop:
        namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
        to_dict_body.append('    %(prop_name)s = self.%(prop_name)s%(noqa)s' % namespace)

        if prop.get('type') == 'array':
            to_dict_body.append('    if %(prop_name)s and hasattr(%(prop_name)s[0], "to_dict"):' % namespace)
            to_dict_body.append('        %(prop_name)s = [x.to_dict() for x in %(prop_name)s]' % namespace)

    if translate_prop_names:
        to_dict_body.append('    if update_ids_to_dap:')
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
            to_dict_body.append('        if %(prop_name)s is not None:' % namespace)
            to_dict_body.append('            %(prop_name)s = self._translate_id_to_dap(%(prop_name)s)%(noqa)s' % namespace)

    # update_dict_ids_from_dap: only generated when there is something to translate.
    if not translate_prop_names:
        update_dict_ids_from_dap_body = []
    else:
        update_dict_ids_from_dap_body = ['', '', '@classmethod', 'def update_dict_ids_from_dap(cls, dct):']
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_from_dap_body.append('    if %(prop_name)r in dct:' % namespace)
            update_dict_ids_from_dap_body.append('        dct[%(prop_name)r] = cls._translate_id_from_dap(dct[%(prop_name)r])' % namespace)
        update_dict_ids_from_dap_body.append('    return dct')

    class_to_generate['update_dict_ids_from_dap'] = _indent_lines('\n'.join(update_dict_ids_from_dap_body))

    # Required properties go inside the dict literal; optional ones are added
    # afterwards, only when not None.
    to_dict_body.append('    dct = {')
    first_not_required = False

    for prop_name, prop in prop_name_and_prop:
        # Schema-typed (non-enum) values serialize through their own to_dict().
        use_to_dict = prop['type'].__class__ == Ref and not prop['type'].ref_data.get('is_enum', False)
        is_array = prop['type'] == 'array'
        ref_array_cls_name = ''
        if is_array:
            ref = prop['items'].get('$ref')
            if ref is not None:
                ref_array_cls_name = ref.split('/')[-1]

        namespace = dict(prop_name=prop_name, ref_array_cls_name=ref_array_cls_name)
        if prop_name in required:
            if use_to_dict:
                to_dict_body.append('        %(prop_name)r: %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap),' % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append('        %(prop_name)r: [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s,' % namespace)
                else:
                    to_dict_body.append('        %(prop_name)r: %(prop_name)s,' % namespace)
        else:
            if not first_not_required:
                # First optional property: close the dict literal.
                first_not_required = True
                to_dict_body.append('    }')

            to_dict_body.append('    if %(prop_name)s is not None:' % namespace)
            if use_to_dict:
                to_dict_body.append('        dct[%(prop_name)r] = %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap)' % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append('        dct[%(prop_name)r] = [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s' % namespace)
                else:
                    to_dict_body.append('        dct[%(prop_name)r] = %(prop_name)s' % namespace)

    if not first_not_required:
        # Everything was required: the dict literal was never closed.
        first_not_required = True
        to_dict_body.append('    }')

    to_dict_body.append('    dct.update(self.kwargs)')
    to_dict_body.append('    return dct')

    class_to_generate['to_dict'] = _indent_lines('\n'.join(to_dict_body))

    # update_dict_ids_to_dap: mirror of update_dict_ids_from_dap.
    if not translate_prop_names:
        update_dict_ids_to_dap_body = []
    else:
        update_dict_ids_to_dap_body = ['', '', '@classmethod', 'def update_dict_ids_to_dap(cls, dct):']
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_to_dap_body.append('    if %(prop_name)r in dct:' % namespace)
            update_dict_ids_to_dap_body.append('        dct[%(prop_name)r] = cls._translate_id_to_dap(dct[%(prop_name)r])' % namespace)
        update_dict_ids_to_dap_body.append('    return dct')

    class_to_generate['update_dict_ids_to_dap'] = _indent_lines('\n'.join(update_dict_ids_to_dap_body))
def update_class_to_generate_init(class_to_generate):
    """Generate the source of the class' __init__ (arguments, docstring and
    body) and store it, indented, under class_to_generate['init']."""
    args = []
    init_body = []
    docstring = []

    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate['name'], prop_name):
            translate_prop_names.append(prop_name)

        enum = prop.get('enum')
        if enum and len(enum) == 1:
            # Single-valued enum (e.g. the message 'type'): fixed, not a parameter.
            init_body.append('    self.%(prop_name)s = %(enum)r' % dict(prop_name=prop_name, enum=next(iter(enum))))
        else:
            if prop_name in required:
                if prop_name == 'seq':
                    # seq gets a -1 placeholder; it's overwritten before sending.
                    args.append(prop_name + '=-1')
                else:
                    args.append(prop_name)
            else:
                args.append(prop_name + '=None')

            if prop['type'].__class__ == Ref:
                ref = prop['type']
                ref_data = ref.ref_data
                if ref_data.get('is_enum', False):
                    # Enum-typed: validate against the enum's VALID_VALUES.
                    init_body.append('    if %s is not None:' % (prop_name,))
                    init_body.append('        assert %s in %s.VALID_VALUES' % (prop_name, str(ref)))
                    init_body.append('    self.%(prop_name)s = %(prop_name)s' % dict(
                        prop_name=prop_name))
                else:
                    # Schema-typed: accept either an instance of the schema class
                    # or a plain dict (converted to the proper class here).
                    namespace = dict(
                        prop_name=prop_name,
                        ref_name=str(ref)
                    )
                    init_body.append('    if %(prop_name)s is None:' % namespace)
                    init_body.append('        self.%(prop_name)s = %(ref_name)s()' % namespace)
                    init_body.append('    else:')
                    init_body.append('        self.%(prop_name)s = %(ref_name)s(update_ids_from_dap=update_ids_from_dap, **%(prop_name)s) if %(prop_name)s.__class__ != %(ref_name)s else %(prop_name)s' % namespace
                    )
            else:
                init_body.append('    self.%(prop_name)s = %(prop_name)s' % dict(prop_name=prop_name))

                if prop['type'] == 'array':
                    # Arrays of schema objects: translate ids in each element dict.
                    ref = prop['items'].get('$ref')
                    if ref is not None:
                        ref_array_cls_name = ref.split('/')[-1]
                        init_body.append('    if update_ids_from_dap and self.%(prop_name)s:' % dict(prop_name=prop_name))
                        init_body.append('        for o in self.%(prop_name)s:' % dict(prop_name=prop_name))
                        init_body.append('            %(ref_array_cls_name)s.update_dict_ids_from_dap(o)' % dict(ref_array_cls_name=ref_array_cls_name))

        prop_type = prop['type']
        prop_description = prop.get('description', '')

        if isinstance(prop_description, (list, tuple)):
            prop_description = '\n    '.join(prop_description)

        docstring.append(':param %(prop_type)s %(prop_name)s: %(prop_description)s' % dict(
            prop_type=prop_type, prop_name=prop_name, prop_description=prop_description))

    if translate_prop_names:
        init_body.append('    if update_ids_from_dap:')
        for prop_name in translate_prop_names:
            init_body.append('        self.%(prop_name)s = self._translate_id_from_dap(self.%(prop_name)s)' % dict(prop_name=prop_name))

    docstring = _indent_lines('\n'.join(docstring))
    init_body = '\n'.join(init_body)

    # Actually bundle the whole __init__ from the parts.
    args = ', '.join(args)
    if args:
        args = ', ' + args

    # Note: added kwargs because some messages are expected to be extended by the user (so, we'll actually
    # make all extendable so that we don't have to worry about which ones -- we lose a little on typing,
    # but may be better than doing an allow list based on something only pointed out in the documentation).
    class_to_generate['init'] = '''def __init__(self%(args)s, update_ids_from_dap=False, **kwargs):  # noqa (update_ids_from_dap may be unused)
    """
%(docstring)s
    """
%(init_body)s
    self.kwargs = kwargs
''' % dict(args=args, init_body=init_body, docstring=docstring)

    class_to_generate['init'] = _indent_lines(class_to_generate['init'])
def update_class_to_generate_props(class_to_generate):
    """Store the generated '__props__ = {...}' source (json-serialized
    properties) under class_to_generate['props']."""
    import json

    def default(o):
        # Refs serialize as their target class name.
        if isinstance(o, Ref):
            return o.ref
        raise AssertionError('Unhandled: %s' % (o,))

    dumped = json.dumps(class_to_generate['properties'], indent=4, default=default)
    class_to_generate['props'] = '    __props__ = %s' % _indent_lines(dumped).strip()
def update_class_to_generate_refs(class_to_generate):
    """Store the generated '__refs__' source: the names of the properties whose
    type is another schema class (a Ref)."""
    ref_names = _OrderedSet(
        key for (key, val) in class_to_generate['properties'].items()
        if val['type'].__class__ == Ref)
    class_to_generate['refs'] = '    __refs__ = %s' % ref_names.set_repr()
def update_class_to_generate_enums(class_to_generate):
    """Store the generated enum-constant block (UPPER = 'value' lines plus
    VALID_VALUES) for enum classes; empty string otherwise."""
    class_to_generate['enums'] = ''
    if not class_to_generate.get('is_enum', False):
        return

    parts = []
    for value in class_to_generate['enum_values']:
        parts.append('    %s = %r\n' % (value.upper(), value))
    parts.append('\n')
    parts.append('    VALID_VALUES = %s\n\n' % _OrderedSet(class_to_generate['enum_values']).set_repr())
    class_to_generate['enums'] = ''.join(parts)
def update_class_to_generate_objects(classes_to_generate, class_to_generate):
    """Replace inline 'object'-typed properties by references to newly created
    companion classes (named <Owner><Prop>), recursing into nested objects.

    New classes are added to classes_to_generate as a side effect.
    """
    properties = class_to_generate['properties']
    for key, val in properties.items():
        if 'type' not in val:
            # Untyped property in the schema: mark it so generation can proceed.
            val['type'] = 'TypeNA'
            continue

        if val['type'] == 'object':
            # Promote the inline object to its own generated class.
            create_new = val.copy()
            create_new.update({
                'name': '%s%s' % (class_to_generate['name'], key.title()),
                'description': '    "%s" of %s' % (key, class_to_generate['name'])
            })
            if 'properties' not in create_new:
                create_new['properties'] = {}

            assert create_new['name'] not in classes_to_generate
            classes_to_generate[create_new['name']] = create_new

            update_class_to_generate_type(classes_to_generate, create_new)
            update_class_to_generate_props(create_new)

            # Update nested object types
            update_class_to_generate_objects(classes_to_generate, create_new)

            # Point the original property at the new class.
            val['type'] = Ref(create_new['name'], classes_to_generate[create_new['name']])
            val.pop('properties', None)
def gen_debugger_protocol():
    """Entry point: load the DAP schema (plus the pydevd custom extensions),
    compute every generated-class part and write pydevd_schema.py next to this
    script."""
    import os.path
    import sys

    if sys.version_info[:2] < (3, 6):
        raise AssertionError('Must be run with Python 3.6 onwards (to keep dict order).')

    classes_to_generate = create_classes_to_generate_structure(load_schema_data())
    classes_to_generate.update(create_classes_to_generate_structure(load_custom_schema_data()))

    class_to_generate = fill_properties_and_required_from_base(classes_to_generate)

    # list(): update_class_to_generate_objects may add new classes while iterating.
    for class_to_generate in list(classes_to_generate.values()):
        update_class_to_generate_description(class_to_generate)
        update_class_to_generate_type(classes_to_generate, class_to_generate)
        update_class_to_generate_props(class_to_generate)
        update_class_to_generate_objects(classes_to_generate, class_to_generate)

    for class_to_generate in classes_to_generate.values():
        update_class_to_generate_refs(class_to_generate)
        update_class_to_generate_init(class_to_generate)
        update_class_to_generate_enums(class_to_generate)
        update_class_to_generate_to_json(class_to_generate)
        update_class_to_generate_register_dec(classes_to_generate, class_to_generate)

    class_template = '''
%(register_request)s%(register_dec)s
class %(name)s(BaseSchema):
    """
%(description)s

    Note: automatically generated code. Do not edit manually.
    """

%(enums)s%(props)s
%(refs)s

    __slots__ = list(__props__.keys()) + ['kwargs']

%(init)s%(update_dict_ids_from_dap)s

%(to_dict)s%(update_dict_ids_to_dap)s
'''

    contents = []
    contents.append('# coding: utf-8')
    contents.append('# Automatically generated code.')
    contents.append('# Do not edit manually.')
    contents.append('# Generated by running: %s' % os.path.basename(__file__))
    contents.append('from .pydevd_base_schema import BaseSchema, register, register_request, register_response, register_event')
    contents.append('')

    for class_to_generate in classes_to_generate.values():
        contents.append(class_template % class_to_generate)

    parent_dir = os.path.dirname(__file__)
    schema = os.path.join(parent_dir, 'pydevd_schema.py')
    with open(schema, 'w', encoding='utf-8') as stream:
        stream.write('\n'.join(contents))
def _indent_lines(lines, indent=' '):
out_lines = []
for line in lines.splitlines(keepends=True):
out_lines.append(indent + line)
return ''.join(out_lines)
if __name__ == '__main__':
    # Regenerate pydevd_schema.py from the (possibly downloaded) DAP json schema.
    gen_debugger_protocol()

View File

@ -0,0 +1,147 @@
from _pydevd_bundle._debug_adapter.pydevd_schema_log import debug_exception
import json
import itertools
from functools import partial
class BaseSchema(object):
    """Base class for all generated DAP schema classes.

    Holds the process-wide translation tables between pydevd's internal ids
    and the ids sent over the wire to the DAP client: wire ids are handed out
    sequentially starting at 1, while 0, None and '*' map to themselves.
    """

    @staticmethod
    def initialize_ids_translation():
        """(Re)seed the id-translation tables, discarding prior mappings."""
        BaseSchema._dap_id_to_obj_id = {0: 0, None: None}
        BaseSchema._obj_id_to_dap_id = {0: 0, None: None}
        BaseSchema._next_dap_id = partial(next, itertools.count(1))

    def to_json(self):
        # Subclasses provide to_dict().
        return json.dumps(self.to_dict())

    @staticmethod
    def _translate_id_to_dap(obj_id):
        """Return the DAP id for *obj_id*, allocating a fresh one on first use."""
        if obj_id == '*':
            return '*'
        # Note: we don't invalidate ids, so, if some object starts using the same id
        # of another object, the same id will be used.
        # Bug fix: a membership lookup is used instead of .get() so that the
        # pre-seeded None -> None identity mapping is honored (with .get(), a
        # stored None was indistinguishable from a missing key, and None was
        # given a fresh sequential id).
        try:
            return BaseSchema._obj_id_to_dap_id[obj_id]
        except KeyError:
            dap_id = BaseSchema._obj_id_to_dap_id[obj_id] = BaseSchema._next_dap_id()
            BaseSchema._dap_id_to_obj_id[dap_id] = obj_id
            return dap_id

    @staticmethod
    def _translate_id_from_dap(dap_id):
        """Return the internal id for *dap_id*; raise KeyError if unknown."""
        if dap_id == '*':
            return '*'
        try:
            return BaseSchema._dap_id_to_obj_id[dap_id]
        except KeyError:
            raise KeyError('Wrong ID sent from the client: %s' % (dap_id,))

    @staticmethod
    def update_dict_ids_to_dap(dct):
        # Overridden by generated subclasses that have ids to translate.
        return dct

    @staticmethod
    def update_dict_ids_from_dap(dct):
        # Overridden by generated subclasses that have ids to translate.
        return dct
# Seed the id-translation tables at import time.
BaseSchema.initialize_ids_translation()

# Registries filled in by the register* decorators below, mapping DAP
# command/event names (and class names) to the generated schema classes.
_requests_to_types = {}
_responses_to_types = {}
_event_to_types = {}
_all_messages = {}
def register(cls):
    """Class decorator: record *cls* in the registry of all schema messages."""
    _all_messages[cls.__name__] = cls
    return cls
def register_request(command):
    """Return a class decorator registering its class as the request type
    for *command*."""
    def _decorator(cls):
        _requests_to_types[command] = cls
        return cls
    return _decorator
def register_response(command):
    """Return a class decorator registering its class as the response type
    for *command*."""
    def _decorator(cls):
        _responses_to_types[command] = cls
        return cls
    return _decorator
def register_event(event):
    """Return a class decorator registering its class as the type for *event*."""
    def _decorator(cls):
        _event_to_types[event] = cls
        return cls
    return _decorator
def from_dict(dct, update_ids_from_dap=False):
    """Instantiate the registered schema class matching a raw DAP message dict.

    Raises ValueError when the message type or command/event is unknown.
    """
    msg_type = dct.get('type')
    if msg_type is None:
        raise ValueError('Unable to make sense of message: %s' % (dct,))

    if msg_type == 'request':
        to_type, use = _requests_to_types, dct['command']
    elif msg_type == 'response':
        to_type, use = _responses_to_types, dct['command']
    else:
        to_type, use = _event_to_types, dct['event']

    cls = to_type.get(use)
    if cls is None:
        raise ValueError('Unable to create message from dict: %s. %s not in %s' % (dct, use, sorted(to_type.keys())))
    try:
        return cls(update_ids_from_dap=update_ids_from_dap, **dct)
    except:
        debug_exception('Error creating %s from %s' % (cls, dct))
        raise
def from_json(json_msg, update_ids_from_dap=False, on_dict_loaded=lambda dct: None):
    """Deserialize a DAP message (str or utf-8 bytes) into a schema instance.

    on_dict_loaded is called with the raw dict before it's turned into a class.
    """
    if isinstance(json_msg, bytes):
        json_msg = json_msg.decode('utf-8')

    as_dict = json.loads(json_msg)
    on_dict_loaded(as_dict)
    try:
        return from_dict(as_dict, update_ids_from_dap=update_ids_from_dap)
    except:
        if as_dict.get('type') == 'response' and not as_dict.get('success'):
            # Error messages may not have required body (return as a generic Response).
            Response = _all_messages['Response']
            return Response(**as_dict)
        raise
def get_response_class(request):
    """Return the response class for *request* (a schema instance or raw dict)."""
    command = request['command'] if request.__class__ == dict else request.command
    return _responses_to_types[command]
def build_response(request, kwargs=None):
    """Build the response instance matching *request*.

    success defaults to True; seq defaults to -1 and is overwritten just
    before sending. Note: when *kwargs* is passed it is mutated in place.
    """
    if kwargs is None:
        kwargs = {'success': True}
    else:
        kwargs.setdefault('success', True)

    response_class = _responses_to_types[request.command]
    kwargs.setdefault('seq', -1)  # To be overwritten before sending
    return response_class(command=request.command, request_seq=request.seq, **kwargs)

View File

@ -0,0 +1,46 @@
import os
import traceback
from _pydevd_bundle.pydevd_constants import ForkSafeLock
# Pid prefix put on every line so interleaved multi-process logs stay readable.
_pid = os.getpid()
_pid_msg = '%s: ' % (_pid,)

# Serializes writes to DEBUG_FILE (ForkSafeLock: presumably reset after
# fork -- see _pydevd_bundle.pydevd_constants).
_debug_lock = ForkSafeLock()

# Flip to True to enable the debug()/debug_exception() helpers below.
DEBUG = False
# Log destination, created next to this module.
DEBUG_FILE = os.path.join(os.path.dirname(__file__), '__debug_output__.txt')
def debug(msg):
    """Append *msg* (str or bytes) to DEBUG_FILE, prefixed with the pid.

    No-op unless the module-level DEBUG flag is True. A trailing newline is
    added when missing, and the file is opened in binary or text append mode
    to match the type of *msg*.
    """
    if DEBUG:
        with _debug_lock:
            _pid_prefix = _pid_msg
            if isinstance(msg, bytes):
                # Bytes message: the prefix must be bytes too and the file is
                # opened in binary mode.
                _pid_prefix = _pid_prefix.encode('utf-8')

                if not msg.endswith(b'\r') and not msg.endswith(b'\n'):
                    msg += b'\n'
                mode = 'a+b'
            else:
                if not msg.endswith('\r') and not msg.endswith('\n'):
                    msg += '\n'
                mode = 'a+'
            with open(DEBUG_FILE, mode) as stream:
                stream.write(_pid_prefix)
                stream.write(msg)
def debug_exception(msg=None):
    """Append *msg* (if given) plus the current exception traceback to DEBUG_FILE.

    No-op unless the module-level DEBUG flag is True.
    """
    if DEBUG:
        if msg:
            debug(msg)

        with _debug_lock:
            with open(DEBUG_FILE, 'a+') as stream:
                # Bug fix: the stream is opened in text mode, so the pid prefix
                # must remain a str. The original encoded it to utf-8 bytes
                # whenever msg was bytes, which made stream.write() raise
                # TypeError (msg itself was already logged via debug() above).
                stream.write(_pid_msg)
                traceback.print_exc(file=stream)

View File

@ -0,0 +1,554 @@
"""
A copy of the code module in the standard library with some changes to work with
async evaluation.
Utilities needed to emulate Python's interactive interpreter.
"""
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
import sys
import traceback
import inspect
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
r"""Utilities to compile possibly incomplete Python source code.
This module provides two interfaces, broadly similar to the builtin
function compile(), which take program text, a filename and a 'mode'
and:
- Return code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
syntax error (OverflowError and ValueError can be produced by
malformed literals).
Approach:
First, check if the source consists entirely of blank lines and
comments; if so, replace it with 'pass', because the built-in
parser doesn't always do the right thing for these.
Compile three times: as is, with \n, and with \n\n appended. If it
compiles as is, it's complete. If it compiles with one \n appended,
we expect more. If it doesn't compile either way, we compare the
error we get when compiling with \n or \n\n appended. If the errors
are the same, the code is broken. But if the errors are different, we
expect more. Not intuitive; not even guaranteed to hold in future
releases; but this matches the compiler's behavior from Python 1.4
through 2.2, at least.
Caveat:
It is possible (but not likely) that the parser stops parsing with a
successful outcome before reaching the end of the source; in this
case, trailing symbols may be ignored instead of causing an error.
For example, a backslash followed by two newlines may be followed by
arbitrary garbage. This will be fixed once the API for the parser is
better.
The two interfaces are:
compile_command(source, filename, symbol):
Compiles a single command in the manner described above.
CommandCompiler():
Instances of this class have __call__ methods identical in
signature to compile_command; the difference is that if the
instance compiles program text containing a __future__ statement,
the instance 'remembers' and compiles all subsequent program texts
with the statement in force.
The module also provides another class:
Compile():
Instances of this class act like the built-in function compile,
but with 'memory' in the sense described above.
"""
import __future__
# All __future__ feature descriptors known to this interpreter; Compile uses
# their compiler flags to "remember" features enabled by compiled code.
_features = [getattr(__future__, fname)
             for fname in __future__.all_feature_names]

__all__ = ["compile_command", "Compile", "CommandCompiler"]

PyCF_DONT_IMPLY_DEDENT = 0x200  # Matches pythonrun.h
def _maybe_compile(compiler, source, filename, symbol):
# Check for source consisting of only blank lines and comments
for line in source.split("\n"):
line = line.strip()
if line and line[0] != '#':
break # Leave it alone
else:
if symbol != "eval":
source = "pass" # Replace it with a 'pass' statement
err = err1 = err2 = None
code = code1 = code2 = None
try:
code = compiler(source, filename, symbol)
except SyntaxError as err:
pass
try:
code1 = compiler(source + "\n", filename, symbol)
except SyntaxError as e:
err1 = e
try:
code2 = compiler(source + "\n\n", filename, symbol)
except SyntaxError as e:
err2 = e
try:
if code:
return code
if not code1 and repr(err1) == repr(err2):
raise err1
finally:
err1 = err2 = None
def _compile(source, filename, symbol):
    # Compile without implying a trailing DEDENT, per the codeop convention
    # (PyCF_DONT_IMPLY_DEDENT), so unfinished blocks raise instead of compiling.
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read;
                defaults to "<input>"
    symbol -- optional grammar start symbol; "single" (default) or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    result = _maybe_compile(_compile, source, filename, symbol)
    return result
class Compile:
    """Instances of this class behave much like the built-in compile
    function, but if one is used to compile text containing a future
    statement, it "remembers" and compiles all subsequent program texts
    with the statement in force.
    """

    def __init__(self):
        self.flags = PyCF_DONT_IMPLY_DEDENT
        try:
            # Allow top-level `await` where the interpreter supports it;
            # older interpreters simply don't export this flag.
            # Fix: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit -- only ImportError is expected.
            from ast import PyCF_ALLOW_TOP_LEVEL_AWAIT
            self.flags |= PyCF_ALLOW_TOP_LEVEL_AWAIT
        except ImportError:
            pass

    def __call__(self, source, filename, symbol):
        # dont_inherit=1: do not inherit future statements from this frame;
        # only the explicitly remembered flags apply.
        codeob = compile(source, filename, symbol, self.flags, 1)
        # Remember any __future__ feature the compiled source enabled.
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                self.flags |= feature.compiler_flag
        return codeob
class CommandCompiler:
    """Stateful variant of compile_command.

    Instances of this class have __call__ methods identical in signature
    to compile_command; the difference is that if the instance compiles
    program text containing a __future__ statement, the instance
    'remembers' and compiles all subsequent program texts with the
    statement in force.
    """

    def __init__(self):
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command
          is a syntax error (OverflowError and ValueError can be produced
          by malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
"compile_command"]
from _pydev_bundle._pydev_saved_modules import threading
class _EvalAwaitInNewEventLoop(threading.Thread):
def __init__(self, compiled, updated_globals, updated_locals):
threading.Thread.__init__(self)
self.daemon = True
self._compiled = compiled
self._updated_globals = updated_globals
self._updated_locals = updated_locals
# Output
self.evaluated_value = None
self.exc = None
async def _async_func(self):
return await eval(self._compiled, self._updated_locals, self._updated_globals)
def run(self):
try:
import asyncio
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
self.evaluated_value = asyncio.run(self._async_func())
except:
self.exc = sys.exc_info()
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    This is a vendored copy of the stdlib ``code`` module class with a
    pydevd extension in runcode() for top-level ``await``.
    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.
        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases
        (unless an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next line.
        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.
        """
        try:
            is_async = False
            # pydevd extension over the stdlib `code` module: a code object
            # compiled with top-level-await support carries CO_COROUTINE and
            # must be awaited rather than exec'd directly.
            if hasattr(inspect, 'CO_COROUTINE'):
                is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE

            if is_async:
                # Run the coroutine to completion on a fresh event loop in a
                # helper thread, then re-raise any exception it captured.
                t = _EvalAwaitInNewEventLoop(code, self.locals, None)
                t.start()
                t.join()

                if t.exc:
                    raise t.exc[1].with_traceback(t.exc[2])
            else:
                exec(code, self.locals)
        except SystemExit:
            raise
        except:
            self.showtraceback()

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.
        """
        type, value, tb = sys.exc_info()
        # Stdlib convention: expose the last error (e.g. for pdb.pm()).
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value.args
            except ValueError:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        if sys.excepthook is sys.__excepthook__:
            lines = traceback.format_exception_only(type, value)
            self.write(''.join(lines))
        else:
            # If someone has set sys.excepthook, we let that take precedence
            # over self.write
            sys.excepthook(type, value, tb)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.
        """
        sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
        sys.last_traceback = last_tb
        try:
            # tb_next skips the frame belonging to runcode() itself.
            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
            if sys.excepthook is sys.__excepthook__:
                self.write(''.join(lines))
            else:
                # If someone has set sys.excepthook, we let that take precedence
                # over self.write
                sys.excepthook(ei[0], ei[1], last_tb)
        finally:
            # Break reference cycles through the traceback.
            last_tb = ei = None

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.
        """
        sys.stderr.write(data)
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.
    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.
        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None, exitmsg=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        The optional exitmsg argument specifies the exit message
        printed when exiting.  Pass the empty string to suppress
        printing an exit message.  If exitmsg is not given or None,
        a default message is printed.
        """
        # Ensure the prompts exist (the real interpreter only defines
        # them in interactive mode).
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        elif banner:
            self.write("%s\n" % str(banner))
        # `more` is truthy while the buffered input is an incomplete statement.
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C cancels the pending (possibly multi-line) input.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
        if exitmsg is None:
            self.write('now exiting %s...\n' % self.__class__.__name__)
        elif exitmsg != '':
            self.write('%s\n' % exitmsg)

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).
        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        input(); a subclass may replace this with a different
        implementation.
        """
        return input(prompt)
def interact(banner=None, readfunc=None, local=None, exitmsg=None):
    """Closely emulate the interactive Python interpreter.

    This is a backwards compatible interface to the InteractiveConsole
    class.  When readfunc is not specified, it attempts to import the
    readline module to enable GNU readline if it is available.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()
    exitmsg -- passed to InteractiveConsole.interact()
    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best effort only: importing readline enables line editing.
        try:
            import readline  # noqa: F401
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner, exitmsg)
if __name__ == "__main__":
    import argparse

    # Mirrors the stdlib `python -m code` entry point: -q suppresses the
    # startup banner, as does running the interpreter itself with -q.
    parser = argparse.ArgumentParser()
    parser.add_argument('-q', action='store_true',
                        help="don't print version and copyright messages")
    args = parser.parse_args()
    if args.q or sys.flags.quiet:
        banner = ''
    else:
        banner = None
    interact(banner)

# ---------------------------------------------------------------------------
# Scrape artifact preserved as a comment ("View File" / "@ -0,0 +1,19 @@"):
# boundary between two vendored files concatenated into this dump.
# ---------------------------------------------------------------------------
# Defines which version of the PyDBAdditionalThreadInfo we'll use.
from _pydevd_bundle.pydevd_constants import ENV_FALSE_LOWER_VALUES, USE_CYTHON_FLAG, \
ENV_TRUE_LOWER_VALUES
if USE_CYTHON_FLAG in ENV_TRUE_LOWER_VALUES:
# We must import the cython version if forcing cython
from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock # @UnusedImport
elif USE_CYTHON_FLAG in ENV_FALSE_LOWER_VALUES:
# Use the regular version if not forcing cython
from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock # @UnusedImport @Reimport
else:
# Regular: use fallback if not found (message is already given elsewhere).
try:
from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock
except ImportError:
from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock # @UnusedImport

# ---------------------------------------------------------------------------
# Scrape artifact preserved as a comment ("View File" / "@ -0,0 +1,153 @@"):
# boundary between two vendored files concatenated into this dump.
# ---------------------------------------------------------------------------
from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock,
_current_frames)
from _pydev_bundle import pydev_log
# IFDEF CYTHON
# pydev_log.debug("Using Cython speedups")
# ELSE
from _pydevd_bundle.pydevd_frame import PyDBFrame
# ENDIF
version = 11
#=======================================================================================================================
# PyDBAdditionalThreadInfo
#=======================================================================================================================
# Per-thread debugger state: an instance is attached to each thread the
# debugger tracks (see set_additional_thread_info) and holds that thread's
# stepping/suspension state.
#
# NOTE: the `# IFDEF CYTHON` / `# ELSE` / `# ENDIF` comments below are markers
# consumed when generating the compiled (Cython) flavor of this class -- keep
# them intact.
# IFDEF CYTHON
# cdef class PyDBAdditionalThreadInfo:
# ELSE
class PyDBAdditionalThreadInfo(object):
# ENDIF

    # Note: the params in cython are declared in pydevd_cython.pxd.
    # IFDEF CYTHON
    # ELSE
    __slots__ = [
        'pydev_state',
        'pydev_step_stop',
        'pydev_original_step_cmd',
        'pydev_step_cmd',
        'pydev_notify_kill',
        'pydev_django_resolve_frame',
        'pydev_call_from_jinja2',
        'pydev_call_inside_jinja2',
        'is_tracing',
        'conditional_breakpoint_exception',
        'pydev_message',
        'suspend_type',
        'pydev_next_line',
        'pydev_func_name',
        'suspended_at_unhandled',
        'trace_suspend_type',
        'top_level_thread_tracer_no_back_frames',
        'top_level_thread_tracer_unhandled',
        'thread_tracer',
        'step_in_initial_location',

        # Used for CMD_SMART_STEP_INTO (to know which smart step into variant to use)
        'pydev_smart_parent_offset',
        'pydev_smart_child_offset',

        # Used for CMD_SMART_STEP_INTO (list[_pydevd_bundle.pydevd_bytecode_utils.Variant])
        # Filled when the cmd_get_smart_step_into_variants is requested (so, this is a copy
        # of the last request for a given thread and pydev_smart_parent_offset/pydev_smart_child_offset relies on it).
        'pydev_smart_step_into_variants',
        'target_id_to_smart_step_into_variant',

        'pydev_use_scoped_step_frame',
    ]
    # ENDIF

    def __init__(self):
        self.pydev_state = STATE_RUN  # STATE_RUN or STATE_SUSPEND
        self.pydev_step_stop = None

        # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to
        # say the action that started it and the other is to say what's the current tracing behavior
        # (because it's possible that we start with a step over but may have to switch to a
        # different step strategy -- for instance, if a step over is done and we return the current
        # method the strategy is changed to a step in).
        self.pydev_original_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.
        self.pydev_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.

        self.pydev_notify_kill = False
        self.pydev_django_resolve_frame = False
        self.pydev_call_from_jinja2 = None
        self.pydev_call_inside_jinja2 = None
        self.is_tracing = 0
        self.conditional_breakpoint_exception = None
        self.pydev_message = ''
        self.suspend_type = PYTHON_SUSPEND
        self.pydev_next_line = -1
        self.pydev_func_name = '.invalid.'  # Must match the type in cython
        self.suspended_at_unhandled = False
        self.trace_suspend_type = 'trace'  # 'trace' or 'frame_eval'
        self.top_level_thread_tracer_no_back_frames = []
        self.top_level_thread_tracer_unhandled = None
        self.thread_tracer = None
        self.step_in_initial_location = None
        self.pydev_smart_parent_offset = -1
        self.pydev_smart_child_offset = -1
        self.pydev_smart_step_into_variants = ()
        self.target_id_to_smart_step_into_variant = {}

        # Flag to indicate ipython use-case where each line will be executed as a call/line/return
        # in a new new frame but in practice we want to consider each new frame as if it was all
        # part of the same frame.
        #
        # In practice this means that a step over shouldn't revert to a step in and we need some
        # special logic to know when we should stop in a step over as we need to consider 2
        # different frames as being equal if they're logically the continuation of a frame
        # being executed by ipython line by line.
        #
        # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003
        self.pydev_use_scoped_step_frame = False

    def get_topmost_frame(self, thread):
        '''
        Gets the topmost frame for the given thread. Note that it may be None
        and callers should remove the reference to the frame as soon as possible
        to avoid disturbing user code.
        '''
        # sys._current_frames(): dictionary with thread id -> topmost frame
        current_frames = _current_frames()
        topmost_frame = current_frames.get(thread.ident)
        if topmost_frame is None:
            # Note: this is expected for dummy threads (so, getting the topmost frame should be
            # treated as optional).
            pydev_log.info(
                'Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n'
                'GEVENT_SUPPORT: %s',
                thread,
                thread.ident,
                id(thread),
                current_frames,
                SUPPORT_GEVENT,
            )

        return topmost_frame

    def __str__(self):
        # Compact one-line summary (useful when logging thread states).
        return 'State:%s Stop:%s Cmd: %s Kill:%s' % (
            self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill)
# Guards creation of PyDBAdditionalThreadInfo instances across fork().
_set_additional_thread_info_lock = ForkSafeLock()


def set_additional_thread_info(thread):
    """Return the PyDBAdditionalThreadInfo attached to *thread*, creating it if needed.

    The fast path is a plain attribute read; only when the info is missing
    (or None) do we take the lock, so concurrent callers can't each attach
    a different info object to the same thread.
    """
    try:
        additional_info = thread.additional_info
        if additional_info is None:
            raise AttributeError()
    # Fix: was a bare `except:`; the try block only ever raises
    # AttributeError (missing attribute, or raised explicitly above).
    except AttributeError:
        with _set_additional_thread_info_lock:
            # If it's not there, set it within a lock to avoid any racing
            # conditions.
            additional_info = getattr(thread, 'additional_info', None)
            if additional_info is None:
                additional_info = PyDBAdditionalThreadInfo()
            thread.additional_info = additional_info

    return additional_info

# ---------------------------------------------------------------------------
# Scrape artifact preserved as a comment ("View File" / "@ -0,0 +1,184 @@"):
# boundary between two vendored files concatenated into this dump.
# ---------------------------------------------------------------------------
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_import_class
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
from _pydev_bundle._pydev_saved_modules import threading
class ExceptionBreakpoint(object):
    """A breakpoint on an exception class, identified by its qualified name."""

    def __init__(
        self,
        qname,
        condition,
        expression,
        notify_on_handled_exceptions,
        notify_on_unhandled_exceptions,
        notify_on_user_unhandled_exceptions,
        notify_on_first_raise_only,
        ignore_libraries
    ):
        # May be None when the class can't be resolved right now.
        exctype = get_exception_class(qname)

        self.qname = qname
        self.name = exctype.__name__ if exctype is not None else None

        self.condition = condition
        self.expression = expression
        self.notify_on_unhandled_exceptions = notify_on_unhandled_exceptions
        self.notify_on_handled_exceptions = notify_on_handled_exceptions
        self.notify_on_first_raise_only = notify_on_first_raise_only
        self.notify_on_user_unhandled_exceptions = notify_on_user_unhandled_exceptions
        self.ignore_libraries = ignore_libraries

        self.type = exctype

    def __str__(self):
        return self.qname

    @property
    def has_condition(self):
        # Only a textual condition applies to exception breakpoints.
        return self.condition is not None

    def handle_hit_condition(self, frame):
        # Exception breakpoints never carry a hit-count condition.
        return False
class LineBreakpoint(object):
    """A breakpoint at a source line, optionally conditional and/or a logpoint."""

    def __init__(self, breakpoint_id, line, condition, func_name, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.breakpoint_id = breakpoint_id
        self.line = line
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        """True when either a plain condition or a hit-count condition is set."""
        return bool(self.condition or self.hit_condition)

    def handle_hit_condition(self, frame):
        """Count one hit and evaluate the hit-count condition against *frame*.

        '@HIT@' in the condition text is replaced by the current hit count
        before evaluation.  Returns False when there is no hit condition or
        when evaluation fails.
        """
        if not self.hit_condition:
            return False
        with self._hit_condition_lock:
            self._hit_count += 1
            cond_source = self.hit_condition.replace('@HIT@', str(self._hit_count))
            try:
                return bool(eval(cond_source, frame.f_globals, frame.f_locals))
            except Exception:
                return False
class FunctionBreakpoint(object):
    """A breakpoint on entry to a named function, optionally conditional."""

    def __init__(self, func_name, condition, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        """True when either a plain condition or a hit-count condition is set."""
        return bool(self.condition or self.hit_condition)

    def handle_hit_condition(self, frame):
        """Count one hit and evaluate the hit-count condition against *frame*.

        '@HIT@' in the condition text is replaced by the current hit count
        before evaluation.  Returns False when there is no hit condition or
        when evaluation fails.
        """
        if not self.hit_condition:
            return False
        with self._hit_condition_lock:
            self._hit_count += 1
            cond_source = self.hit_condition.replace('@HIT@', str(self._hit_count))
            try:
                return bool(eval(cond_source, frame.f_globals, frame.f_locals))
            except Exception:
                return False
def get_exception_breakpoint(exctype, exceptions):
    """Find the exception breakpoint that matches *exctype*.

    An exact match on the fully-qualified name wins; otherwise the most
    specific registered breakpoint whose type is a base class of *exctype*
    is returned, or None when nothing matches.
    """
    if not exctype:
        exception_full_qname = None
    else:
        exception_full_qname = str(exctype.__module__) + '.' + exctype.__name__

    best = None
    if exceptions is not None:
        if exception_full_qname in exceptions:
            return exceptions[exception_full_qname]
        # No exact match: pick the closest base-class match.
        for candidate in exceptions.values():
            if candidate.type is not None and issubclass(exctype, candidate.type):
                if best is None or issubclass(candidate.type, best.type):
                    best = candidate

    return best
def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
    """Suspend *thread* post-mortem when *arg* matches an uncaught-exception breakpoint.

    :param py_db: the global debugger instance.
    :param thread: the thread the exception escaped from.
    :param additional_info: the thread's PyDBAdditionalThreadInfo.
    :param arg: an (exctype, value, traceback) triple, as from sys.exc_info().
    """
    exctype, value, tb = arg
    break_on_uncaught_exceptions = py_db.break_on_uncaught_exceptions
    if break_on_uncaught_exceptions:
        exception_breakpoint = py_db.get_exception_breakpoint(exctype, break_on_uncaught_exceptions)
    else:
        exception_breakpoint = None

    if not exception_breakpoint:
        return

    if tb is None:  # sometimes it can be None, e.g. with GTK
        return

    # Never stop post-mortem for KeyboardInterrupt.
    if exctype is KeyboardInterrupt:
        return

    if exctype is SystemExit and py_db.ignore_system_exit_code(value):
        return

    # Walk the traceback collecting all frames; the deepest frame NOT
    # excluded by the breakpoint's filters becomes the user-visible frame.
    frames = []
    user_frame = None

    while tb is not None:
        if not py_db.exclude_exception_by_filter(exception_breakpoint, tb):
            user_frame = tb.tb_frame
        frames.append(tb.tb_frame)
        tb = tb.tb_next

    if user_frame is None:
        # Every frame was filtered out: nothing to show the user.
        return

    frames_byid = dict([(id(frame), frame) for frame in frames])
    add_exception_to_frame(user_frame, arg)
    if exception_breakpoint.condition is not None:
        eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, user_frame)
        if not eval_result:
            return

    if exception_breakpoint.expression is not None:
        py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, user_frame)

    try:
        additional_info.pydev_message = exception_breakpoint.qname
    except:
        # Legacy fallback: store the name as utf-8 bytes if assignment of
        # the text fails.
        additional_info.pydev_message = exception_breakpoint.qname.encode('utf-8')

    pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % (exception_breakpoint.qname,))

    py_db.do_stop_on_unhandled_exception(thread, user_frame, frames_byid, arg)
def get_exception_class(kls):
    """Resolve an exception class from its name.

    First evaluates *kls* (handles builtins such as 'ValueError'); if that
    fails for any reason, falls back to importing it as a dotted name.

    NOTE(review): eval() on a caller-provided string executes arbitrary
    expressions; the name is expected to come from the IDE/debug client,
    not from untrusted input.
    """
    try:
        return eval(kls)
    except:
        return pydevd_import_class.import_name(kls)

# ---------------------------------------------------------------------------
# Scrape artifact preserved as a comment ("View File" / "@ -0,0 +1,843 @@"):
# boundary between two vendored files concatenated into this dump.
# ---------------------------------------------------------------------------
"""
Bytecode analysing utils. Originally added for using in smart step into.
Note: not importable from Python 2.
"""
from _pydev_bundle import pydev_log
from types import CodeType
from _pydevd_frame_eval.vendored.bytecode.instr import _Variable
from _pydevd_frame_eval.vendored import bytecode
from _pydevd_frame_eval.vendored.bytecode import cfg as bytecode_cfg
import dis
import opcode as _opcode
from _pydevd_bundle.pydevd_constants import KeyifyList, DebugInfoHolder, IS_PY311_OR_GREATER
from bisect import bisect
from collections import deque
# When True, throws errors on unknown bytecodes, when False, ignore those as if they didn't change the stack.
STRICT_MODE = False

# When True, prints each smart-step-into target as it is created.
DEBUG = False

# Names of all BINARY_* opcodes known to this interpreter version.
_BINARY_OPS = set([opname for opname in dis.opname if opname.startswith('BINARY_')])

# Maps a BINARY_* opcode name to the special method the operation dispatches
# to (used as the call name when reporting smart-step-into targets).
_BINARY_OP_MAP = {
    'BINARY_POWER': '__pow__',
    'BINARY_MULTIPLY': '__mul__',
    'BINARY_MATRIX_MULTIPLY': '__matmul__',
    'BINARY_FLOOR_DIVIDE': '__floordiv__',
    'BINARY_TRUE_DIVIDE': '__div__',
    'BINARY_MODULO': '__mod__',
    'BINARY_ADD': '__add__',
    'BINARY_SUBTRACT': '__sub__',
    'BINARY_LSHIFT': '__lshift__',
    'BINARY_RSHIFT': '__rshift__',
    'BINARY_AND': '__and__',
    'BINARY_OR': '__or__',
    'BINARY_XOR': '__xor__',
    'BINARY_SUBSCR': '__getitem__',
    'BINARY_DIVIDE': '__div__'
}

# Maps a comparison operator string to its special method; note both 'in'
# and 'not in' map to __contains__.
_COMP_OP_MAP = {
    '<': '__lt__',
    '<=': '__le__',
    '==': '__eq__',
    '!=': '__ne__',
    '>': '__gt__',
    '>=': '__ge__',
    'in': '__contains__',
    'not in': '__contains__',
}
class Target(object):
    """A smart-step-into target: a name at a given line/bytecode offset."""

    __slots__ = ['arg', 'lineno', 'offset', 'children_targets']

    def __init__(self, arg, lineno, offset, children_targets=()):
        self.arg = arg
        self.lineno = lineno
        self.offset = offset
        self.children_targets = children_targets

    def __repr__(self):
        parts = ['%s: %s' % (slot, getattr(self, slot)) for slot in self.__slots__]
        return 'Target(%s)' % ', '.join(parts)

    __str__ = __repr__
class _TargetIdHashable(object):
def __init__(self, target):
self.target = target
def __eq__(self, other):
if not hasattr(other, 'target'):
return
return other.target is self.target
def __ne__(self, other):
return not self == other
def __hash__(self):
return id(self.target)
class _StackInterpreter(object):
'''
Good reference: https://github.com/python/cpython/blob/fcb55c0037baab6f98f91ee38ce84b6f874f034a/Python/ceval.c
'''
def __init__(self, bytecode):
self.bytecode = bytecode
self._stack = deque()
self.function_calls = []
self.load_attrs = {}
self.func = set()
self.func_name_id_to_code_object = {}
def __str__(self):
return 'Stack:\nFunction calls:\n%s\nLoad attrs:\n%s\n' % (self.function_calls, list(self.load_attrs.values()))
def _getname(self, instr):
if instr.opcode in _opcode.hascompare:
cmp_op = dis.cmp_op[instr.arg]
if cmp_op not in ('exception match', 'BAD'):
return _COMP_OP_MAP.get(cmp_op, cmp_op)
return instr.arg
def _getcallname(self, instr):
if instr.name == 'BINARY_SUBSCR':
return '__getitem__().__call__'
if instr.name == 'CALL_FUNCTION':
# Note: previously a '__call__().__call__' was returned, but this was a bit weird
# and on Python 3.9 this construct could appear for some internal things where
# it wouldn't be expected.
# Note: it'd be what we had in func()().
return None
if instr.name == 'MAKE_FUNCTION':
return '__func__().__call__'
if instr.name == 'LOAD_ASSERTION_ERROR':
return 'AssertionError'
name = self._getname(instr)
if isinstance(name, CodeType):
name = name.co_qualname # Note: only available for Python 3.11
if isinstance(name, _Variable):
name = name.name
if not isinstance(name, str):
return None
if name.endswith('>'): # xxx.<listcomp>, xxx.<lambda>, ...
return name.split('.')[-1]
return name
def _no_stack_change(self, instr):
pass # Can be aliased when the instruction does nothing.
def on_LOAD_GLOBAL(self, instr):
self._stack.append(instr)
def on_POP_TOP(self, instr):
try:
self._stack.pop()
except IndexError:
pass # Ok (in the end of blocks)
def on_LOAD_ATTR(self, instr):
self.on_POP_TOP(instr) # replaces the current top
self._stack.append(instr)
self.load_attrs[_TargetIdHashable(instr)] = Target(self._getname(instr), instr.lineno, instr.offset)
on_LOOKUP_METHOD = on_LOAD_ATTR # Improvement in PyPy
def on_LOAD_CONST(self, instr):
self._stack.append(instr)
on_LOAD_DEREF = on_LOAD_CONST
on_LOAD_NAME = on_LOAD_CONST
on_LOAD_CLOSURE = on_LOAD_CONST
on_LOAD_CLASSDEREF = on_LOAD_CONST
# Although it actually changes the stack, it's inconsequential for us as a function call can't
# really be found there.
on_IMPORT_NAME = _no_stack_change
on_IMPORT_FROM = _no_stack_change
on_IMPORT_STAR = _no_stack_change
on_SETUP_ANNOTATIONS = _no_stack_change
def on_STORE_FAST(self, instr):
try:
self._stack.pop()
except IndexError:
pass # Ok, we may have a block just with the store
# Note: it stores in the locals and doesn't put anything in the stack.
on_STORE_GLOBAL = on_STORE_FAST
on_STORE_DEREF = on_STORE_FAST
on_STORE_ATTR = on_STORE_FAST
on_STORE_NAME = on_STORE_FAST
on_DELETE_NAME = on_POP_TOP
on_DELETE_ATTR = on_POP_TOP
on_DELETE_GLOBAL = on_POP_TOP
on_DELETE_FAST = on_POP_TOP
on_DELETE_DEREF = on_POP_TOP
on_DICT_UPDATE = on_POP_TOP
on_SET_UPDATE = on_POP_TOP
on_GEN_START = on_POP_TOP
def on_NOP(self, instr):
pass
def _handle_call_from_instr(self, func_name_instr, func_call_instr):
self.load_attrs.pop(_TargetIdHashable(func_name_instr), None)
call_name = self._getcallname(func_name_instr)
target = None
if not call_name:
pass # Ignore if we can't identify a name
elif call_name in ('<listcomp>', '<genexpr>', '<setcomp>', '<dictcomp>'):
code_obj = self.func_name_id_to_code_object[_TargetIdHashable(func_name_instr)]
if code_obj is not None:
children_targets = _get_smart_step_into_targets(code_obj)
if children_targets:
# i.e.: we have targets inside of a <listcomp> or <genexpr>.
# Note that to actually match this in the debugger we need to do matches on 2 frames,
# the one with the <listcomp> and then the actual target inside the <listcomp>.
target = Target(call_name, func_name_instr.lineno, func_call_instr.offset, children_targets)
self.function_calls.append(
target)
else:
# Ok, regular call
target = Target(call_name, func_name_instr.lineno, func_call_instr.offset)
self.function_calls.append(target)
if DEBUG and target is not None:
print('Created target', target)
self._stack.append(func_call_instr) # Keep the func call as the result
def on_COMPARE_OP(self, instr):
try:
_right = self._stack.pop()
except IndexError:
return
try:
_left = self._stack.pop()
except IndexError:
return
cmp_op = dis.cmp_op[instr.arg]
if cmp_op not in ('exception match', 'BAD'):
self.function_calls.append(Target(self._getname(instr), instr.lineno, instr.offset))
self._stack.append(instr)
def on_IS_OP(self, instr):
try:
self._stack.pop()
except IndexError:
return
try:
self._stack.pop()
except IndexError:
return
def on_BINARY_SUBSCR(self, instr):
try:
_sub = self._stack.pop()
except IndexError:
return
try:
_container = self._stack.pop()
except IndexError:
return
self.function_calls.append(Target(_BINARY_OP_MAP[instr.name], instr.lineno, instr.offset))
self._stack.append(instr)
on_BINARY_MATRIX_MULTIPLY = on_BINARY_SUBSCR
on_BINARY_POWER = on_BINARY_SUBSCR
on_BINARY_MULTIPLY = on_BINARY_SUBSCR
on_BINARY_FLOOR_DIVIDE = on_BINARY_SUBSCR
on_BINARY_TRUE_DIVIDE = on_BINARY_SUBSCR
on_BINARY_MODULO = on_BINARY_SUBSCR
on_BINARY_ADD = on_BINARY_SUBSCR
on_BINARY_SUBTRACT = on_BINARY_SUBSCR
on_BINARY_LSHIFT = on_BINARY_SUBSCR
on_BINARY_RSHIFT = on_BINARY_SUBSCR
on_BINARY_AND = on_BINARY_SUBSCR
on_BINARY_OR = on_BINARY_SUBSCR
on_BINARY_XOR = on_BINARY_SUBSCR
def on_LOAD_METHOD(self, instr):
self.on_POP_TOP(instr) # Remove the previous as we're loading something from it.
self._stack.append(instr)
def on_MAKE_FUNCTION(self, instr):
if not IS_PY311_OR_GREATER:
# The qualifier name is no longer put in the stack.
qualname = self._stack.pop()
code_obj_instr = self._stack.pop()
else:
# In 3.11 the code object has a co_qualname which we can use.
qualname = code_obj_instr = self._stack.pop()
arg = instr.arg
if arg & 0x08:
_func_closure = self._stack.pop()
if arg & 0x04:
_func_annotations = self._stack.pop()
if arg & 0x02:
_func_kwdefaults = self._stack.pop()
if arg & 0x01:
_func_defaults = self._stack.pop()
call_name = self._getcallname(qualname)
if call_name in ('<genexpr>', '<listcomp>', '<setcomp>', '<dictcomp>'):
if isinstance(code_obj_instr.arg, CodeType):
self.func_name_id_to_code_object[_TargetIdHashable(qualname)] = code_obj_instr.arg
self._stack.append(qualname)
def on_LOAD_FAST(self, instr):
self._stack.append(instr)
def on_LOAD_ASSERTION_ERROR(self, instr):
self._stack.append(instr)
on_LOAD_BUILD_CLASS = on_LOAD_FAST
def on_CALL_METHOD(self, instr):
# pop the actual args
for _ in range(instr.arg):
self._stack.pop()
func_name_instr = self._stack.pop()
self._handle_call_from_instr(func_name_instr, instr)
def on_PUSH_NULL(self, instr):
self._stack.append(instr)
def on_CALL_FUNCTION(self, instr):
arg = instr.arg
argc = arg & 0xff # positional args
argc += ((arg >> 8) * 2) # keyword args
# pop the actual args
for _ in range(argc):
try:
self._stack.pop()
except IndexError:
return
try:
func_name_instr = self._stack.pop()
except IndexError:
return
self._handle_call_from_instr(func_name_instr, instr)
    def on_CALL_FUNCTION_KW(self, instr):
        # Like CALL_FUNCTION, but TOS holds the tuple of keyword names.
        # names of kw args
        _names_of_kw_args = self._stack.pop()

        # pop the actual args
        arg = instr.arg
        argc = arg & 0xff  # positional args
        argc += ((arg >> 8) * 2)  # keyword args

        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_VAR(self, instr):
        # Older Pythons: TOS holds the *args value.
        # var name
        _var_arg = self._stack.pop()

        # pop the actual args
        arg = instr.arg
        argc = arg & 0xff  # positional args
        argc += ((arg >> 8) * 2)  # keyword args

        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_VAR_KW(self, instr):
        # Older Pythons: TOS holds the keyword-names tuple, below it **kwargs.
        # names of kw args
        _names_of_kw_args = self._stack.pop()

        arg = instr.arg
        argc = arg & 0xff  # positional args
        argc += ((arg >> 8) * 2)  # keyword args

        # also pop **kwargs
        self._stack.pop()

        # pop the actual args
        for _ in range(argc):
            self._stack.pop()

        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)

    def on_CALL_FUNCTION_EX(self, instr):
        # Flag bit 0x01 means a **kwargs mapping sits above the *args
        # iterable and the callable.
        if instr.arg & 0x01:
            _kwargs = self._stack.pop()
        _callargs = self._stack.pop()
        func_name_instr = self._stack.pop()
        self._handle_call_from_instr(func_name_instr, instr)
on_YIELD_VALUE = _no_stack_change
on_GET_AITER = _no_stack_change
on_GET_ANEXT = _no_stack_change
on_END_ASYNC_FOR = _no_stack_change
on_BEFORE_ASYNC_WITH = _no_stack_change
on_SETUP_ASYNC_WITH = _no_stack_change
on_YIELD_FROM = _no_stack_change
on_SETUP_LOOP = _no_stack_change
on_FOR_ITER = _no_stack_change
on_BREAK_LOOP = _no_stack_change
on_JUMP_ABSOLUTE = _no_stack_change
on_RERAISE = _no_stack_change
on_LIST_TO_TUPLE = _no_stack_change
on_CALL_FINALLY = _no_stack_change
on_POP_FINALLY = _no_stack_change
def on_JUMP_IF_FALSE_OR_POP(self, instr):
try:
self._stack.pop()
except IndexError:
return
on_JUMP_IF_TRUE_OR_POP = on_JUMP_IF_FALSE_OR_POP
def on_JUMP_IF_NOT_EXC_MATCH(self, instr):
try:
self._stack.pop()
except IndexError:
return
try:
self._stack.pop()
except IndexError:
return
def on_ROT_TWO(self, instr):
try:
p0 = self._stack.pop()
except IndexError:
return
try:
p1 = self._stack.pop()
except:
self._stack.append(p0)
return
self._stack.append(p0)
self._stack.append(p1)
def on_ROT_THREE(self, instr):
try:
p0 = self._stack.pop()
except IndexError:
return
try:
p1 = self._stack.pop()
except:
self._stack.append(p0)
return
try:
p2 = self._stack.pop()
except:
self._stack.append(p0)
self._stack.append(p1)
return
self._stack.append(p0)
self._stack.append(p1)
self._stack.append(p2)
def on_ROT_FOUR(self, instr):
try:
p0 = self._stack.pop()
except IndexError:
return
try:
p1 = self._stack.pop()
except:
self._stack.append(p0)
return
try:
p2 = self._stack.pop()
except:
self._stack.append(p0)
self._stack.append(p1)
return
try:
p3 = self._stack.pop()
except:
self._stack.append(p0)
self._stack.append(p1)
self._stack.append(p2)
return
self._stack.append(p0)
self._stack.append(p1)
self._stack.append(p2)
self._stack.append(p3)
    def on_BUILD_LIST_FROM_ARG(self, instr):
        # Pushes the new list (the source iterable is left on the stack).
        self._stack.append(instr)

    def on_BUILD_MAP(self, instr):
        # BUILD_MAP consumes `arg` key/value pairs and pushes the new dict.
        for _i in range(instr.arg):
            self._stack.pop()
            self._stack.pop()
        self._stack.append(instr)

    def on_BUILD_CONST_KEY_MAP(self, instr):
        # BUILD_CONST_KEY_MAP: TOS is the keys tuple, below it `arg` values.
        self.on_POP_TOP(instr)  # keys
        for _i in range(instr.arg):
            self.on_POP_TOP(instr)  # value
        self._stack.append(instr)
on_RETURN_VALUE = on_POP_TOP
on_POP_JUMP_IF_FALSE = on_POP_TOP
on_POP_JUMP_IF_TRUE = on_POP_TOP
on_DICT_MERGE = on_POP_TOP
on_LIST_APPEND = on_POP_TOP
on_SET_ADD = on_POP_TOP
on_LIST_EXTEND = on_POP_TOP
on_UNPACK_EX = on_POP_TOP
# ok: doesn't change the stack (converts top to getiter(top))
on_GET_ITER = _no_stack_change
on_GET_AWAITABLE = _no_stack_change
on_GET_YIELD_FROM_ITER = _no_stack_change
def on_RETURN_GENERATOR(self, instr):
self._stack.append(instr)
on_RETURN_GENERATOR = _no_stack_change
on_RESUME = _no_stack_change
def on_MAP_ADD(self, instr):
self.on_POP_TOP(instr)
self.on_POP_TOP(instr)
def on_UNPACK_SEQUENCE(self, instr):
self._stack.pop()
for _i in range(instr.arg):
self._stack.append(instr)
    def on_BUILD_LIST(self, instr):
        # Consumes `arg` items and pushes the resulting collection.
        for _i in range(instr.arg):
            self.on_POP_TOP(instr)
        self._stack.append(instr)

    # All BUILD_* collection variants have the same stack effect for us.
    on_BUILD_TUPLE = on_BUILD_LIST
    on_BUILD_STRING = on_BUILD_LIST
    on_BUILD_TUPLE_UNPACK_WITH_CALL = on_BUILD_LIST
    on_BUILD_TUPLE_UNPACK = on_BUILD_LIST
    on_BUILD_LIST_UNPACK = on_BUILD_LIST
    on_BUILD_MAP_UNPACK_WITH_CALL = on_BUILD_LIST
    on_BUILD_MAP_UNPACK = on_BUILD_LIST
    on_BUILD_SET = on_BUILD_LIST
    on_BUILD_SET_UNPACK = on_BUILD_LIST
on_SETUP_FINALLY = _no_stack_change
on_POP_FINALLY = _no_stack_change
on_BEGIN_FINALLY = _no_stack_change
on_END_FINALLY = _no_stack_change
def on_RAISE_VARARGS(self, instr):
for _i in range(instr.arg):
self.on_POP_TOP(instr)
on_POP_BLOCK = _no_stack_change
on_JUMP_FORWARD = _no_stack_change
on_POP_EXCEPT = _no_stack_change
on_SETUP_EXCEPT = _no_stack_change
on_WITH_EXCEPT_START = _no_stack_change
on_END_FINALLY = _no_stack_change
on_BEGIN_FINALLY = _no_stack_change
on_SETUP_WITH = _no_stack_change
on_WITH_CLEANUP_START = _no_stack_change
on_WITH_CLEANUP_FINISH = _no_stack_change
on_FORMAT_VALUE = _no_stack_change
on_EXTENDED_ARG = _no_stack_change
    def on_INPLACE_ADD(self, instr):
        # This would actually pop 2 and leave the value in the stack.
        # In a += 1 it pop `a` and `1` and leave the resulting value
        # for a load. In our case, let's just pop the `1` and leave the `a`
        # instead of leaving the INPLACE_ADD bytecode.
        try:
            self._stack.pop()
        except IndexError:
            pass

    # All in-place binary operators share the same (simplified) stack effect.
    on_INPLACE_POWER = on_INPLACE_ADD
    on_INPLACE_MULTIPLY = on_INPLACE_ADD
    on_INPLACE_MATRIX_MULTIPLY = on_INPLACE_ADD
    on_INPLACE_TRUE_DIVIDE = on_INPLACE_ADD
    on_INPLACE_FLOOR_DIVIDE = on_INPLACE_ADD
    on_INPLACE_MODULO = on_INPLACE_ADD
    on_INPLACE_SUBTRACT = on_INPLACE_ADD
    on_INPLACE_RSHIFT = on_INPLACE_ADD
    on_INPLACE_LSHIFT = on_INPLACE_ADD
    on_INPLACE_AND = on_INPLACE_ADD
    on_INPLACE_OR = on_INPLACE_ADD
    on_INPLACE_XOR = on_INPLACE_ADD
def on_DUP_TOP(self, instr):
try:
i = self._stack[-1]
except IndexError:
# ok (in the start of block)
self._stack.append(instr)
else:
self._stack.append(i)
def on_DUP_TOP_TWO(self, instr):
if len(self._stack) == 0:
self._stack.append(instr)
return
if len(self._stack) == 1:
i = self._stack[-1]
self._stack.append(i)
self._stack.append(instr)
return
i = self._stack[-1]
j = self._stack[-2]
self._stack.append(j)
self._stack.append(i)
def on_BUILD_SLICE(self, instr):
for _ in range(instr.arg):
try:
self._stack.pop()
except IndexError:
pass
self._stack.append(instr)
def on_STORE_SUBSCR(self, instr):
try:
self._stack.pop()
self._stack.pop()
self._stack.pop()
except IndexError:
pass
def on_DELETE_SUBSCR(self, instr):
try:
self._stack.pop()
self._stack.pop()
except IndexError:
pass
# Note: on Python 3 this is only found on interactive mode to print the results of
# some evaluation.
on_PRINT_EXPR = on_POP_TOP
on_UNARY_POSITIVE = _no_stack_change
on_UNARY_NEGATIVE = _no_stack_change
on_UNARY_NOT = _no_stack_change
on_UNARY_INVERT = _no_stack_change
on_CACHE = _no_stack_change
on_PRECALL = _no_stack_change
def _get_smart_step_into_targets(code):
    '''
    Compute the smart step-into targets for a code object by running a
    per-basic-block abstract interpretation of its bytecode with
    _StackInterpreter and collecting the calls found.

    :return list(Target)
    '''
    b = bytecode.Bytecode.from_code(code)
    cfg = bytecode_cfg.ControlFlowGraph.from_bytecode(b)

    ret = []
    for block in cfg:
        if DEBUG:
            print('\nStart block----')
        # A fresh interpreter per block: the simulated stack does not carry
        # over between basic blocks (hence the underflow tolerance in handlers).
        stack = _StackInterpreter(block)
        for instr in block:
            try:
                func_name = 'on_%s' % (instr.name,)
                func = getattr(stack, func_name, None)

                if DEBUG:
                    if instr.name != 'CACHE':  # Filter the ones we don't want to see.
                        print('\nWill handle: ', instr, '>>', stack._getname(instr), '<<')
                        print('Current stack:')
                        for entry in stack._stack:
                            print('    arg:', stack._getname(entry), '(', entry, ')')

                if func is None:
                    if STRICT_MODE:
                        raise AssertionError('%s not found.' % (func_name,))
                    else:
                        continue
                func(instr)
            except:
                if STRICT_MODE:
                    raise  # Error in strict mode.
                else:
                    # In non-strict mode, log it (if in verbose mode) and keep on going.
                    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                        pydev_log.exception('Exception computing step into targets (handled).')

        ret.extend(stack.function_calls)
    # No longer considering attr loads as calls (while in theory sometimes it's possible
    # that something as `some.attr` can turn out to be a property which could be stepped
    # in, it's not that common in practice and can be surprising for users, so, disabling
    # step into from stepping into properties).
    # ret.extend(stack.load_attrs.values())

    return ret
# Note that the offset is unique within the frame (so, we can use it as the target id).
# Also, as the offset is the instruction offset within the frame, it's possible to
# to inspect the parent frame for frame.f_lasti to know where we actually are (as the
# caller name may not always match the new frame name).
class Variant(object):
    """A single smart-step-into target (one call found in the bytecode).

    ``offset`` is the instruction offset within the frame (unique there, so
    it can serve as the target id); ``call_order`` disambiguates repeated
    calls to the same name on the same line range.
    """

    __slots__ = ['name', 'is_visited', 'line', 'offset', 'call_order', 'children_variants', 'parent']

    def __init__(self, name, is_visited, line, offset, call_order, children_variants=None):
        self.name = name
        self.is_visited = is_visited
        self.line = line
        self.offset = offset
        self.call_order = call_order
        self.children_variants = children_variants
        self.parent = None
        if children_variants:
            for child in children_variants:
                child.parent = self

    def __repr__(self):
        parts = []
        for slot in self.__slots__:
            if slot == 'parent':
                try:
                    parent = self.parent
                except AttributeError:
                    parts.append('%s: <not set>' % (slot,))
                    continue
                if parent is None:
                    parts.append('parent: None')
                else:
                    parts.append('parent: %s (%s)' % (parent.name, parent.offset))
                continue

            if slot == 'children_variants':
                count = len(self.children_variants) if self.children_variants else 0
                parts.append('children_variants: %s' % count)
                continue

            try:
                parts.append('%s: %s' % (slot, getattr(self, slot)))
            except AttributeError:
                parts.append('%s: <not set>' % (slot,))

        return 'Variant(%s)' % ', '.join(parts)

    __str__ = __repr__
def _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base):
    """Convert a bytecode Target into a Variant within [start_line, end_line].

    Returns None when the target has no string name or lies outside the
    requested line range. ``call_order_cache`` counts repeated calls to the
    same name; ``lasti`` decides whether the call was already visited;
    ``base`` is subtracted from the target's line.
    """
    name = target.arg
    if not isinstance(name, str):
        return
    if not (start_line <= target.lineno <= end_line):
        return

    call_order = call_order_cache.get(name, 0) + 1
    call_order_cache[name] = call_order

    children_variants = None
    if target.children_targets:
        children_variants = [
            _convert_target_to_variant(child, start_line, end_line, call_order_cache, lasti, base)
            for child in target.children_targets
        ]

    return Variant(
        name, target.offset <= lasti, target.lineno - base, target.offset, call_order, children_variants)
def calculate_smart_step_into_variants(frame, start_line, end_line, base=0):
    """
    Calculate smart step into variants for the given line range.
    :param frame:
    :type frame: :py:class:`types.FrameType`
    :param start_line:
    :param end_line:
    :param base: subtracted from each variant's line (e.g. the function's
        first line, to make lines relative).
    :return: A list of call names from the first to the last.
    :note: it's guaranteed that the offsets appear in order.
    :raise: :py:class:`RuntimeError` if failed to parse the bytecode or if dis cannot be used.
    """
    variants = []
    code = frame.f_code
    lasti = frame.f_lasti

    call_order_cache = {}
    if DEBUG:
        print('dis.dis:')
        if IS_PY311_OR_GREATER:
            # 3.11 interleaves CACHE entries; hide them from the dump.
            dis.dis(code, show_caches=False)
        else:
            dis.dis(code)

    for target in _get_smart_step_into_targets(code):
        variant = _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base)
        if variant is None:
            # Out of range or unnamed target: not a step-into candidate.
            continue
        variants.append(variant)
    return variants
def get_smart_step_into_variant_from_frame_offset(frame_f_lasti, variants):
    """
    Given the frame.f_lasti, return the related `Variant`.

    :note: if the offset is found before any variant available or no variants are
    available, None is returned.

    :rtype: Variant|NoneType
    """
    if not variants:
        return None

    # Variants are ordered by offset, so the one governing f_lasti is the
    # last variant whose offset is <= f_lasti.
    i = bisect(KeyifyList(variants, lambda entry:entry.offset), frame_f_lasti)
    if i == 0:
        return None
    else:
        return variants[i - 1]

View File

@ -0,0 +1,611 @@
"""
Decompiler that can be used with the debugger (where statements correctly represent the
line numbers).
Note: this is a work in progress / proof of concept / not ready to be used.
"""
import dis
from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions
from _pydev_bundle import pydev_log
import sys
import inspect
from io import StringIO
class _Stack(object):
def __init__(self):
self._contents = []
def push(self, obj):
# print('push', obj)
self._contents.append(obj)
def pop(self):
return self._contents.pop(-1)
# Markers appended to a line's contents to control indentation when the final
# source text is composed.
INDENT_MARKER = object()
DEDENT_MARKER = object()

# Sentinel used where None would be a meaningful value.
_SENTINEL = object()

DEBUG = False
class _Token(object):
    # One atom of reconstructed output text, tied to a source line and
    # optionally ordered after other tokens/handlers.

    def __init__(self, i_line, instruction=None, tok=_SENTINEL, priority=0, after=None, end_of_line=False):
        '''
        :param i_line:
            Line at which the token should be written.
        :param instruction:
            Instruction this token was created from; when `tok` is not given,
            the instruction's argval provides the text (empty for code objects).
        :param tok:
            Explicit text for the token.
        :param priority:
        :param after:
            Token (or handler) that must be written before this one.
        :param end_of_line:
            Marker to signal only after all the other tokens have been written.
        '''
        self.i_line = i_line
        if tok is not _SENTINEL:
            self.tok = tok
        else:
            if instruction is not None:
                if inspect.iscode(instruction.argval):
                    self.tok = ''
                else:
                    self.tok = str(instruction.argval)
            else:
                raise AssertionError('Either the tok or the instruction is needed.')
        self.instruction = instruction
        self.priority = priority
        self.end_of_line = end_of_line
        self._after_tokens = set()
        self._after_handler_tokens = set()

        if after:
            self.mark_after(after)

    def mark_after(self, v):
        # Record an ordering dependency on another token or on a handler
        # (handlers' tokens are resolved lazily in get_after_tokens).
        if isinstance(v, _Token):
            self._after_tokens.add(v)
        elif isinstance(v, _BaseHandler):
            self._after_handler_tokens.add(v)
        else:
            raise AssertionError('Unhandled: %s' % (v,))

    def get_after_tokens(self):
        # All tokens that must precede this one (direct + via handlers).
        ret = self._after_tokens.copy()
        for handler in self._after_handler_tokens:
            ret.update(handler.tokens)
        return ret

    def __repr__(self):
        return 'Token(%s, after: %s)' % (self.tok, self.get_after_tokens())

    __str__ = __repr__
class _Writer(object):
def __init__(self):
self.line_to_contents = {}
self.all_tokens = set()
def get_line(self, line):
lst = self.line_to_contents.get(line)
if lst is None:
lst = self.line_to_contents[line] = []
return lst
def indent(self, line):
self.get_line(line).append(INDENT_MARKER)
def dedent(self, line):
self.get_line(line).append(DEDENT_MARKER)
def write(self, line, token):
if token in self.all_tokens:
return
self.all_tokens.add(token)
assert isinstance(token, _Token)
lst = self.get_line(line)
lst.append(token)
class _BaseHandler(object):
    # Base class for opcode handlers: one instance is created per handled
    # instruction; subclasses implement _handle() to manipulate the stack
    # and collect output tokens.

    def __init__(self, i_line, instruction, stack, writer, disassembler):
        self.i_line = i_line
        self.instruction = instruction
        self.stack = stack
        self.writer = writer
        self.disassembler = disassembler
        self.tokens = []
        self._handle()  # Subclass hook runs immediately on construction.

    def _write_tokens(self):
        # Flush the collected tokens to the writer at their recorded lines.
        for token in self.tokens:
            self.writer.write(token.i_line, token)

    def _handle(self):
        raise NotImplementedError(self)

    def __repr__(self, *args, **kwargs):
        try:
            return "%s line:%s" % (self.instruction, self.i_line)
        except:
            return object.__repr__(self)

    __str__ = __repr__
_op_name_to_handler = {}
def _register(cls):
_op_name_to_handler[cls.opname] = cls
return cls
class _BasePushHandler(_BaseHandler):
    # Handler whose only stack effect is pushing itself.

    def _handle(self):
        self.stack.push(self)


class _BaseLoadHandler(_BasePushHandler):
    # Push handler that also emits a token for the loaded value.

    def _handle(self):
        _BasePushHandler._handle(self)
        self.tokens = [_Token(self.i_line, self.instruction)]
@_register
class _LoadBuildClass(_BasePushHandler):
    # LOAD_BUILD_CLASS: pushed on the stack but produces no visible text.
    opname = "LOAD_BUILD_CLASS"


@_register
class _LoadConst(_BaseLoadHandler):
    opname = "LOAD_CONST"


@_register
class _LoadName(_BaseLoadHandler):
    opname = "LOAD_NAME"


@_register
class _LoadGlobal(_BaseLoadHandler):
    opname = "LOAD_GLOBAL"


@_register
class _LoadFast(_BaseLoadHandler):
    opname = "LOAD_FAST"
@_register
class _GetIter(_BaseHandler):
    '''
    Implements TOS = iter(TOS).
    '''
    opname = "GET_ITER"

    iter_target = None  # Set in _handle: the handler whose value is iterated.

    def _handle(self):
        self.iter_target = self.stack.pop()
        # The iterable's own tokens become part of this handler's output.
        self.tokens.extend(self.iter_target.tokens)
        self.stack.push(self)
@_register
class _ForIter(_BaseHandler):
    '''
    TOS is an iterator. Call its __next__() method. If this yields a new value, push it on the stack
    (leaving the iterator below it). If the iterator indicates it is exhausted TOS is popped, and
    the byte code counter is incremented by delta.
    '''
    opname = "FOR_ITER"

    iter_in = None  # The handler that produced the iterated value.

    def _handle(self):
        self.iter_in = self.stack.pop()
        self.stack.push(self)

    def store_in_name(self, store_name):
        # Called by _StoreName: emits `for <name> in <iterable>:` with
        # 'after' dependencies so the composer writes it left-to-right.
        for_token = _Token(self.i_line, None, 'for ')
        self.tokens.append(for_token)

        prev = for_token
        t_name = _Token(store_name.i_line, store_name.instruction, after=prev)
        self.tokens.append(t_name)
        prev = t_name

        in_token = _Token(store_name.i_line, None, ' in ', after=prev)
        self.tokens.append(in_token)
        prev = in_token

        max_line = store_name.i_line
        if self.iter_in:
            for t in self.iter_in.tokens:
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                prev = t
            self.tokens.extend(self.iter_in.tokens)

        colon_token = _Token(self.i_line, None, ':', after=prev)
        self.tokens.append(colon_token)
        # NOTE(review): prev is re-bound to the first token here but unused
        # afterwards -- confirm this line is intentional.
        prev = for_token
        self._write_tokens()
@_register
class _StoreName(_BaseHandler):
    '''
    Implements name = TOS. namei is the index of name in the attribute co_names of the code object.
    The compiler tries to use STORE_FAST or STORE_GLOBAL if possible.
    '''
    opname = "STORE_NAME"

    def _handle(self):
        v = self.stack.pop()
        if isinstance(v, _ForIter):
            # Storing the loop variable: delegate so the whole
            # `for <name> in <iterable>:` line is emitted in one place.
            v.store_in_name(self)
        else:
            # A plain `def` already produced its own text; only emit
            # `<name> = <value>` for ordinary values and lambdas.
            if not isinstance(v, _MakeFunction) or v.is_lambda:
                line = self.i_line
                for t in v.tokens:
                    line = min(line, t.i_line)

                t_name = _Token(line, self.instruction)
                t_equal = _Token(line, None, '=', after=t_name)

                self.tokens.append(t_name)
                self.tokens.append(t_equal)

                for t in v.tokens:
                    t.mark_after(t_equal)
                self.tokens.extend(v.tokens)

                self._write_tokens()
@_register
class _ReturnValue(_BaseHandler):
    """
    Returns with TOS to the caller of the function.
    """
    opname = "RETURN_VALUE"

    def _handle(self):
        v = self.stack.pop()
        # `return` must be placed first in the line; everything else after it.
        return_token = _Token(self.i_line, None, 'return ', end_of_line=True)
        self.tokens.append(return_token)
        for token in v.tokens:
            token.mark_after(return_token)
        self.tokens.extend(v.tokens)

        self._write_tokens()
@_register
class _CallFunction(_BaseHandler):
    """
    CALL_FUNCTION(argc)

    Calls a callable object with positional arguments. argc indicates the number of positional
    arguments. The top of the stack contains positional arguments, with the right-most argument
    on top. Below the arguments is a callable object to call. CALL_FUNCTION pops all arguments
    and the callable object off the stack, calls the callable object with those arguments, and
    pushes the return value returned by the callable object.

    Changed in version 3.6: This opcode is used only for calls with positional arguments.
    """
    opname = "CALL_FUNCTION"

    def _handle(self):
        # Pop the arguments plus the callable itself (hence argval + 1).
        args = []
        for _i in range(self.instruction.argval + 1):
            arg = self.stack.pop()
            args.append(arg)

        it = reversed(args)
        name = next(it)  # The callable comes first in source order.
        max_line = name.i_line
        for t in name.tokens:
            self.tokens.append(t)

        tok_open_parens = _Token(name.i_line, None, '(', after=name)
        self.tokens.append(tok_open_parens)

        prev = tok_open_parens
        for i, arg in enumerate(it):
            for t in arg.tokens:
                t.mark_after(name)
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                self.tokens.append(t)

            prev = arg
            if i > 0:
                # NOTE(review): the comma token is created after the argument
                # that follows it in source order; final placement relies on
                # the 'after' markers resolved by _compose_line_contents.
                comma_token = _Token(prev.i_line, None, ',', after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

        tok_close_parens = _Token(max_line, None, ')', after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        self.stack.push(self)
@_register
class _MakeFunctionPy3(_BaseHandler):
    """
    Pushes a new function object on the stack. From bottom to top, the consumed stack must consist
    of values if the argument carries a specified flag value

    0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order
    0x02 a dictionary of keyword-only parameters' default values
    0x04 an annotation dictionary
    0x08 a tuple containing cells for free variables, making a closure

    the code associated with the function (at TOS1)

    the qualified name of the function (at TOS)
    """
    opname = "MAKE_FUNCTION"

    is_lambda = False  # Toggled in _handle when the qualified name is '<lambda>'.

    def _handle(self):
        stack = self.stack
        self.qualified_name = stack.pop()
        self.code = stack.pop()

        default_node = None
        if self.instruction.argval & 0x01:
            default_node = stack.pop()

        is_lambda = self.is_lambda = '<lambda>' in [x.tok for x in self.qualified_name.tokens]

        # `def <name>(` -- lambdas get no `def` keyword.
        if not is_lambda:
            def_token = _Token(self.i_line, None, 'def ')
            self.tokens.append(def_token)

        for token in self.qualified_name.tokens:
            self.tokens.append(token)
            if not is_lambda:
                token.mark_after(def_token)
        prev = token

        open_parens_token = _Token(self.i_line, None, '(', after=prev)
        self.tokens.append(open_parens_token)
        prev = open_parens_token

        code = self.code.instruction.argval

        # Align the default values with the right-most parameters.
        if default_node:
            defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(default_node.instruction.argval)
        else:
            defaults = [_SENTINEL] * len(code.co_varnames)

        for i, arg in enumerate(code.co_varnames):
            if i > 0:
                comma_token = _Token(prev.i_line, None, ', ', after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

            arg_token = _Token(self.i_line, None, arg, after=prev)
            self.tokens.append(arg_token)

            default = defaults[i]
            if default is not _SENTINEL:
                eq_token = _Token(default_node.i_line, None, '=', after=prev)
                self.tokens.append(eq_token)
                prev = eq_token

                default_token = _Token(default_node.i_line, None, str(default), after=prev)
                self.tokens.append(default_token)
                prev = default_token

        tok_close_parens = _Token(prev.i_line, None, '):', after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        stack.push(self)
        # The function body is produced by recursively decompiling its code
        # object, indented one level under the signature.
        self.writer.indent(prev.i_line + 1)
        self.writer.dedent(max(self.disassembler.merge_code(code)))


# Python 2 and 3 share this handler here.
_MakeFunction = _MakeFunctionPy3
def _print_after_info(line_contents, stream=None):
if stream is None:
stream = sys.stdout
for token in line_contents:
after_tokens = token.get_after_tokens()
if after_tokens:
s = '%s after: %s\n' % (
repr(token.tok),
('"' + '", "'.join(t.tok for t in token.get_after_tokens()) + '"'))
stream.write(s)
else:
stream.write('%s (NO REQUISITES)' % repr(token.tok))
def _compose_line_contents(line_contents, previous_line_tokens):
    # Order a line's tokens so that each token appears only after its
    # requisites ('after' markers) are placed, either on this line or a
    # previous one. end_of_line tokens are considered only once everything
    # else was placed. If the constraints cannot be satisfied, the leftover
    # tokens are appended as-is and the problem is logged.
    lst = []
    handled = set()

    add_to_end_of_line = []
    delete_indexes = []
    for i, token in enumerate(line_contents):
        if token.end_of_line:
            add_to_end_of_line.append(token)
            delete_indexes.append(i)
    for i in reversed(delete_indexes):
        del line_contents[i]
    del delete_indexes

    while line_contents:
        added = False
        delete_indexes = []

        for i, token in enumerate(line_contents):
            after_tokens = token.get_after_tokens()
            for after in after_tokens:
                if after not in handled and after not in previous_line_tokens:
                    break  # A requisite is still pending: skip for this pass.
            else:
                added = True
                previous_line_tokens.add(token)
                handled.add(token)
                lst.append(token.tok)
                delete_indexes.append(i)

        for i in reversed(delete_indexes):
            del line_contents[i]

        if not added:
            # Made no progress: release the end-of-line tokens, and if that
            # was already done, give up on the ordering constraints.
            if add_to_end_of_line:
                line_contents.extend(add_to_end_of_line)
                del add_to_end_of_line[:]
                continue

            # Something is off, let's just add as is.
            for token in line_contents:
                if token not in handled:
                    lst.append(token.tok)

            stream = StringIO()
            _print_after_info(line_contents, stream)
            pydev_log.critical('Error. After markers are not correct:\n%s', stream.getvalue())
            break

    return ''.join(lst)
class _PyCodeToSource(object):
    """
    Translates a code object back into approximate source text by dispatching
    each instruction to its registered handler and composing the collected
    tokens per line.
    """

    def __init__(self, co, memo=None):
        if memo is None:
            memo = {}
        self.memo = memo  # Shared across recursive invocations (merge_code).
        self.co = co
        self.instructions = list(iter_instructions(co))
        self.stack = _Stack()
        self.writer = _Writer()

    def _process_next(self, i_line):
        """Dispatch the next pending instruction to its handler, if any."""
        instruction = self.instructions.pop(0)
        handler_class = _op_name_to_handler.get(instruction.opname)
        if handler_class is not None:
            s = handler_class(i_line, instruction, self.stack, self.writer, self)
            if DEBUG:
                print(s)
        else:
            if DEBUG:
                print("UNHANDLED", instruction)

    def build_line_to_contents(self):
        """Consume all instructions and return {line: [tokens/markers]}."""
        co = self.co

        op_offset_to_line = dict(dis.findlinestarts(co))
        curr_line_index = 0

        instructions = self.instructions
        while instructions:
            instruction = instructions[0]
            new_line_index = op_offset_to_line.get(instruction.offset)
            # Fix: the None-check was duplicated (nested twice); once suffices.
            if new_line_index is not None:
                curr_line_index = new_line_index

            self._process_next(curr_line_index)
        return self.writer.line_to_contents

    def merge_code(self, code):
        """Recursively decompile *code* into this writer; return its lines."""
        if DEBUG:
            print('merge code ----')
        # for d in dir(code):
        #     if not d.startswith('_'):
        #         print(d, getattr(code, d))
        line_to_contents = _PyCodeToSource(code, self.memo).build_line_to_contents()
        lines = []
        for line, contents in sorted(line_to_contents.items()):
            lines.append(line)
            self.writer.get_line(line).extend(contents)
        if DEBUG:
            print('end merge code ----')
        return lines

    def disassemble(self):
        """Compose the final source text from the per-line token lists."""
        show_lines = False
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        indent = ''
        previous_line_tokens = set()
        for i_line, contents in sorted(line_to_contents.items()):
            # Emit blank lines (or numbered placeholders) for skipped lines.
            while last_line < i_line - 1:
                if show_lines:
                    stream.write(u"%s.\n" % (last_line + 1,))
                else:
                    stream.write(u"\n")
                last_line += 1

            line_contents = []
            dedents_found = 0
            for part in contents:
                if part is INDENT_MARKER:
                    if DEBUG:
                        print('found indent', i_line)
                    indent += '    '
                    continue
                if part is DEDENT_MARKER:
                    if DEBUG:
                        print('found dedent', i_line)
                    dedents_found += 1
                    continue
                line_contents.append(part)

            s = indent + _compose_line_contents(line_contents, previous_line_tokens)
            if show_lines:
                stream.write(u"%s. %s\n" % (i_line, s))
            else:
                stream.write(u"%s\n" % s)

            # Dedents take effect only after the current line was written.
            if dedents_found:
                indent = indent[:-(4 * dedents_found)]
            last_line = i_line

        return stream.getvalue()
def code_obj_to_source(co):
    """
    Converts a code object to source code to provide a suitable representation for the compiler when
    the actual source code is not found.

    This is a work in progress / proof of concept / not ready to be used.
    """
    ret = _PyCodeToSource(co).disassemble()
    if DEBUG:
        print(ret)
    return ret

View File

@ -0,0 +1,925 @@
import dis
import inspect
import sys
from collections import namedtuple
from _pydev_bundle import pydev_log
from opcode import (EXTENDED_ARG, HAVE_ARGUMENT, cmp_op, hascompare, hasconst,
hasfree, hasjrel, haslocal, hasname, opname)
from io import StringIO
class TryExceptInfo(object):
    """Line/offset information describing one try..except block."""

    def __init__(self, try_line, ignore=False):
        '''
        :param try_line:
        :param ignore:
            Usually we should ignore any block that's not a try..except
            (this can happen for finally blocks, with statements, etc, for
            which we create temporary entries).
        '''
        self.try_line = try_line
        self.ignore = ignore
        self.except_line = -1
        self.except_end_line = -1
        self.raise_lines_in_except = []

        # Note: these may not be available if generated from source instead of bytecode.
        self.except_bytecode_offset = -1
        self.except_end_bytecode_offset = -1

    def is_line_in_try_block(self, line):
        """True for lines in [try_line, except_line)."""
        return self.try_line <= line < self.except_line

    def is_line_in_except_block(self, line):
        """True for lines in [except_line, except_end_line]."""
        return self.except_line <= line <= self.except_end_line

    def __str__(self):
        text = '{try:%s except %s end block %s' % (
            self.try_line, self.except_line, self.except_end_line)
        if self.raise_lines_in_except:
            text += ' raises: %s' % (', '.join(str(x) for x in self.raise_lines_in_except),)
        return text + '}'

    __repr__ = __str__
class ReturnInfo(object):
    """Marks a source line that contains a return statement."""

    def __init__(self, return_line):
        self.return_line = return_line

    def __str__(self):
        return ''.join(('{return: ', str(self.return_line), '}'))

    __repr__ = __str__
def _get_line(op_offset_to_line, op_offset, firstlineno, search=False):
op_offset_original = op_offset
while op_offset >= 0:
ret = op_offset_to_line.get(op_offset)
if ret is not None:
return ret - firstlineno
if not search:
return ret
else:
op_offset -= 1
raise AssertionError('Unable to find line for offset: %s.Info: %s' % (
op_offset_original, op_offset_to_line))
def debug(s):
    # Debugging hook: intentionally a no-op (replace locally when needed).
    pass


# Lightweight instruction record mirroring the dis.Instruction fields that
# this module relies on (produced by the Python 2 path below).
_Instruction = namedtuple('_Instruction', 'opname, opcode, starts_line, argval, is_jump_target, offset, argrepr')
def _iter_as_bytecode_as_instructions_py2(co):
    # Python 2 fallback: walk the raw bytecode string manually and yield
    # _Instruction tuples equivalent to dis.Bytecode on Python 3.
    # Note: relies on `code[i]` being a 1-char str (ord(...)), so this path
    # is only valid on Python 2.
    code = co.co_code
    op_offset_to_line = dict(dis.findlinestarts(co))
    labels = set(dis.findlabels(code))
    bytecode_len = len(code)
    i = 0
    extended_arg = 0
    free = None  # Lazily computed cell + free variable names.

    op_to_name = opname

    while i < bytecode_len:
        c = code[i]
        op = ord(c)
        is_jump_target = i in labels

        curr_op_name = op_to_name[op]
        initial_bytecode_offset = i

        i = i + 1
        if op < HAVE_ARGUMENT:
            yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), None, is_jump_target, initial_bytecode_offset, '')

        else:
            # Two-byte little-endian argument, combined with any pending
            # EXTENDED_ARG prefix.
            oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg

            extended_arg = 0
            i = i + 2
            if op == EXTENDED_ARG:
                extended_arg = oparg * 65536

            # Resolve the argument value according to the opcode family.
            if op in hasconst:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_consts[oparg], is_jump_target, initial_bytecode_offset, repr(co.co_consts[oparg]))
            elif op in hasname:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_names[oparg], is_jump_target, initial_bytecode_offset, str(co.co_names[oparg]))
            elif op in hasjrel:
                argval = i + oparg
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), argval, is_jump_target, initial_bytecode_offset, "to " + repr(argval))
            elif op in haslocal:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_varnames[oparg], is_jump_target, initial_bytecode_offset, str(co.co_varnames[oparg]))
            elif op in hascompare:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), cmp_op[oparg], is_jump_target, initial_bytecode_offset, cmp_op[oparg])
            elif op in hasfree:
                if free is None:
                    free = co.co_cellvars + co.co_freevars
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), free[oparg], is_jump_target, initial_bytecode_offset, str(free[oparg]))
            else:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), oparg, is_jump_target, initial_bytecode_offset, str(oparg))
def iter_instructions(co):
    """Yield the instructions of code object *co*.

    Uses dis.Bytecode on Python 3 and a manual bytecode walk on Python 2,
    so callers get a uniform instruction interface on both.
    """
    if sys.version_info[0] < 3:
        instructions = _iter_as_bytecode_as_instructions_py2(co)
    else:
        instructions = dis.Bytecode(co)
    # Note: a previous revision also materialized the instructions and built
    # an offset->instruction map here; both were unused and were removed.
    for instruction in instructions:
        yield instruction
def collect_return_info(co, use_func_first_line=False):
    # Collect a ReturnInfo for each RETURN_VALUE opcode in *co*.
    # When use_func_first_line is True the lines are made relative to the
    # function's first line.
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        # No line information available at all.
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    lst = []
    op_offset_to_line = dict(dis.findlinestarts(co))
    for instruction in iter_instructions(co):
        curr_op_name = instruction.opname
        if curr_op_name == 'RETURN_VALUE':
            # search=True: the opcode itself may not start a line.
            lst.append(ReturnInfo(_get_line(op_offset_to_line, instruction.offset, firstlineno, search=True)))

    return lst
if sys.version_info[:2] <= (3, 9):
    class _TargetInfo(object):
        # Groups the instruction that ends an except block with (optionally)
        # the conditional jump that checks whether the exception matched.
        # Only used on Python <= 3.9 (see the enclosing version guard).

        def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
            self.except_end_instruction = except_end_instruction
            self.jump_if_not_exc_instruction = jump_if_not_exc_instruction

        def __str__(self):
            msg = ['_TargetInfo(']
            msg.append(self.except_end_instruction.opname)
            if self.jump_if_not_exc_instruction:
                msg.append(' - ')
                msg.append(self.jump_if_not_exc_instruction.opname)
                msg.append('(')
                msg.append(str(self.jump_if_not_exc_instruction.argval))
                msg.append(')')
            msg.append(')')
            return ''.join(msg)
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    '''
    Inspect the instructions at the start of an exception handler to discover
    where the related except block ends (Python <= 3.9 bytecode).

    :param list instructions: all instructions of the code object.
    :param int exception_end_instruction_index: index of the instruction where
        the exception handler starts (the SETUP_FINALLY/SETUP_EXCEPT target).
    :param dict offset_to_instruction_idx: bytecode offset -> index in `instructions`.
    :return: a _TargetInfo or None (None means this isn't a try..except we
        handle, so the caller should continue its outer loop).
    '''
    next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']:  # try..except without checking exception.
        try:
            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'):
                return None
        except IndexError:
            # NOTE(review): if this IndexError actually fires, `jump_instruction`
            # is unbound below (NameError). Presumably unreachable because the
            # handler start is never at index 0 -- confirm.
            pass
        if jump_instruction.opname == 'JUMP_ABSOLUTE':
            # On latest versions of Python 3 the interpreter has a go-backwards step,
            # used to show the initial line of a for/while, etc (which is this
            # JUMP_ABSOLUTE)... we're not really interested in it, but rather on where
            # it points to.
            except_end_instruction = instructions[offset_to_instruction_idx[jump_instruction.argval]]
            idx = offset_to_instruction_idx[except_end_instruction.argval]
            # Search for the POP_EXCEPT which should be at the end of the block.
            for pop_except_instruction in reversed(instructions[:idx]):
                if pop_except_instruction.opname == 'POP_EXCEPT':
                    except_end_instruction = pop_except_instruction
                    return _TargetInfo(except_end_instruction)
            else:
                return None  # i.e.: Continue outer loop
        else:
            # JUMP_FORWARD
            i = offset_to_instruction_idx[jump_instruction.argval]
            try:
                # i.e.: the jump is to the instruction after the block finishes (so, we need to
                # get the previous instruction as that should be the place where the exception
                # block finishes).
                except_end_instruction = instructions[i - 1]
            except:
                pydev_log.critical('Error when computing try..except block end.')
                return None
            return _TargetInfo(except_end_instruction)
    elif next_3 and next_3[0] == 'DUP_TOP':  # try..except AssertionError.
        iter_in = instructions[exception_end_instruction_index + 1:]
        for j, jump_if_not_exc_instruction in enumerate(iter_in):
            if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH':
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)
            elif jump_if_not_exc_instruction.opname == 'COMPARE_OP' and jump_if_not_exc_instruction.argval == 'exception match':
                # Python 3.8 and before
                try:
                    next_instruction = iter_in[j + 1]
                except:
                    continue
                if next_instruction.opname == 'POP_JUMP_IF_FALSE':
                    except_end_instruction = instructions[offset_to_instruction_idx[next_instruction.argval]]
                    return _TargetInfo(except_end_instruction, next_instruction)
        else:
            return None  # i.e.: Continue outer loop
    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Collect TryExceptInfo entries describing the try..except blocks found in
    the bytecode (Python <= 3.9 variant).

    :param co: code object to inspect.
    :param use_func_first_line: when True, lines are relative to co.co_firstlineno.
    :return: list(TryExceptInfo)
    '''
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        # No line information available: nothing to collect.
        return []
    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0
    try_except_info_lst = []
    op_offset_to_line = dict(dis.findlinestarts(co))
    offset_to_instruction_idx = {}
    instructions = list(iter_instructions(co))
    # First pass: build offset -> index mapping so jump targets can be resolved.
    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i
    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname
        if curr_op_name in ('SETUP_FINALLY', 'SETUP_EXCEPT'):  # SETUP_EXCEPT before Python 3.8, SETUP_FINALLY Python 3.8 onwards.
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]
            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'):
                # Not a try..except shape we recognize (e.g. try..finally).
                continue
            except_end_instruction = None
            # Guard against loops in the bytecode: remember handler starts already visited.
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction
                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break
            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True),
                    ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line,
                    try_except_info.except_bytecode_offset,
                    firstlineno,
                    search=True
                )
                try_except_info.except_end_bytecode_offset = except_end_instruction.offset
                try_except_info.except_end_line = _get_line(op_offset_to_line, except_end_instruction.offset, firstlineno, search=True)
                try_except_info_lst.append(try_except_info)
                # Record bare `raise` statements (RAISE_VARARGS with 0 args)
                # found inside the except block.
                for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == 'RAISE_VARARGS':
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True))
    return try_except_info_lst
elif sys.version_info[:2] == (3, 10):
class _TargetInfo(object):
def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
self.except_end_instruction = except_end_instruction
self.jump_if_not_exc_instruction = jump_if_not_exc_instruction
def __str__(self):
msg = ['_TargetInfo(']
msg.append(self.except_end_instruction.opname)
if self.jump_if_not_exc_instruction:
msg.append(' - ')
msg.append(self.jump_if_not_exc_instruction.opname)
msg.append('(')
msg.append(str(self.jump_if_not_exc_instruction.argval))
msg.append(')')
msg.append(')')
return ''.join(msg)
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    '''
    Inspect the instructions at the start of an exception handler to discover
    where the related except block ends (Python 3.10 bytecode).

    :param list instructions: all instructions of the code object.
    :param int exception_end_instruction_index: index of the instruction where
        the exception handler starts (the SETUP_FINALLY target).
    :param dict offset_to_instruction_idx: bytecode offset -> index in `instructions`.
    :return: a _TargetInfo or None (None means this isn't a try..except we
        handle, so the caller should continue its outer loop).
    '''
    next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']:  # try..except without checking exception.
        # Previously there was a jump which was able to point where the exception would end. This
        # is no longer true, now a bare except doesn't really have any indication in the bytecode
        # where the end would be expected if the exception wasn't raised, so, we just blindly
        # search for a POP_EXCEPT from the current position.
        for pop_except_instruction in instructions[exception_end_instruction_index + 3:]:
            if pop_except_instruction.opname == 'POP_EXCEPT':
                except_end_instruction = pop_except_instruction
                return _TargetInfo(except_end_instruction)
        # NOTE: falls through returning None implicitly if no POP_EXCEPT is found.
    elif next_3 and next_3[0] == 'DUP_TOP':  # try..except AssertionError.
        iter_in = instructions[exception_end_instruction_index + 1:]
        for jump_if_not_exc_instruction in iter_in:
            if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH':
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)
        else:
            return None  # i.e.: Continue outer loop
    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Collect TryExceptInfo entries describing the try..except blocks found in
    the bytecode (Python 3.10 variant).

    :param co: code object to inspect.
    :param use_func_first_line: when True, lines are relative to co.co_firstlineno.
    :return: list(TryExceptInfo)
    '''
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        return []
    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0
    try_except_info_lst = []
    op_offset_to_line = dict(dis.findlinestarts(co))
    offset_to_instruction_idx = {}
    instructions = list(iter_instructions(co))
    # First pass: build offset -> index mapping so jump targets can be resolved.
    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i
    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname
        if curr_op_name == 'SETUP_FINALLY':
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]
            jump_instruction = instructions[exception_end_instruction_index]
            if jump_instruction.opname != 'DUP_TOP':
                # Not a try..except shape we recognize (e.g. try..finally).
                continue
            except_end_instruction = None
            # Guard against loops in the bytecode: remember handler starts already visited.
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction
                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break
            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True),
                    ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line,
                    try_except_info.except_bytecode_offset,
                    firstlineno,
                    search=True
                )
                try_except_info.except_end_bytecode_offset = except_end_instruction.offset

                # On Python 3.10 the final line of the except end isn't really correct, rather,
                # it's engineered to be the same line of the except and not the end line of the
                # block, so, the approach taken is to search for the biggest line between the
                # except and the end instruction
                except_end_line = -1
                start_i = offset_to_instruction_idx[try_except_info.except_bytecode_offset]
                end_i = offset_to_instruction_idx[except_end_instruction.offset]
                # Fix: this inner loop previously reused the name `instruction`,
                # shadowing (and clobbering) the outer loop variable -- renamed
                # to avoid the hazard.
                for block_instruction in instructions[start_i: end_i + 1]:
                    found_at_line = op_offset_to_line.get(block_instruction.offset)
                    if found_at_line is not None and found_at_line > except_end_line:
                        except_end_line = found_at_line
                try_except_info.except_end_line = except_end_line - firstlineno

                try_except_info_lst.append(try_except_info)
                # Record bare `raise` statements (RAISE_VARARGS with 0 args)
                # found inside the except block.
                for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == 'RAISE_VARARGS':
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True))
    return try_except_info_lst
elif sys.version_info[:2] >= (3, 11):
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Bytecode-based try..except collection is not supported on Python 3.11+.

    Note: if the filename is available and we can get the source,
    `collect_try_except_info_from_source` is preferred (this is kept only as
    a fallback so callers have a uniform API) -- it always returns an empty
    list.
    '''
    return []
import ast as ast_module
class _Visitor(ast_module.NodeVisitor):
    '''
    AST visitor which collects TryExceptInfo entries -- the source-based
    counterpart of the bytecode-based `collect_try_except_info`.
    '''

    def __init__(self):
        # Collected TryExceptInfo entries (innermost blocks are inserted first).
        self.try_except_infos = []
        # Stack of TryExceptInfo for the try nodes currently being visited.
        self._stack = []
        # Stack of TryExceptInfo whose except handlers are being visited.
        self._in_except_stack = []
        # Biggest line number seen so far (fallback for except_end_line when
        # the node has no end_lineno attribute).
        self.max_line = -1

    def generic_visit(self, node):
        # Track the max line seen while descending the tree.
        if hasattr(node, 'lineno'):
            if node.lineno > self.max_line:
                self.max_line = node.lineno
        return ast_module.NodeVisitor.generic_visit(self, node)

    def visit_Try(self, node):
        info = TryExceptInfo(node.lineno, ignore=True)
        self._stack.append(info)
        self.generic_visit(node)
        assert info is self._stack.pop()
        if not info.ignore:
            # `ignore` is cleared by visit_ExceptHandler, so try..finally
            # (with no handlers) is skipped here.
            self.try_except_infos.insert(0, info)

    if sys.version_info[0] < 3:
        # Python 2 AST uses a different node name for try..except.
        visit_TryExcept = visit_Try

    def visit_ExceptHandler(self, node):
        info = self._stack[-1]
        info.ignore = False
        if info.except_line == -1:
            info.except_line = node.lineno
        self._in_except_stack.append(info)
        self.generic_visit(node)
        if hasattr(node, 'end_lineno'):
            info.except_end_line = node.end_lineno
        else:
            # Older Pythons have no end_lineno: use the max line seen.
            info.except_end_line = self.max_line
        self._in_except_stack.pop()

    if sys.version_info[0] >= 3:

        def visit_Raise(self, node):
            # Record bare `raise` statements (re-raises) inside except blocks.
            for info in self._in_except_stack:
                if node.exc is None:
                    info.raise_lines_in_except.append(node.lineno)
            self.generic_visit(node)

    else:

        def visit_Raise(self, node):
            # Python 2 AST: a bare raise has neither a type nor a traceback.
            for info in self._in_except_stack:
                if node.type is None and node.tback is None:
                    info.raise_lines_in_except.append(node.lineno)
            self.generic_visit(node)
def collect_try_except_info_from_source(filename):
    '''
    Read `filename` and collect the try..except info from its AST.

    :param filename: path of the Python source file to analyze.
    :return: list(TryExceptInfo)
    '''
    with open(filename, 'rb') as stream:
        source_bytes = stream.read()
    return collect_try_except_info_from_contents(source_bytes, filename)
def collect_try_except_info_from_contents(contents, filename='<unknown>'):
    '''
    Parse the given source `contents` and collect try..except info via the AST.

    :param contents: the source code (str or bytes).
    :param filename: name used for error messages while parsing.
    :return: list(TryExceptInfo)
    '''
    tree = ast_module.parse(contents, filename)
    collector = _Visitor()
    collector.visit(tree)
    return collector.try_except_infos
# Sentinel: signals that the instruction list was mutated and the caller
# should restart processing from the lookahead.
RESTART_FROM_LOOKAHEAD = object()
# Sentinel: marks the end of a logical statement when building line contents.
SEPARATOR = object()
class _MsgPart(object):
def __init__(self, line, tok):
assert line >= 0
self.line = line
self.tok = tok
@classmethod
def add_to_line_to_contents(cls, obj, line_to_contents, line=None):
if isinstance(obj, (list, tuple)):
for o in obj:
cls.add_to_line_to_contents(o, line_to_contents, line=line)
return
if isinstance(obj, str):
assert line is not None
line = int(line)
lst = line_to_contents.setdefault(line, [])
lst.append(obj)
return
if isinstance(obj, _MsgPart):
if isinstance(obj.tok, (list, tuple)):
cls.add_to_line_to_contents(obj.tok, line_to_contents, line=obj.line)
return
if isinstance(obj.tok, str):
lst = line_to_contents.setdefault(obj.line, [])
lst.append(obj.tok)
return
raise AssertionError("Unhandled: %" % (obj,))
class _Disassembler(object):
    '''
    Converts a code object into a line-based, low-level textual representation
    of its bytecode (see: code_to_bytecode_representation).
    '''

    def __init__(self, co, firstlineno, level=0):
        '''
        :param co: code object to disassemble.
        :param firstlineno: base line subtracted from all lines (0 or the
            function's first line, depending on the caller).
        :param level: nesting level (incremented for code objects found in
            co_consts).
        '''
        self.co = co
        self.firstlineno = firstlineno
        self.level = level
        self.instructions = list(iter_instructions(co))
        op_offset_to_line = self.op_offset_to_line = dict(dis.findlinestarts(co))

        # Update offsets so that all offsets have the line index (and update it based on
        # the passed firstlineno).
        line_index = co.co_firstlineno - firstlineno
        for instruction in self.instructions:
            new_line_index = op_offset_to_line.get(instruction.offset)
            if new_line_index is not None:
                line_index = new_line_index - firstlineno
                op_offset_to_line[instruction.offset] = line_index
            else:
                op_offset_to_line[instruction.offset] = line_index

    # Sentinel line values used as initial accumulators by min_line/max_line.
    BIG_LINE_INT = 9999999
    SMALL_LINE_INT = -1

    def min_line(self, *args):
        '''Smallest line found among args (instructions, _MsgParts or nested lists).'''
        m = self.BIG_LINE_INT

        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = min(m, self.min_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = min(m, arg.line)

            elif hasattr(arg, 'offset'):
                m = min(m, self.op_offset_to_line[arg.offset])
        return m

    def max_line(self, *args):
        '''Biggest line found among args (instructions, _MsgParts or nested lists).'''
        m = self.SMALL_LINE_INT

        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = max(m, self.max_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = max(m, arg.line)

            elif hasattr(arg, 'offset'):
                m = max(m, self.op_offset_to_line[arg.offset])
        return m

    def _lookahead(self):
        '''
        This handles and converts some common constructs from bytecode to actual source code.

        It may change the list of instructions.
        '''
        msg = self._create_msg_part
        found = []
        fullrepr = None

        # Collect all the load instructions
        for next_instruction in self.instructions:
            if next_instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'):
                found.append(next_instruction)
            else:
                break

        if not found:
            return None

        if next_instruction.opname == 'LOAD_ATTR':
            prev_instruction = found[-1]
            # Remove the current LOAD_ATTR
            assert self.instructions.pop(len(found)) is next_instruction

            # Add the LOAD_ATTR to the previous LOAD
            self.instructions[len(found) - 1] = _Instruction(
                prev_instruction.opname,
                prev_instruction.opcode,
                prev_instruction.starts_line,
                prev_instruction.argval,
                False,  # prev_instruction.is_jump_target,
                prev_instruction.offset,
                (
                    msg(prev_instruction),
                    msg(prev_instruction, '.'),
                    msg(next_instruction)
                ),
            )
            return RESTART_FROM_LOOKAHEAD

        if next_instruction.opname in ('CALL_FUNCTION', 'PRECALL'):
            if len(found) == next_instruction.argval + 1:
                force_restart = False
                delta = 0
            else:
                force_restart = True
                if len(found) > next_instruction.argval + 1:
                    delta = len(found) - (next_instruction.argval + 1)
                else:
                    return None  # This is odd

            del_upto = delta + next_instruction.argval + 2  # +2 = NAME / CALL_FUNCTION
            if next_instruction.opname == 'PRECALL':
                del_upto += 1  # Also remove the CALL right after the PRECALL.
            del self.instructions[delta:del_upto]

            found = iter(found[delta:])
            call_func = next(found)
            args = list(found)
            fullrepr = [
                msg(call_func),
                msg(call_func, '('),
            ]
            prev = call_func
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ', '))
                prev = arg
                fullrepr.append(msg(arg))
            fullrepr.append(msg(prev, ')'))

            if force_restart:
                self.instructions.insert(delta, _Instruction(
                    call_func.opname,
                    call_func.opcode,
                    call_func.starts_line,
                    call_func.argval,
                    False,  # call_func.is_jump_target,
                    call_func.offset,
                    tuple(fullrepr),
                ))
                return RESTART_FROM_LOOKAHEAD

        elif next_instruction.opname == 'BUILD_TUPLE':
            if len(found) == next_instruction.argval:
                force_restart = False
                delta = 0
            else:
                force_restart = True
                if len(found) > next_instruction.argval:
                    delta = len(found) - (next_instruction.argval)
                else:
                    return None  # This is odd

            del self.instructions[delta:delta + next_instruction.argval + 1]  # +1 = BUILD_TUPLE

            found = iter(found[delta:])
            args = [instruction for instruction in found]
            if args:
                first_instruction = args[0]
            else:
                first_instruction = next_instruction
            prev = first_instruction

            fullrepr = []
            fullrepr.append(msg(prev, '('))
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ', '))
                prev = arg
                fullrepr.append(msg(arg))
            fullrepr.append(msg(prev, ')'))

            if force_restart:
                self.instructions.insert(delta, _Instruction(
                    first_instruction.opname,
                    first_instruction.opcode,
                    first_instruction.starts_line,
                    first_instruction.argval,
                    False,  # first_instruction.is_jump_target,
                    first_instruction.offset,
                    tuple(fullrepr),
                ))
                return RESTART_FROM_LOOKAHEAD

        if fullrepr is not None and self.instructions:
            if self.instructions[0].opname == 'POP_TOP':
                self.instructions.pop(0)

            if self.instructions[0].opname in ('STORE_FAST', 'STORE_NAME'):
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction), msg(next_instruction, ' = '), fullrepr

            if self.instructions[0].opname == 'RETURN_VALUE':
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction, 'return ', line=self.min_line(next_instruction, fullrepr)), fullrepr

        return fullrepr

    def _decorate_jump_target(self, instruction, instruction_repr):
        '''Mark jump targets as |offset|<repr> so jumps can be followed visually.'''
        if instruction.is_jump_target:
            return ('|', str(instruction.offset), '|', instruction_repr)

        return instruction_repr

    def _create_msg_part(self, instruction, tok=None, line=None):
        '''Create a _MsgPart for the instruction (`tok` overrides the default repr).'''
        dec = self._decorate_jump_target
        if line is None or line in (self.BIG_LINE_INT, self.SMALL_LINE_INT):
            line = self.op_offset_to_line[instruction.offset]

        argrepr = instruction.argrepr
        if isinstance(argrepr, str) and argrepr.startswith('NULL + '):
            argrepr = argrepr[7:]
        return _MsgPart(
            line, tok if tok is not None else dec(instruction, argrepr))

    def _next_instruction_to_str(self, line_to_contents):
        '''
        Consume instruction(s) and return their representation (a _MsgPart,
        str, tuple of those, RESTART_FROM_LOOKAHEAD or None to skip).
        '''
        # indent = ''
        # if self.level > 0:
        # indent += ' ' * self.level
        # print(indent, 'handle', self.instructions[0])
        if self.instructions:
            ret = self._lookahead()
            if ret:
                return ret

        msg = self._create_msg_part

        instruction = self.instructions.pop(0)

        # Fix: this was `instruction.opname in 'RESUME'` -- a substring check
        # (which would also match e.g. 'RES'); equality is what's intended.
        if instruction.opname == 'RESUME':
            return None

        if instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'):
            next_instruction = self.instructions[0]
            if next_instruction.opname in ('STORE_FAST', 'STORE_NAME'):
                self.instructions.pop(0)
                return (
                    msg(next_instruction),
                    msg(next_instruction, ' = '),
                    msg(instruction))

            if next_instruction.opname == 'RETURN_VALUE':
                self.instructions.pop(0)
                return (msg(instruction, 'return ', line=self.min_line(instruction)), msg(instruction))

            if next_instruction.opname == 'RAISE_VARARGS' and next_instruction.argval == 1:
                self.instructions.pop(0)
                return (msg(instruction, 'raise ', line=self.min_line(instruction)), msg(instruction))

        if instruction.opname == 'LOAD_CONST':
            if inspect.iscode(instruction.argval):
                # Recursively disassemble nested code objects and merge their
                # (indented) contents into our line_to_contents mapping.
                code_line_to_contents = _Disassembler(
                    instruction.argval, self.firstlineno, self.level + 1
                ).build_line_to_contents()

                for contents in code_line_to_contents.values():
                    contents.insert(0, ' ')

                for line, contents in code_line_to_contents.items():
                    line_to_contents.setdefault(line, []).extend(contents)

                return msg(instruction, 'LOAD_CONST(code)')

        if instruction.opname == 'RAISE_VARARGS':
            if instruction.argval == 0:
                return msg(instruction, 'raise')

        if instruction.opname == 'SETUP_FINALLY':
            return msg(instruction, ('try(', instruction.argrepr, '):'))

        if instruction.argrepr:
            return msg(instruction, (instruction.opname, '(', instruction.argrepr, ')'))

        if instruction.argval:
            return msg(instruction, '%s{%s}' % (instruction.opname, instruction.argval,))

        return msg(instruction, instruction.opname)

    def build_line_to_contents(self):
        '''Return dict: line -> list of strings/SEPARATOR tokens for that line.'''
        # print('----')
        # for instruction in self.instructions:
        # print(instruction)
        # print('----\n\n')
        line_to_contents = {}

        instructions = self.instructions
        while instructions:
            s = self._next_instruction_to_str(line_to_contents)
            if s is RESTART_FROM_LOOKAHEAD:
                continue
            if s is None:
                continue

            _MsgPart.add_to_line_to_contents(s, line_to_contents)
            m = self.max_line(s)
            if m != self.SMALL_LINE_INT:
                line_to_contents.setdefault(m, []).append(SEPARATOR)
        return line_to_contents

    def disassemble(self):
        '''Build the final textual representation for the code object.'''
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        show_lines = False
        for line, contents in sorted(line_to_contents.items()):
            while last_line < line - 1:
                if show_lines:
                    stream.write('%s.\n' % (last_line + 1,))
                else:
                    stream.write('\n')
                last_line += 1

            if show_lines:
                stream.write('%s. ' % (line,))

            for i, content in enumerate(contents):
                if content == SEPARATOR:
                    if i != len(contents) - 1:
                        stream.write(', ')
                else:
                    stream.write(content)
            stream.write('\n')

            last_line = line

        return stream.getvalue()
def code_to_bytecode_representation(co, use_func_first_line=False):
    '''
    A simple disassemble of bytecode.

    It does not attempt to provide the full Python source code, rather, it provides a low-level
    representation of the bytecode, respecting the lines (so, its target is making the bytecode
    easier to grasp and not providing the original source code).

    Note that it does show jump locations/targets and converts some common bytecode constructs to
    Python code to make it a bit easier to understand.
    '''
    # Reference for bytecodes:
    # https://docs.python.org/3/library/dis.html
    base_line = co.co_firstlineno if use_func_first_line else 0
    return _Disassembler(co, base_line).disassemble()

View File

@ -0,0 +1,208 @@
# Wire-protocol command ids exchanged between the IDE (client) and pydevd.
# NOTE: the values are part of the protocol -- do not renumber existing entries.
CMD_RUN = 101
CMD_LIST_THREADS = 102
CMD_THREAD_CREATE = 103
CMD_THREAD_KILL = 104
CMD_THREAD_SUSPEND = 105
CMD_THREAD_RUN = 106
CMD_STEP_INTO = 107
CMD_STEP_OVER = 108
CMD_STEP_RETURN = 109
CMD_GET_VARIABLE = 110
CMD_SET_BREAK = 111
CMD_REMOVE_BREAK = 112
CMD_EVALUATE_EXPRESSION = 113
CMD_GET_FRAME = 114
CMD_EXEC_EXPRESSION = 115
CMD_WRITE_TO_CONSOLE = 116
CMD_CHANGE_VARIABLE = 117
CMD_RUN_TO_LINE = 118
CMD_RELOAD_CODE = 119
CMD_GET_COMPLETIONS = 120

# Note: renumbered (conflicted on merge)
CMD_CONSOLE_EXEC = 121
CMD_ADD_EXCEPTION_BREAK = 122
CMD_REMOVE_EXCEPTION_BREAK = 123
CMD_LOAD_SOURCE = 124
CMD_ADD_DJANGO_EXCEPTION_BREAK = 125
CMD_REMOVE_DJANGO_EXCEPTION_BREAK = 126
CMD_SET_NEXT_STATEMENT = 127
CMD_SMART_STEP_INTO = 128
CMD_EXIT = 129
CMD_SIGNATURE_CALL_TRACE = 130
CMD_SET_PY_EXCEPTION = 131
CMD_GET_FILE_CONTENTS = 132
CMD_SET_PROPERTY_TRACE = 133
# Pydev debug console commands
CMD_EVALUATE_CONSOLE_EXPRESSION = 134
CMD_RUN_CUSTOM_OPERATION = 135
CMD_GET_BREAKPOINT_EXCEPTION = 136
CMD_STEP_CAUGHT_EXCEPTION = 137
CMD_SEND_CURR_EXCEPTION_TRACE = 138
CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139
CMD_IGNORE_THROWN_EXCEPTION_AT = 140
CMD_ENABLE_DONT_TRACE = 141
CMD_SHOW_CONSOLE = 142
CMD_GET_ARRAY = 143
CMD_STEP_INTO_MY_CODE = 144
CMD_GET_CONCURRENCY_EVENT = 145
CMD_SHOW_RETURN_VALUES = 146
CMD_INPUT_REQUESTED = 147
CMD_GET_DESCRIPTION = 148
CMD_PROCESS_CREATED = 149
CMD_SHOW_CYTHON_WARNING = 150
CMD_LOAD_FULL_VALUE = 151
CMD_GET_THREAD_STACK = 152
# This is mostly for unit-tests to diagnose errors on ci.
CMD_THREAD_DUMP_TO_STDERR = 153
# Sent from the client to signal that we should stop when we start executing user code.
CMD_STOP_ON_START = 154
# When the debugger is stopped in an exception, this command will provide the details of the current exception (in the current thread).
CMD_GET_EXCEPTION_DETAILS = 155
# Allows configuring pydevd settings (can be called multiple times and only keys
# available in the json will be configured -- keys not passed will not change the
# previous configuration).
CMD_PYDEVD_JSON_CONFIG = 156
CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION = 157
CMD_THREAD_RESUME_SINGLE_NOTIFICATION = 158
CMD_STEP_OVER_MY_CODE = 159
CMD_STEP_RETURN_MY_CODE = 160
CMD_SET_PY_EXCEPTION_JSON = 161
CMD_SET_PATH_MAPPING_JSON = 162
CMD_GET_SMART_STEP_INTO_VARIANTS = 163  # XXX: PyCharm has 160 for this (we're currently incompatible anyways).
# 2xx: events / configuration added later.
CMD_REDIRECT_OUTPUT = 200
CMD_GET_NEXT_STATEMENT_TARGETS = 201
CMD_SET_PROJECT_ROOTS = 202
CMD_MODULE_EVENT = 203
CMD_PROCESS_EVENT = 204
CMD_AUTHENTICATE = 205
CMD_STEP_INTO_COROUTINE = 206
CMD_LOAD_SOURCE_FROM_FRAME_ID = 207
CMD_SET_FUNCTION_BREAK = 208
# 5xx: responses / meta commands.
CMD_VERSION = 501
CMD_RETURN = 502
CMD_SET_PROTOCOL = 503
CMD_ERROR = 901
# this number can be changed if there's need to do so
# if the io is too big, we'll not send all (could make the debugger too non-responsive)
MAX_IO_MSG_SIZE = 10000
# Replaced by the build machinery with the actual build number.
VERSION_STRING = "@@BUILD_NUMBER@@"
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
file_system_encoding = getfilesystemencoding()
filesystem_encoding_is_utf8 = file_system_encoding.lower() in ('utf-8', 'utf_8', 'utf8')
# Maps the (stringified) command id to its CMD_* constant name, for logging.
# Keep this in sync with the CMD_* constants above.
ID_TO_MEANING = {
    '101': 'CMD_RUN',
    '102': 'CMD_LIST_THREADS',
    '103': 'CMD_THREAD_CREATE',
    '104': 'CMD_THREAD_KILL',
    '105': 'CMD_THREAD_SUSPEND',
    '106': 'CMD_THREAD_RUN',
    '107': 'CMD_STEP_INTO',
    '108': 'CMD_STEP_OVER',
    '109': 'CMD_STEP_RETURN',
    '110': 'CMD_GET_VARIABLE',
    '111': 'CMD_SET_BREAK',
    '112': 'CMD_REMOVE_BREAK',
    '113': 'CMD_EVALUATE_EXPRESSION',
    '114': 'CMD_GET_FRAME',
    '115': 'CMD_EXEC_EXPRESSION',
    '116': 'CMD_WRITE_TO_CONSOLE',
    '117': 'CMD_CHANGE_VARIABLE',
    '118': 'CMD_RUN_TO_LINE',
    '119': 'CMD_RELOAD_CODE',
    '120': 'CMD_GET_COMPLETIONS',
    '121': 'CMD_CONSOLE_EXEC',
    '122': 'CMD_ADD_EXCEPTION_BREAK',
    '123': 'CMD_REMOVE_EXCEPTION_BREAK',
    '124': 'CMD_LOAD_SOURCE',
    '125': 'CMD_ADD_DJANGO_EXCEPTION_BREAK',
    '126': 'CMD_REMOVE_DJANGO_EXCEPTION_BREAK',
    '127': 'CMD_SET_NEXT_STATEMENT',
    '128': 'CMD_SMART_STEP_INTO',
    '129': 'CMD_EXIT',
    '130': 'CMD_SIGNATURE_CALL_TRACE',
    '131': 'CMD_SET_PY_EXCEPTION',
    '132': 'CMD_GET_FILE_CONTENTS',
    '133': 'CMD_SET_PROPERTY_TRACE',
    '134': 'CMD_EVALUATE_CONSOLE_EXPRESSION',
    '135': 'CMD_RUN_CUSTOM_OPERATION',
    '136': 'CMD_GET_BREAKPOINT_EXCEPTION',
    '137': 'CMD_STEP_CAUGHT_EXCEPTION',
    '138': 'CMD_SEND_CURR_EXCEPTION_TRACE',
    '139': 'CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED',
    '140': 'CMD_IGNORE_THROWN_EXCEPTION_AT',
    '141': 'CMD_ENABLE_DONT_TRACE',
    '142': 'CMD_SHOW_CONSOLE',
    '143': 'CMD_GET_ARRAY',
    '144': 'CMD_STEP_INTO_MY_CODE',
    '145': 'CMD_GET_CONCURRENCY_EVENT',
    '146': 'CMD_SHOW_RETURN_VALUES',
    '147': 'CMD_INPUT_REQUESTED',
    '148': 'CMD_GET_DESCRIPTION',
    '149': 'CMD_PROCESS_CREATED',  # Note: this is actually a notification of a sub-process created.
    '150': 'CMD_SHOW_CYTHON_WARNING',
    '151': 'CMD_LOAD_FULL_VALUE',
    '152': 'CMD_GET_THREAD_STACK',
    '153': 'CMD_THREAD_DUMP_TO_STDERR',
    '154': 'CMD_STOP_ON_START',
    '155': 'CMD_GET_EXCEPTION_DETAILS',
    '156': 'CMD_PYDEVD_JSON_CONFIG',
    '157': 'CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION',
    '158': 'CMD_THREAD_RESUME_SINGLE_NOTIFICATION',
    '159': 'CMD_STEP_OVER_MY_CODE',
    '160': 'CMD_STEP_RETURN_MY_CODE',
    '161': 'CMD_SET_PY_EXCEPTION_JSON',
    '162': 'CMD_SET_PATH_MAPPING_JSON',
    '163': 'CMD_GET_SMART_STEP_INTO_VARIANTS',
    '200': 'CMD_REDIRECT_OUTPUT',
    '201': 'CMD_GET_NEXT_STATEMENT_TARGETS',
    '202': 'CMD_SET_PROJECT_ROOTS',
    '203': 'CMD_MODULE_EVENT',
    '204': 'CMD_PROCESS_EVENT',  # DAP process event.
    '205': 'CMD_AUTHENTICATE',
    '206': 'CMD_STEP_INTO_COROUTINE',
    '207': 'CMD_LOAD_SOURCE_FROM_FRAME_ID',
    # Fix: '208' was missing although CMD_SET_FUNCTION_BREAK = 208 is defined.
    '208': 'CMD_SET_FUNCTION_BREAK',
    '501': 'CMD_VERSION',
    '502': 'CMD_RETURN',
    '503': 'CMD_SET_PROTOCOL',
    '901': 'CMD_ERROR',
}
def constant_to_str(constant):
    '''
    Translate a command id into its CMD_* name for logging purposes.

    :param constant: the command id (int or str).
    :return: the CMD_* name, or '<Unknown: x>' if the id is not mapped.
    '''
    meaning = ID_TO_MEANING.get(str(constant))
    return meaning if meaning else '<Unknown: %s>' % (constant,)

View File

@ -0,0 +1,185 @@
import os
import sys
class ArgHandlerWithParam:
    '''
    Handler for command-line arguments which require a value (e.g.: --port 5678).
    '''

    def __init__(self, arg_name, convert_val=None, default_val=None):
        self.arg_name = arg_name
        self.arg_v_rep = '--%s' % (arg_name,)
        self.convert_val = convert_val  # Optional converter applied to the raw value.
        self.default_val = default_val

    def to_argv(self, lst, setup):
        '''Append '--name value' to lst when setup has a non-default value.'''
        value = setup.get(self.arg_name)
        if value is None or value == self.default_val:
            return
        lst.append(self.arg_v_rep)
        lst.append('%s' % (value,))

    def handle_argv(self, argv, i, setup):
        '''Consume '--name value' (two entries at position i) from argv into setup.'''
        assert argv[i] == self.arg_v_rep
        del argv[i]

        value = argv[i]
        if self.convert_val:
            value = self.convert_val(value)

        setup[self.arg_name] = value
        del argv[i]
class ArgHandlerBool:
    '''
    Handler for flag-style arguments: when the flag is present, mark it as
    'True' in setup.
    '''

    def __init__(self, arg_name, default_val=False):
        self.arg_name = arg_name
        self.arg_v_rep = '--%s' % (arg_name,)
        self.default_val = default_val

    def to_argv(self, lst, setup):
        '''Append '--name' to lst when the flag is truthy in setup.'''
        if setup.get(self.arg_name):
            lst.append(self.arg_v_rep)

    def handle_argv(self, argv, i, setup):
        '''Consume the flag at position i from argv, marking it True in setup.'''
        assert argv[i] == self.arg_v_rep
        del argv[i]
        setup[self.arg_name] = True
def convert_ppid(ppid):
    '''
    Convert the --ppid argument to an int, validating that it is not the pid
    of the current process (0 is allowed and means: no parent pid).

    :raises AssertionError: if the given ppid equals os.getpid().
    '''
    converted = int(ppid)
    if converted != 0 and converted == os.getpid():
        raise AssertionError(
            'ppid passed is the same as the current process pid (%s)!' % (converted,))
    return converted
# All command-line handlers recognized by process_command_line/setup_to_argv.
ACCEPTED_ARG_HANDLERS = [
    ArgHandlerWithParam('port', int, 0),
    ArgHandlerWithParam('ppid', convert_ppid, 0),
    ArgHandlerWithParam('vm_type'),
    ArgHandlerWithParam('client'),
    ArgHandlerWithParam('access-token'),
    ArgHandlerWithParam('client-access-token'),
    ArgHandlerWithParam('debug-mode'),
    ArgHandlerWithParam('preimport'),

    # Logging
    ArgHandlerWithParam('log-file'),
    ArgHandlerWithParam('log-level', int, None),

    ArgHandlerBool('server'),
    ArgHandlerBool('multiproc'),  # Used by PyCharm (reuses connection: ssh tunneling)
    ArgHandlerBool('multiprocess'),  # Used by PyDev (creates new connection to ide)
    ArgHandlerBool('save-signatures'),
    ArgHandlerBool('save-threading'),
    ArgHandlerBool('save-asyncio'),
    ArgHandlerBool('print-in-debugger-startup'),
    ArgHandlerBool('cmd-line'),
    ArgHandlerBool('module'),
    ArgHandlerBool('skip-notify-stdin'),

    # The ones below should've been just one setting to specify the protocol, but for compatibility
    # reasons they're passed as a flag but are mutually exclusive.
    ArgHandlerBool('json-dap'),  # Protocol used by ptvsd to communicate with pydevd (a single json message in each read)
    ArgHandlerBool('json-dap-http'),  # Actual DAP (json messages over http protocol).
    ArgHandlerBool('protocol-quoted-line'),  # Custom protocol with quoted lines.
    ArgHandlerBool('protocol-http'),  # Custom protocol with http.
]

# Fast lookup: '--<name>' representation -> handler instance.
ARGV_REP_TO_HANDLER = {}
for handler in ACCEPTED_ARG_HANDLERS:
    ARGV_REP_TO_HANDLER[handler.arg_v_rep] = handler
def get_pydevd_file():
    '''
    Return the path to the pydevd.py source file, mapping compiled artifacts
    (.pyc / Jython $py.class) back to the .py source.
    '''
    import pydevd
    filename = pydevd.__file__
    if filename.endswith('.pyc'):
        filename = filename[:-1]
    elif filename.endswith('$py.class'):
        filename = filename[:-len('$py.class')] + '.py'
    return filename
def setup_to_argv(setup, skip_names=None):
    '''
    :param dict setup:
        A dict previously gotten from process_command_line.

    :param set skip_names:
        The names in the setup which shouldn't be converted to argv.

    :note: does not handle --file nor --DEBUG.
    '''
    skip_names = set() if skip_names is None else skip_names

    argv = [get_pydevd_file()]
    for arg_handler in ACCEPTED_ARG_HANDLERS:
        arg_name = arg_handler.arg_name
        if arg_name in setup and arg_name not in skip_names:
            arg_handler.to_argv(argv, setup)
    return argv
def process_command_line(argv):
    """Parse pydevd's own arguments, removing them from `argv` in place.

    :param list argv: full command line; argv[0] (the program name) is removed.
    :return dict: one entry per accepted handler (default value when the flag
        was not passed) plus 'file' (the target script) and 'qt-support'.
    :raises ValueError: on an unknown option or invalid --qt-support mode.
    """
    setup = {}
    for handler in ACCEPTED_ARG_HANDLERS:
        setup[handler.arg_name] = handler.default_val
    setup['file'] = ''
    setup['qt-support'] = ''

    # Keep the original command line only for error reporting.
    initial_argv = tuple(argv)

    i = 0
    del argv[0]
    while i < len(argv):
        handler = ARGV_REP_TO_HANDLER.get(argv[i])
        if handler is not None:
            # Handlers consume their own entries from argv.
            handler.handle_argv(argv, i, setup)

        elif argv[i].startswith('--qt-support'):
            # The --qt-support is special because we want to keep backward compatibility:
            # Previously, just passing '--qt-support' meant that we should use the auto-discovery mode
            # whereas now, if --qt-support is passed, it should be passed as --qt-support=<mode>, where
            # mode can be one of 'auto', 'none', 'pyqt5', 'pyqt4', 'pyside', 'pyside2'.
            if argv[i] == '--qt-support':
                setup['qt-support'] = 'auto'

            elif argv[i].startswith('--qt-support='):
                qt_support = argv[i][len('--qt-support='):]
                valid_modes = ('none', 'auto', 'pyqt5', 'pyqt4', 'pyside', 'pyside2')
                if qt_support not in valid_modes:
                    raise ValueError("qt-support mode invalid: " + qt_support)
                if qt_support == 'none':
                    # On none, actually set an empty string to evaluate to False.
                    setup['qt-support'] = ''
                else:
                    setup['qt-support'] = qt_support
            else:
                raise ValueError("Unexpected definition for qt-support flag: " + argv[i])

            del argv[i]

        elif argv[i] == '--file':
            # --file is special because it's the last one (so, no handler for it).
            del argv[i]
            setup['file'] = argv[i]
            i = len(argv)  # pop out, file is our last argument

        elif argv[i] == '--DEBUG':
            sys.stderr.write('pydevd: --DEBUG parameter deprecated. Use `--debug-level=3` instead.\n')
            # Bugfix: consume the deprecated flag. Previously neither `i` nor
            # `argv` changed in this branch, so the while-loop never terminated.
            del argv[i]

        else:
            raise ValueError("Unexpected option: %s when processing: %s" % (argv[i], initial_argv))
    return setup

View File

@ -0,0 +1,346 @@
import time
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle._pydev_saved_modules import threading
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import GlobalDebuggerHolder
from _pydevd_bundle.pydevd_constants import get_thread_id
from _pydevd_bundle.pydevd_net_command import NetCommand
from _pydevd_bundle.pydevd_concurrency_analyser.pydevd_thread_wrappers import ObjectWrapper, wrap_attr
import pydevd_file_utils
from _pydev_bundle import pydev_log
import sys
file_system_encoding = getfilesystemencoding()
from urllib.parse import quote
threadingCurrentThread = threading.current_thread
DONT_TRACE_THREADING = ['threading.py', 'pydevd.py']
INNER_METHODS = ['_stop']
INNER_FILES = ['threading.py']
THREAD_METHODS = ['start', '_stop', 'join']
LOCK_METHODS = ['__init__', 'acquire', 'release', '__enter__', '__exit__']
QUEUE_METHODS = ['put', 'get']
def cur_time():
    # Note: despite the historical comment claiming milliseconds, this is
    # microseconds since the epoch (time.time() is multiplied by 1e6).
    '''Return the time since the epoch in microseconds, as an int.'''
    return int(round(time.time() * 1000000))
def get_text_list_for_frame(frame):
    '''Build a list of XML chunks (one <frame .../> element per stack entry),
    walking outward from `frame` through f_back links.

    Variables are intentionally left empty: they are fetched on-demand later.
    On any error a possibly-partial list is returned (the error is logged).
    '''
    # partial copy-paste from make_thread_suspend_str
    curFrame = frame
    cmdTextList = []
    try:
        while curFrame:
            # print cmdText
            myId = str(id(curFrame))
            # print "id is ", myId
            if curFrame.f_code is None:
                break  # Iron Python sometimes does not have it!
            myName = curFrame.f_code.co_name  # method name (if in method) or ? if global
            if myName is None:
                break  # Iron Python sometimes does not have it!
            # print "name is ", myName
            absolute_filename = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(curFrame)[0]
            # Translate to the path as seen by the IDE (source mapping).
            my_file, _applied_mapping = pydevd_file_utils.map_file_to_client(absolute_filename)
            # print "file is ", my_file
            # my_file = inspect.getsourcefile(curFrame) or inspect.getfile(frame)
            myLine = str(curFrame.f_lineno)
            # print "line is ", myLine
            # the variables are all gotten 'on-demand'
            # variables = pydevd_xml.frame_vars_to_xml(curFrame.f_locals)
            variables = ''
            cmdTextList.append('<frame id="%s" name="%s" ' % (myId , pydevd_xml.make_valid_xml_value(myName)))
            cmdTextList.append('file="%s" line="%s">' % (quote(my_file, '/>_= \t'), myLine))
            cmdTextList.append(variables)
            cmdTextList.append("</frame>")
            curFrame = curFrame.f_back
    except:
        pydev_log.exception()
    return cmdTextList
def send_concurrency_message(event_class, time, name, thread_id, type, event, file, line, frame, lock_id=0, parent=None):
    '''Send one concurrency-visualization event to the IDE.

    :param event_class: XML tag name, e.g. "threading_event" or "asyncio_event".
    :param time: timestamp relative to the owning logger's start time.
    :param type: "thread" or "lock"; lock events also carry `lock_id`.
    :param frame: stack at the event point, serialized after the event element.
    :note: `time`, `type` and `file` shadow builtins/modules, but the parameter
        names are part of the call convention used throughout this module.

    No-op when there is no active debugger or its writer is not set up.
    The command is sent with id 145 (concurrency-event command — TODO confirm
    against the protocol's command-id table).
    '''
    dbg = GlobalDebuggerHolder.global_dbg
    if dbg is None:
        return
    cmdTextList = ['<xml>']
    cmdTextList.append('<' + event_class)
    cmdTextList.append(' time="%s"' % pydevd_xml.make_valid_xml_value(str(time)))
    cmdTextList.append(' name="%s"' % pydevd_xml.make_valid_xml_value(name))
    cmdTextList.append(' thread_id="%s"' % pydevd_xml.make_valid_xml_value(thread_id))
    cmdTextList.append(' type="%s"' % pydevd_xml.make_valid_xml_value(type))
    if type == "lock":
        cmdTextList.append(' lock_id="%s"' % pydevd_xml.make_valid_xml_value(str(lock_id)))
    if parent is not None:
        cmdTextList.append(' parent="%s"' % pydevd_xml.make_valid_xml_value(parent))
    cmdTextList.append(' event="%s"' % pydevd_xml.make_valid_xml_value(event))
    cmdTextList.append(' file="%s"' % pydevd_xml.make_valid_xml_value(file))
    cmdTextList.append(' line="%s"' % pydevd_xml.make_valid_xml_value(str(line)))
    cmdTextList.append('></' + event_class + '>')
    cmdTextList += get_text_list_for_frame(frame)
    cmdTextList.append('</xml>')
    text = ''.join(cmdTextList)
    if dbg.writer is not None:
        dbg.writer.add_command(NetCommand(145, 0, text))
def log_new_thread(global_debugger, t):
    '''Emit a "thread start" event for thread `t`.

    "code_name" and line 0 are placeholders: there is no frame available for
    the creation point. The thread is reported as its own parent.
    '''
    event_time = cur_time() - global_debugger.thread_analyser.start_time
    send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "thread",
                             "start", "code_name", 0, None, parent=get_thread_id(t))
class ThreadingLogger:
    '''Inspects frames handed in by the tracing machinery and emits threading
    events (thread start/join/stop and lock/queue acquire/release) to the
    IDE's concurrency visualizer via send_concurrency_message.
    '''

    def __init__(self):
        # Timestamps of all events are reported relative to this start time.
        self.start_time = cur_time()

    def set_start_time(self, time):
        # Note: `time` shadows the module name; kept for API compatibility.
        self.start_time = time

    def log_event(self, frame):
        '''Decide whether `frame` is an interesting threading-related call and,
        if so, send the corresponding event. Internal errors are swallowed and
        logged so the debuggee is never broken by the analyser.
        '''
        write_log = False
        self_obj = None
        # Interesting if the frame's `self` is a Thread or a wrapped lock/queue...
        if "self" in frame.f_locals:
            self_obj = frame.f_locals["self"]
            if isinstance(self_obj, threading.Thread) or self_obj.__class__ == ObjectWrapper:
                write_log = True
        # ...or if a Thread method appears two frames up the stack.
        if hasattr(frame, "f_back") and frame.f_back is not None:
            back = frame.f_back
            if hasattr(back, "f_back") and back.f_back is not None:
                back = back.f_back
                if "self" in back.f_locals:
                    if isinstance(back.f_locals["self"], threading.Thread):
                        write_log = True
        try:
            if write_log:
                t = threadingCurrentThread()
                back = frame.f_back
                if not back:
                    return
                name, _, back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back)
                event_time = cur_time() - self.start_time
                method_name = frame.f_code.co_name
                if isinstance(self_obj, threading.Thread):
                    # Patch run() on first sight so its completion can be
                    # detected via the pydev_after_run_call marker frame.
                    if not hasattr(self_obj, "_pydev_run_patched"):
                        wrap_attr(self_obj, "run")
                    if (method_name in THREAD_METHODS) and (back_base not in DONT_TRACE_THREADING or \
                            (method_name in INNER_METHODS and back_base in INNER_FILES)):
                        thread_id = get_thread_id(self_obj)
                        name = self_obj.getName()  # NOTE(review): getName() is deprecated; `self_obj.name` is the modern equivalent.
                        real_method = frame.f_code.co_name
                        parent = None
                        if real_method == "_stop":
                            # Report the internal _stop as a user-visible "stop";
                            # skip the _wait_for_tstate_lock helper frames.
                            if back_base in INNER_FILES and \
                                    back.f_code.co_name == "_wait_for_tstate_lock":
                                back = back.f_back.f_back
                            real_method = "stop"
                            if hasattr(self_obj, "_pydev_join_called"):
                                parent = get_thread_id(t)
                        elif real_method == "join":
                            # join called in the current thread, not in self object
                            if not self_obj.is_alive():
                                return
                            thread_id = get_thread_id(t)
                            name = t.name
                            self_obj._pydev_join_called = True
                        if real_method == "start":
                            parent = get_thread_id(t)
                        send_concurrency_message("threading_event", event_time, name, thread_id, "thread",
                            real_method, back.f_code.co_filename, back.f_lineno, back, parent=parent)
                        # print(event_time, self_obj.getName(), thread_id, "thread",
                        # real_method, back.f_code.co_filename, back.f_lineno)
                if method_name == "pydev_after_run_call":
                    # Marker frame produced by the run() wrapper: the thread's
                    # run() has just finished -> report a "stop" event.
                    if hasattr(frame, "f_back") and frame.f_back is not None:
                        back = frame.f_back
                        if hasattr(back, "f_back") and back.f_back is not None:
                            back = back.f_back
                        if "self" in back.f_locals:
                            if isinstance(back.f_locals["self"], threading.Thread):
                                my_self_obj = frame.f_back.f_back.f_locals["self"]
                                my_back = frame.f_back.f_back
                                my_thread_id = get_thread_id(my_self_obj)
                                send_massage = True  # (sic: historical local name, kept as-is)
                                if hasattr(my_self_obj, "_pydev_join_called"):
                                    send_massage = False
                                    # we can't detect stop after join in Python 2 yet
                                if send_massage:
                                    send_concurrency_message("threading_event", event_time, "Thread", my_thread_id, "thread",
                                        "stop", my_back.f_code.co_filename, my_back.f_lineno, my_back, parent=None)
                if self_obj.__class__ == ObjectWrapper:
                    # Wrapped lock/queue object: translate the call_begin /
                    # call_end marker frames into acquire/release events.
                    if back_base in DONT_TRACE_THREADING:
                        # do not trace methods called from threading
                        return
                    back_back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back.f_back)[2]
                    back = back.f_back
                    if back_back_base in DONT_TRACE_THREADING:
                        # back_back_base is the file where the method was called from
                        return
                    if method_name == "__init__":
                        send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock",
                            method_name, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(frame.f_locals["self"])))
                    if "attr" in frame.f_locals and \
                            (frame.f_locals["attr"] in LOCK_METHODS or
                            frame.f_locals["attr"] in QUEUE_METHODS):
                        real_method = frame.f_locals["attr"]
                        if method_name == "call_begin":
                            real_method += "_begin"
                        elif method_name == "call_end":
                            real_method += "_end"
                        else:
                            return
                        if real_method == "release_end":
                            # do not log release end. Maybe use it later
                            return
                        send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock",
                            real_method, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj)))
                        if real_method in ("put_end", "get_end"):
                            # fake release for queue, cause we don't call it directly
                            send_concurrency_message("threading_event", event_time, t.name, get_thread_id(t), "lock",
                                "release", back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj)))
                        # print(event_time, t.name, get_thread_id(t), "lock",
                        # real_method, back.f_code.co_filename, back.f_lineno)
        except Exception:
            pydev_log.exception()
class NameManager:
    '''Assigns stable, human-readable sequential names ("<prefix>-<n>") to ids.

    The same id always maps to the same name; new ids get the next number.
    '''

    def __init__(self, name_prefix):
        self.tasks = {}  # id -> assigned name
        self.last = 0  # last sequence number handed out
        self.prefix = name_prefix

    def get(self, id):
        '''Return the name for `id`, creating "<prefix>-<n>" on first use.'''
        try:
            return self.tasks[id]
        except KeyError:
            self.last += 1
            new_name = self.prefix + "-" + str(self.last)
            self.tasks[id] = new_name
            return new_name
class AsyncioLogger:
    '''Emits asyncio task/lock/queue events for the concurrency visualizer by
    inspecting frames of asyncio internals (Task._step, Lock.acquire, Queue.put, ...).
    '''

    def __init__(self):
        # Sequential display names for tasks and coroutines.
        self.task_mgr = NameManager("Task")
        self.coro_mgr = NameManager("Coro")
        self.start_time = cur_time()

    def get_task_id(self, frame):
        '''Walk outward from `frame` looking for the Task._step frame driving
        the current coroutine; return id(task), or None when not found (or
        asyncio was never imported).
        '''
        asyncio = sys.modules.get('asyncio')
        if asyncio is None:
            # If asyncio was not imported, there's nothing to be done
            # (also fixes issue where multiprocessing is imported due
            # to asyncio).
            return None
        while frame is not None:
            if "self" in frame.f_locals:
                self_obj = frame.f_locals["self"]
                if isinstance(self_obj, asyncio.Task):
                    method_name = frame.f_code.co_name
                    if method_name == "_step":
                        return id(self_obj)
            frame = frame.f_back
        return None

    def log_event(self, frame):
        '''Translate a frame from asyncio internals into start/stop events for
        tasks and acquire/release events for asyncio.Lock / asyncio.Queue.
        NOTE(review): relies on asyncio-internal attributes (Task._step,
        Lock._waiters) — may need updating across Python versions.
        '''
        event_time = cur_time() - self.start_time
        # Debug loop iterations
        # if isinstance(self_obj, asyncio.base_events.BaseEventLoop):
        # if method_name == "_run_once":
        # print("Loop iteration")
        if not hasattr(frame, "f_back") or frame.f_back is None:
            return
        asyncio = sys.modules.get('asyncio')
        if asyncio is None:
            # If asyncio was not imported, there's nothing to be done
            # (also fixes issue where multiprocessing is imported due
            # to asyncio).
            return
        back = frame.f_back
        if "self" in frame.f_locals:
            self_obj = frame.f_locals["self"]
            if isinstance(self_obj, asyncio.Task):
                # Task finishing: set_result on the task itself.
                method_name = frame.f_code.co_name
                if method_name == "set_result":
                    task_id = id(self_obj)
                    task_name = self.task_mgr.get(str(task_id))
                    send_concurrency_message("asyncio_event", event_time, task_name, task_name, "thread", "stop", frame.f_code.co_filename,
                                             frame.f_lineno, frame)
                # Task creation: caller frame is Task.__init__.
                method_name = back.f_code.co_name
                if method_name == "__init__":
                    task_id = id(self_obj)
                    task_name = self.task_mgr.get(str(task_id))
                    send_concurrency_message("asyncio_event", event_time, task_name, task_name, "thread", "start", frame.f_code.co_filename,
                                             frame.f_lineno, frame)
            method_name = frame.f_code.co_name
            if isinstance(self_obj, asyncio.Lock):
                if method_name in ("acquire", "release"):
                    task_id = self.get_task_id(frame)
                    task_name = self.task_mgr.get(str(task_id))
                    if method_name == "acquire":
                        # Uncontended acquire: report begin immediately; the
                        # suffix below then reports whether it completed.
                        if not self_obj._waiters and not self_obj.locked():
                            send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                     method_name + "_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                        if self_obj.locked():
                            method_name += "_begin"
                        else:
                            method_name += "_end"
                    elif method_name == "release":
                        method_name += "_end"
                    send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                             method_name, frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
            if isinstance(self_obj, asyncio.Queue):
                # Queue operations are modeled as lock acquire/release pairs:
                # public put/get mark the begin; internal _put/_get mark the end.
                if method_name in ("put", "get", "_put", "_get"):
                    task_id = self.get_task_id(frame)
                    task_name = self.task_mgr.get(str(task_id))
                    if method_name == "put":
                        send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                 "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                    elif method_name == "_put":
                        send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                 "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                        send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                 "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                    elif method_name == "get":
                        back = frame.f_back
                        if back.f_code.co_name != "send":
                            send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                     "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                        else:
                            send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                     "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))
                            send_concurrency_message("asyncio_event", event_time, task_name, task_name, "lock",
                                                     "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj)))

View File

@ -0,0 +1,83 @@
from _pydev_bundle._pydev_saved_modules import threading
def wrapper(fun):
    '''Wrap `fun` so the concurrency tracer can detect when the call finished.

    The returned function runs `fun` and then calls the empty marker function
    `pydev_after_run_call`; the tracer recognizes that marker frame by name to
    log a "run finished" event.
    '''

    def pydev_after_run_call():
        pass

    def inner(*args, **kwargs):
        # Bugfix: propagate fun's return value (it was previously discarded).
        # Harmless for Thread.run (which returns None) but correct for any
        # other callable that gets wrapped.
        ret = fun(*args, **kwargs)
        pydev_after_run_call()
        return ret

    return inner
def wrap_attr(obj, attr):
    '''Replace `obj.<attr>` with a traced wrapper and mark the object patched
    (via the `_pydev_run_patched` flag) so it is only wrapped once.'''
    original_method = getattr(obj, attr)
    setattr(obj, attr, wrapper(original_method))
    obj._pydev_run_patched = True
class ObjectWrapper(object):
    '''Proxy that surrounds every method call on the wrapped object with
    call_begin/call_end marker calls, whose frames the concurrency tracer
    recognizes by name (the markers themselves are no-ops).
    '''

    def __init__(self, obj):
        self.wrapped_object = obj
        try:
            import functools
            # Best-effort: copy metadata so the proxy looks like the original.
            functools.update_wrapper(self, obj)
        except:
            pass

    def __getattr__(self, attr):
        orig_attr = getattr(self.wrapped_object, attr)  # .__getattribute__(attr)
        if callable(orig_attr):

            def patched_attr(*args, **kwargs):
                self.call_begin(attr)
                result = orig_attr(*args, **kwargs)
                self.call_end(attr)
                # Bugfix: use an identity check (was `==`) so the proxy is only
                # substituted when the call returned the wrapped object itself
                # (fluent APIs), not something merely equal to it.
                if result is self.wrapped_object:
                    return self
                return result

            return patched_attr
        else:
            return orig_attr

    def call_begin(self, attr):
        # Marker frame detected by the tracer; intentionally empty.
        pass

    def call_end(self, attr):
        # Marker frame detected by the tracer; intentionally empty.
        pass

    def __enter__(self):
        self.call_begin("__enter__")
        # Bugfix: propagate the wrapped __enter__ result; previously
        # `with wrapped_lock as x:` always bound x to None.
        result = self.wrapped_object.__enter__()
        self.call_end("__enter__")
        return result

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.call_begin("__exit__")
        # Bugfix: propagate the wrapped __exit__ result so exception
        # suppression by the wrapped context manager keeps working.
        return self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
def factory_wrapper(fun):
    '''Wrap a factory callable so every object it creates is returned inside
    an ObjectWrapper (and therefore gets its method calls traced).'''

    def inner(*args, **kwargs):
        return ObjectWrapper(fun(*args, **kwargs))

    return inner
def wrap_threads():
    '''Monkey-patch the threading.Lock/RLock and queue.Queue factories so every
    newly-created instance is an ObjectWrapper, which reports
    call_begin/call_end events to the concurrency tracer.'''
    # TODO: add wrappers for thread and _thread
    # import _thread as mod
    # print("Thread imported")
    # mod.start_new_thread = wrapper(mod.start_new_thread)
    threading.Lock = factory_wrapper(threading.Lock)
    threading.RLock = factory_wrapper(threading.RLock)

    # queue patching
    import queue  # @UnresolvedImport
    queue.Queue = factory_wrapper(queue.Queue)

View File

@ -0,0 +1,270 @@
'''An helper file for the pydev debugger (REPL) console
'''
import sys
import traceback
from _pydevd_bundle.pydevconsole_code import InteractiveConsole, _EvalAwaitInNewEventLoop
from _pydev_bundle import _pydev_completer
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from _pydev_bundle.pydev_imports import Exec
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle import pydevd_save_locals
from _pydevd_bundle.pydevd_io import IOBuf
from pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_xml import make_valid_xml_value
import inspect
from _pydevd_bundle.pydevd_save_locals import update_globals_and_locals
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Accumulates console output/error lines plus a 'more input needed' flag,
    and renders the whole thing as the XML the IDE expects.
    """

    def __init__(self):
        self.more = False
        # List of (message_type, line) tuples, e.g. ('error', 'error_message').
        self.console_messages = []

    def add_console_message(self, message_type, message):
        """Split `message` on newlines and store each non-blank line."""
        self.console_messages.extend(
            (message_type, line) for line in message.split("\n") if line.strip())

    def update_more(self, more):
        """Record whether further input is required from the user."""
        self.more = more

    def to_xml(self):
        """Render as:
        <xml>
        <more>true/false</more>
        <output message="..."></output> / <error message="..."></error> ...
        </xml>
        """
        parts = ['<xml><more>%s</more>' % (self.more,)]
        for message_type, message in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (
                message_type, make_valid_xml_value(message), message_type))
        parts.append('</xml>')
        return ''.join(parts)
#=======================================================================================================================
# _DebugConsoleStdIn
#=======================================================================================================================
class _DebugConsoleStdIn(BaseStdIn):
    '''Stdin replacement used while console output is buffered: reading is not
    supported, so readline() just warns on stderr and returns a newline.'''

    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console.

    Code is executed against the namespace of the frame set in `self.frame`
    (globals + locals merged), and stdout/stderr are optionally captured into
    IOBuf buffers so the results can be shipped to the IDE.
    """

    @overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self, *args, **kwargs):
        # When not buffering, the real stdin is usable; otherwise hand out the
        # warn-only replacement. The try/except guards against __buffer_output
        # not having been set yet (push() sets it).
        try:
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass
        return _DebugConsoleStdIn()  # If buffered, raw_input is not supported in this console.

    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins

        :param buffer_output: if False won't redirect the output.

        :return: (more, output_lines, error_lines) — `more` is True if further
            input is required; the line lists are empty when not buffering.
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            # Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr
        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []

    @overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        # Delegate to the stock InteractiveConsole compilation/execution.
        return InteractiveConsole.push(self, line)

    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.
        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.
        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.
        """
        try:
            updated_globals = self.get_namespace()
            initial_globals = updated_globals.copy()
            updated_locals = None
            # Coroutine code objects (typed with top-level await) are run in a
            # fresh event loop on a helper thread.
            is_async = False
            if hasattr(inspect, 'CO_COROUTINE'):
                is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE
            if is_async:
                t = _EvalAwaitInNewEventLoop(code, updated_globals, updated_locals)
                t.start()
                t.join()
                # Write any changed values back into the real frame.
                update_globals_and_locals(updated_globals, initial_globals, self.frame)
                if t.exc:
                    raise t.exc[1].with_traceback(t.exc[2])
            else:
                try:
                    exec(code, updated_globals, updated_locals)
                finally:
                    update_globals_and_locals(updated_globals, initial_globals, self.frame)
        except SystemExit:
            raise
        except:
            # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
            # (showtraceback does it on python 3.5 onwards)
            sys.excepthook = sys.__excepthook__
            try:
                self.showtraceback()
            finally:
                # NOTE(review): this assigns __excepthook__ from excepthook (not
                # the reverse); looks intentional per PyDev-877 but worth confirming.
                sys.__excepthook__ = sys.excepthook

    def get_namespace(self):
        # Merged view of the frame's globals and locals (locals later because
        # it has precedence over the actual globals).
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    '''Class-level storage caching a single DebugConsole, keyed by the
    (thread_id, frame_id) pair it was created for.'''

    # Identifier of the suspended thread the cached console belongs to.
    thread_id = None
    # Identifier of the frame the cached console is bound to.
    frame_id = None
    # The cached DebugConsole instance (or None).
    interactive_console_instance = None
# Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """returns the global interactive console.
    interactive console should have been initialized by this time
    :rtype: DebugConsole

    The cached instance is reused while (thread_id, frame_id) is unchanged;
    otherwise a new DebugConsole replaces it and a "[Current context]" line
    describing `frame`'s location is appended to `console_message`.
    """
    if InteractiveConsoleCache.thread_id == thread_id and InteractiveConsoleCache.frame_id == frame_id:
        return InteractiveConsoleCache.interactive_console_instance

    InteractiveConsoleCache.interactive_console_instance = DebugConsole()
    InteractiveConsoleCache.thread_id = thread_id
    InteractiveConsoleCache.frame_id = frame_id

    console_stacktrace = traceback.extract_stack(frame, limit=1)
    if console_stacktrace:
        current_context = console_stacktrace[0]  # top entry from stacktrace
        context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,))
    return InteractiveConsoleCache.interactive_console_instance
def clear_interactive_console():
    '''Drop the cached console so the next request creates a fresh one.'''
    InteractiveConsoleCache.thread_id = None
    InteractiveConsoleCache.frame_id = None
    InteractiveConsoleCache.interactive_console_instance = None
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Fetch the cached interactive console for (thread_id, frame_id) and push
    `line` to it, collecting the resulting output/error lines.

    :return: a new ConsoleMessage with the 'more input needed' flag and all
        captured output and error lines.
    """
    console_message = ConsoleMessage()
    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    more, output_messages, error_messages = interpreter.push(line, frame, buffer_output)
    console_message.update_more(more)
    for message_type, messages in ((CONSOLE_OUTPUT, output_messages), (CONSOLE_ERROR, error_messages)):
        for message in messages:
            console_message.add_console_message(message_type, message)
    return console_message
def get_description(frame, thread_id, frame_id, expression):
    '''Evaluate a description for `expression` using the cached console bound
    to `frame` (presumably for hovers/tooltips — confirm against callers).'''
    console_message = ConsoleMessage()
    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    try:
        interpreter.frame = frame
        return interpreter.getDescription(expression)
    finally:
        # Always drop the frame reference so it isn't kept alive by the cache.
        interpreter.frame = None
def get_completions(frame, act_tok):
    """ fetch all completions, create xml for the same
    return the completions xml

    Thin wrapper: delegates to _pydev_completer with the activation token.
    """
    return _pydev_completer.generate_completions_as_xml(frame, act_tok)

View File

@ -0,0 +1,807 @@
'''
This module holds the constants used for specifying the states of the debugger.
'''
from __future__ import nested_scopes
import platform
import weakref
import struct
import warnings
import functools
from contextlib import contextmanager
STATE_RUN = 1
STATE_SUSPEND = 2
PYTHON_SUSPEND = 1
DJANGO_SUSPEND = 2
JINJA2_SUSPEND = 3
int_types = (int,)
# types does not include a MethodWrapperType
try:
MethodWrapperType = type([].__str__)
except:
MethodWrapperType = None
import sys # Note: the sys import must be here anyways (others depend on it)
# Preload codecs to avoid imports to them later on which can potentially halt the debugger.
import codecs as _codecs
for _codec in ["ascii", "utf8", "utf-8", "latin1", "latin-1", "idna"]:
_codecs.lookup(_codec)
class DebugInfoHolder:
    '''Mutable, class-level holder for the debug-tracing settings, so values
    set through the command line are seen by already-imported modules.'''
    # we have to put it here because it can be set through the command line (so, the
    # already imported references would not have it).

    # General information
    DEBUG_TRACE_LEVEL = 0  # 0 = critical, 1 = info, 2 = debug, 3 = verbose

    # Optional path of a file to receive the debug output (None = stderr).
    PYDEVD_DEBUG_FILE = None
# Any filename that starts with these strings is not traced nor shown to the user.
# In Python 3.7 "<frozen ..." appears multiple times during import and should be ignored for the user.
# In PyPy "<builtin> ..." can appear and should be ignored for the user.
# <attrs is used internally by attrs
# <__array_function__ is used by numpy
IGNORE_BASENAMES_STARTING_WITH = ('<frozen ', '<builtin', '<attrs', '<__array_function__')
# Note: <string> has special heuristics to know whether it should be traced or not (it's part of
# user code when it's the <string> used in python -c and part of the library otherwise).
# Any filename that starts with these strings is considered user (project) code. Note
# that files for which we have a source mapping are also considered as a part of the project.
USER_CODE_BASENAMES_STARTING_WITH = ('<ipython',)
# Any filename that starts with these strings is considered library code (note: checked after USER_CODE_BASENAMES_STARTING_WITH).
LIBRARY_CODE_BASENAMES_STARTING_WITH = ('<',)
IS_CPYTHON = platform.python_implementation() == 'CPython'
# Hold a reference to the original _getframe (because psyco will change that as soon as it's imported)
IS_IRONPYTHON = sys.platform == 'cli'
# Resolve get_frame once: prefer the real sys._getframe; on IronPython wrap it
# (frame support may be disabled), and when absent fail loudly at call time.
try:
    get_frame = sys._getframe
    if IS_IRONPYTHON:

        def get_frame():
            # May return None: sys._getframe raises ValueError on IronPython
            # when frame support (-X:Frames) is not enabled.
            try:
                return sys._getframe()
            except ValueError:
                pass

except AttributeError:

    def get_frame():
        raise AssertionError('sys._getframe not available (possible causes: enable -X:Frames on IronPython?)')
# Used to determine the maximum size of each variable passed to eclipse -- having a big value here may make
# the communication slower -- as the variables are being gathered lazily in the latest version of eclipse,
# this value was raised from 200 to 1000.
MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 1000
# Prefix for saving functions return values in locals
RETURN_VALUES_DICT = '__pydevd_ret_val_dict'
GENERATED_LEN_ATTR_NAME = 'len()'
import os
from _pydevd_bundle import pydevd_vm_type
# Constant detects when running on Jython/windows properly later on.
IS_WINDOWS = sys.platform == 'win32'
IS_LINUX = sys.platform in ('linux', 'linux2')
IS_MAC = sys.platform == 'darwin'
IS_WASM = sys.platform == 'emscripten' or sys.platform == 'wasi'
IS_64BIT_PROCESS = sys.maxsize > (2 ** 32)
IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON
IS_PYPY = platform.python_implementation() == 'PyPy'
if IS_JYTHON:
import java.lang.System # @UnresolvedImport
IS_WINDOWS = java.lang.System.getProperty("os.name").lower().startswith("windows")
USE_CUSTOM_SYS_CURRENT_FRAMES = not hasattr(sys, '_current_frames') or IS_PYPY
USE_CUSTOM_SYS_CURRENT_FRAMES_MAP = USE_CUSTOM_SYS_CURRENT_FRAMES and (IS_PYPY or IS_IRONPYTHON)
if USE_CUSTOM_SYS_CURRENT_FRAMES:
# Some versions of Jython don't have it (but we can provide a replacement)
if IS_JYTHON:
from java.lang import NoSuchFieldException
from org.python.core import ThreadStateMapping
try:
cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version
except NoSuchFieldException:
cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0
cachedThreadState.accessible = True
thread_states = cachedThreadState.get(ThreadStateMapping)
def _current_frames():
as_array = thread_states.entrySet().toArray()
ret = {}
for thread_to_state in as_array:
thread = thread_to_state.getKey()
if thread is None:
continue
thread_state = thread_to_state.getValue()
if thread_state is None:
continue
frame = thread_state.frame
if frame is None:
continue
ret[thread.getId()] = frame
return ret
elif USE_CUSTOM_SYS_CURRENT_FRAMES_MAP:
constructed_tid_to_last_frame = {}
# IronPython doesn't have it. Let's use our workaround...
def _current_frames():
return constructed_tid_to_last_frame
else:
raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).')
else:
_current_frames = sys._current_frames
IS_PYTHON_STACKLESS = "stackless" in sys.version.lower()
CYTHON_SUPPORTED = False
python_implementation = platform.python_implementation()
if python_implementation == 'CPython':
# Only available for CPython!
CYTHON_SUPPORTED = True
#=======================================================================================================================
# Python 3?
#=======================================================================================================================
IS_PY36_OR_GREATER = sys.version_info >= (3, 6)
IS_PY37_OR_GREATER = sys.version_info >= (3, 7)
IS_PY38_OR_GREATER = sys.version_info >= (3, 8)
IS_PY39_OR_GREATER = sys.version_info >= (3, 9)
IS_PY310_OR_GREATER = sys.version_info >= (3, 10)
IS_PY311_OR_GREATER = sys.version_info >= (3, 11)
def version_str(v):
    '''Format a version tuple (such as sys.version_info) as a string: the
    first three parts dot-joined, with any remaining parts appended verbatim
    (e.g. (3, 10, 4, 'final', 0) -> "3.10.4final0").'''
    release = '.'.join(str(part) for part in v[:3])
    suffix = ''.join(str(part) for part in v[3:])
    return release + suffix
PY_VERSION_STR = version_str(sys.version_info)
try:
PY_IMPL_VERSION_STR = version_str(sys.implementation.version)
except AttributeError:
PY_IMPL_VERSION_STR = ''
try:
PY_IMPL_NAME = sys.implementation.name
except AttributeError:
PY_IMPL_NAME = ''
ENV_TRUE_LOWER_VALUES = ('yes', 'true', '1')
ENV_FALSE_LOWER_VALUES = ('no', 'false', '0')
def is_true_in_env(env_key):
    '''Return True when the env var named `env_key` holds a truthy value
    (one of ENV_TRUE_LOWER_VALUES, case-insensitive). `env_key` may also be a
    tuple of names, in which case any truthy one suffices.'''
    if isinstance(env_key, tuple):
        # If a tuple, return True if any of those ends up being true.
        return any(is_true_in_env(single_key) for single_key in env_key)
    return os.getenv(env_key, '').lower() in ENV_TRUE_LOWER_VALUES
def as_float_in_env(env_key, default):
    '''Return the env var `env_key` parsed as a float, or `default` when it is
    unset. Raises RuntimeError when set but not parseable as a float.'''
    raw = os.getenv(env_key)
    if raw is None:
        return default
    try:
        return float(raw)
    except Exception:
        raise RuntimeError(
            'Error: expected the env variable: %s to be set to a float value. Found: %s' % (
                env_key, raw))
def as_int_in_env(env_key, default):
    '''
    Read the environment variable ``env_key`` as an int.

    :return: ``default`` when the variable is unset.
    :raises RuntimeError: when the variable is set but cannot be parsed as an int.
    '''
    raw = os.getenv(env_key)
    if raw is None:
        return default
    try:
        return int(raw)
    except Exception:
        raise RuntimeError(
            'Error: expected the env variable: %s to be set to a int value. Found: %s' % (
                env_key, raw))
# If true in env, use gevent mode.
SUPPORT_GEVENT = is_true_in_env('GEVENT_SUPPORT')

# Opt-in support to show gevent paused greenlets. False by default because if too many greenlets are
# paused the UI can slow-down (i.e.: if 1000 greenlets are paused, each one would be shown separate
# as a different thread, but if the UI isn't optimized for that the experience is lacking...).
GEVENT_SHOW_PAUSED_GREENLETS = is_true_in_env('GEVENT_SHOW_PAUSED_GREENLETS')

DISABLE_FILE_VALIDATION = is_true_in_env('PYDEVD_DISABLE_FILE_VALIDATION')

# Message shown when monkey-patched gevent is detected but GEVENT_SUPPORT was not enabled.
GEVENT_SUPPORT_NOT_SET_MSG = os.getenv(
    'GEVENT_SUPPORT_NOT_SET_MSG',
    'It seems that the gevent monkey-patching is being used.\n'
    'Please set an environment variable with:\n'
    'GEVENT_SUPPORT=True\n'
    'to enable gevent support in the debugger.'
)

USE_LIB_COPY = SUPPORT_GEVENT

# True when an interactive console (matplotlib etc.) can be shown in this environment.
INTERACTIVE_MODE_AVAILABLE = sys.platform in ('darwin', 'win32') or os.getenv('DISPLAY') is not None

# If true in env, forces cython to be used (raises error if not available).
# If false in env, disables it.
# If not specified, uses default heuristic to determine if it should be loaded.
USE_CYTHON_FLAG = os.getenv('PYDEVD_USE_CYTHON')
if USE_CYTHON_FLAG is not None:
    USE_CYTHON_FLAG = USE_CYTHON_FLAG.lower()
    if USE_CYTHON_FLAG not in ENV_TRUE_LOWER_VALUES and USE_CYTHON_FLAG not in ENV_FALSE_LOWER_VALUES:
        raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (enable with one of: %s, disable with one of: %s)' % (
            USE_CYTHON_FLAG, ENV_TRUE_LOWER_VALUES, ENV_FALSE_LOWER_VALUES))
else:
    if not CYTHON_SUPPORTED:
        USE_CYTHON_FLAG = 'no'

# If true in env, forces frame eval to be used (raises error if not available).
# If false in env, disables it.
# If not specified, uses default heuristic to determine if it should be loaded.
PYDEVD_USE_FRAME_EVAL = os.getenv('PYDEVD_USE_FRAME_EVAL', '').lower()

# Values used to determine how much container items will be shown.
# PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS:
# - Defines how many items will appear initially expanded after which a 'more...' will appear.
#
# PYDEVD_CONTAINER_BUCKET_SIZE
# - Defines the size of each bucket inside the 'more...' item
# i.e.: a bucket with size == 2 would show items such as:
# - [2:4]
# - [4:6]
# ...
#
# PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS
# - Defines the maximum number of items for dicts and sets.
#
PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS = as_int_in_env('PYDEVD_CONTAINER_INITIAL_EXPANDED_ITEMS', 100)
PYDEVD_CONTAINER_BUCKET_SIZE = as_int_in_env('PYDEVD_CONTAINER_BUCKET_SIZE', 1000)
PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS = as_int_in_env('PYDEVD_CONTAINER_RANDOM_ACCESS_MAX_ITEMS', 500)
PYDEVD_CONTAINER_NUMPY_MAX_ITEMS = as_int_in_env('PYDEVD_CONTAINER_NUMPY_MAX_ITEMS', 500)

PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING = is_true_in_env('PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING')

# If specified in PYDEVD_IPYTHON_CONTEXT it must be a string with the basename
# and then the name of 2 methods in which the evaluate is done.
PYDEVD_IPYTHON_CONTEXT = ('interactiveshell.py', 'run_code', 'run_ast_nodes')
_ipython_ctx = os.getenv('PYDEVD_IPYTHON_CONTEXT')
if _ipython_ctx:
    PYDEVD_IPYTHON_CONTEXT = tuple(x.strip() for x in _ipython_ctx.split(','))
    assert len(PYDEVD_IPYTHON_CONTEXT) == 3, 'Invalid PYDEVD_IPYTHON_CONTEXT: %s' % (_ipython_ctx,)

# Use to disable loading the lib to set tracing to all threads (default is using heuristics based on where we're running).
LOAD_NATIVE_LIB_FLAG = os.getenv('PYDEVD_LOAD_NATIVE_LIB', '').lower()

LOG_TIME = os.getenv('PYDEVD_LOG_TIME', 'true').lower() in ENV_TRUE_LOWER_VALUES

SHOW_COMPILE_CYTHON_COMMAND_LINE = is_true_in_env('PYDEVD_SHOW_COMPILE_CYTHON_COMMAND_LINE')

LOAD_VALUES_ASYNC = is_true_in_env('PYDEVD_LOAD_VALUES_ASYNC')
# Placeholder value presented while an async variable load is still pending.
DEFAULT_VALUE = "__pydevd_value_async"
ASYNC_EVAL_TIMEOUT_SEC = 60
NEXT_VALUE_SEPARATOR = "__pydev_val__"
BUILTINS_MODULE_NAME = 'builtins'

# Pandas customization.
PANDAS_MAX_ROWS = as_int_in_env('PYDEVD_PANDAS_MAX_ROWS', 60)
PANDAS_MAX_COLS = as_int_in_env('PYDEVD_PANDAS_MAX_COLS', 10)
PANDAS_MAX_COLWIDTH = as_int_in_env('PYDEVD_PANDAS_MAX_COLWIDTH', 50)

# If getting an attribute or computing some value is too slow, let the user know if the given timeout elapses.
PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT = as_float_in_env('PYDEVD_WARN_SLOW_RESOLVE_TIMEOUT', 0.50)

# This timeout is used to track the time to send a message saying that the evaluation
# is taking too long and possible mitigations.
PYDEVD_WARN_EVALUATION_TIMEOUT = as_float_in_env('PYDEVD_WARN_EVALUATION_TIMEOUT', 3.)

# If True in env shows a thread dump when the evaluation times out.
PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT = is_true_in_env('PYDEVD_THREAD_DUMP_ON_WARN_EVALUATION_TIMEOUT')

# This timeout is used only when the mode that all threads are stopped/resumed at once is used
# (i.e.: multi_threads_single_notification)
#
# In this mode, if some evaluation doesn't finish until this timeout, we notify the user
# and then resume all threads until the evaluation finishes.
#
# A negative value will disable the timeout and a value of 0 will automatically run all threads
# (without any notification) when the evaluation is started and pause all threads when the
# evaluation is finished. A positive value will run run all threads after the timeout
# elapses.
PYDEVD_UNBLOCK_THREADS_TIMEOUT = as_float_in_env('PYDEVD_UNBLOCK_THREADS_TIMEOUT', -1.)

# Timeout to interrupt a thread (so, if some evaluation doesn't finish until this
# timeout, the thread doing the evaluation is interrupted).
# A value <= 0 means this is disabled.
# See: _pydevd_bundle.pydevd_timeout.create_interrupt_this_thread_callback for details
# on how the thread interruption works (there are some caveats related to it).
PYDEVD_INTERRUPT_THREAD_TIMEOUT = as_float_in_env('PYDEVD_INTERRUPT_THREAD_TIMEOUT', -1)

# If PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS is set to False, the patching to hide pydevd threads won't be applied.
PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS = os.getenv('PYDEVD_APPLY_PATCHING_TO_HIDE_PYDEVD_THREADS', 'true').lower() in ENV_TRUE_LOWER_VALUES

# Identifiers for the kind of exception break (sent over the wire / used in breakpoint setup).
EXCEPTION_TYPE_UNHANDLED = 'UNHANDLED'
EXCEPTION_TYPE_USER_UNHANDLED = 'USER_UNHANDLED'
EXCEPTION_TYPE_HANDLED = 'HANDLED'

SHOW_DEBUG_INFO_ENV = is_true_in_env(('PYCHARM_DEBUG', 'PYDEV_DEBUG', 'PYDEVD_DEBUG'))

if SHOW_DEBUG_INFO_ENV:
    # show debug info before the debugger start
    DebugInfoHolder.DEBUG_TRACE_LEVEL = 3

DebugInfoHolder.PYDEVD_DEBUG_FILE = os.getenv('PYDEVD_DEBUG_FILE')
def protect_libraries_from_patching():
    """
    In this function we delete some modules from `sys.modules` dictionary and import them again inside
    `_pydev_saved_modules` in order to save their original copies there. After that we can use these
    saved modules within the debugger to protect them from patching by external libraries (e.g. gevent).
    """
    module_names = ['threading', 'thread', '_thread', 'time', 'socket', 'queue', 'select',
                    'xmlrpclib', 'SimpleXMLRPCServer', 'BaseHTTPServer', 'SocketServer',
                    'xmlrpc.client', 'xmlrpc.server', 'http.server', 'socketserver']

    # Make sure every module which exists in this interpreter is actually loaded.
    for module_name in module_names:
        try:
            __import__(module_name)
        except:
            pass

    # Snapshot the currently loaded instances, then drop them from sys.modules so the
    # import below re-imports fresh (unpatched) copies.
    saved = {name: module for name, module in sys.modules.items() if name in module_names}
    for name in saved:
        del sys.modules[name]

    # import for side effects
    import _pydev_bundle._pydev_saved_modules

    # Restore the original instances so user code keeps seeing what it had before.
    for name, module in saved.items():
        sys.modules[name] = module
if USE_LIB_COPY:
    # Gevent mode: make the debugger itself rely on unpatched copies of the core modules.
    protect_libraries_from_patching()

from _pydev_bundle._pydev_saved_modules import thread, threading

# Weak references to every ForkSafeLock created (reset in the child via after_fork()).
_fork_safe_locks = []
if IS_JYTHON:

    def ForkSafeLock(rlock=False):
        # Jython has no fork, so a plain lock suffices (kept as a factory function so
        # call sites work the same on all implementations).
        if rlock:
            return threading.RLock()
        else:
            return threading.Lock()

else:

    class ForkSafeLock(object):
        '''
        A lock which is fork-safe (when a fork is done, `pydevd_constants.after_fork()`
        should be called to reset the locks in the new process to avoid deadlocks
        from a lock which was locked during the fork).

        Note:

        Unlike `threading.Lock` this class is not completely atomic, so, doing:

        lock = ForkSafeLock()
        with lock:
        ...

        is different than using `threading.Lock` directly because the tracing may
        find an additional function call on `__enter__` and on `__exit__`, so, it's
        not recommended to use this in all places, only where the forking may be important
        (so, for instance, the locks on PyDB should not be changed to this lock because
        of that -- and those should all be collected in the new process because PyDB itself
        should be completely cleared anyways).

        It's possible to overcome this limitation by using `ForkSafeLock.acquire` and
        `ForkSafeLock.release` instead of the context manager (as acquire/release are
        bound to the original implementation, whereas __enter__/__exit__ is not due to Python
        limitations).
        '''

        def __init__(self, rlock=False):
            self._rlock = rlock
            # Bugfix: registration in `_fork_safe_locks` happens inside `_init()` only.
            # Previously the weakref was also appended here, so each lock was registered
            # twice and `after_fork()` would re-create the underlying lock twice per fork
            # (and duplicate the registrations again each time).
            self._init()

        def __enter__(self):
            return self._lock.__enter__()

        def __exit__(self, exc_type, exc_val, exc_tb):
            return self._lock.__exit__(exc_type, exc_val, exc_tb)

        def _init(self):
            # (Re)create the underlying lock and rebind acquire/release so they stay
            # bound to the current lock object (see class docstring).
            if self._rlock:
                self._lock = threading.RLock()
            else:
                self._lock = thread.allocate_lock()

            self.acquire = self._lock.acquire
            self.release = self._lock.release
            # Register (via weakref) so after_fork() can reset this lock in the child.
            _fork_safe_locks.append(weakref.ref(self))
def after_fork():
    '''
    Must be called after a fork operation (will reset the ForkSafeLock).
    '''
    global _fork_safe_locks
    pending = _fork_safe_locks[:]
    _fork_safe_locks = []
    for ref in pending:
        # Weak references: skip locks that were already garbage-collected.
        instance = ref()
        if instance is not None:
            instance._init()
# Guards the lazy computation of per-thread pydevd ids (see get_thread_id and friends).
_thread_id_lock = ForkSafeLock()

# Bound once from the saved (monkey-patch-protected) thread module.
thread_get_ident = thread.get_ident
def as_str(s):
    '''
    Identity helper kept for API compatibility (historically converted bytes to str);
    now it only asserts the input already is a str and returns it unchanged.
    '''
    assert isinstance(s, str)
    return s
@contextmanager
def filter_all_warnings():
    '''Context manager which suppresses every warning while the with-body runs.'''
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        yield
def silence_warnings_decorator(func):
    '''Decorator which runs ``func`` with all warnings suppressed (see filter_all_warnings).'''

    @functools.wraps(func)
    def new_func(*args, **kwargs):
        with filter_all_warnings():
            return func(*args, **kwargs)

    return new_func
def sorted_dict_repr(d):
    '''Deterministic repr of a dict: items ordered by str(key) (stable across runs).'''
    ordered = sorted(d.items(), key=lambda item: str(item[0]))
    return '{%s}' % ', '.join('%r: %r' % item for item in ordered)
def iter_chars(b):
    '''
    Iterate over individual characters: for a bytes input yields length-1 bytes
    objects (Python 3 iterates bytes as ints, which is not what we want here);
    any other iterable (e.g.: str) is iterated as-is.
    '''
    if isinstance(b, bytes):
        # Equivalent to e.g. struct.unpack('3c', b) for a 3-byte input.
        return iter(struct.unpack('%dc' % (len(b),), b))
    return iter(b)
# Define NO_FTRACE: the cheapest per-frame trace function to install on frames we don't
# want to trace. On CPython we probe (under a temporary sys.settrace) whether setting
# `frame.f_trace = None` is accepted -- older versions raise TypeError for that, in
# which case NO_FTRACE must leave f_trace untouched.
if IS_JYTHON:

    def NO_FTRACE(frame, event, arg):
        return None

else:
    _curr_trace = sys.gettrace()

    # Set a temporary trace which does nothing for us to test (otherwise setting frame.f_trace has no
    # effect).
    def _temp_trace(frame, event, arg):
        return None

    sys.settrace(_temp_trace)

    def _check_ftrace_set_none():
        '''
        Will throw an error when executing a line event
        '''
        sys._getframe().f_trace = None
        _line_event = 1
        _line_event = 2

    try:
        _check_ftrace_set_none()

        def NO_FTRACE(frame, event, arg):
            frame.f_trace = None
            return None

    except TypeError:

        def NO_FTRACE(frame, event, arg):
            # In Python <= 2.6 and <= 3.4, if we're tracing a method, frame.f_trace may not be set
            # to None, it must always be set to a tracing function.
            # See: tests_python.test_tracing_gotchas.test_tracing_gotchas
            #
            # Note: Python 2.7 sometimes works and sometimes it doesn't depending on the minor
            # version because of https://bugs.python.org/issue20041 (although bug reports didn't
            # include the minor version, so, mark for any Python 2.7 as I'm not completely sure
            # the fix in later 2.7 versions is the same one we're dealing with).
            return None

    # Restore whatever trace function was active before the probe.
    sys.settrace(_curr_trace)
#=======================================================================================================================
# get_pid
#=======================================================================================================================
def get_pid():
    '''
    Return the current process id (an int from os.getpid, a str derived from the JVM
    runtime name on Jython, or '000001' when no pid can be obtained at all).
    '''
    try:
        return os.getpid()
    except AttributeError:
        try:
            # Jython does not have os.getpid: derive an id from the JVM runtime name.
            import java.lang.management.ManagementFactory  # @UnresolvedImport -- just for jython
            jvm_name = java.lang.management.ManagementFactory.getRuntimeMXBean().getName()
            return jvm_name.replace('@', '_')
        except:
            # ok, no pid available (will be unable to debug multiple processes)
            return '000001'
def clear_cached_thread_id(thread):
    '''Drop the cached __pydevd_id__ from ``thread`` so it's recomputed on next use.'''
    with _thread_id_lock:
        try:
            cached = thread.__pydevd_id__
        except AttributeError:
            # Nothing cached: nothing to clear.
            return
        if cached != 'console_main':
            # The console_main is a special thread id used in the console and its id should never be reset
            # (otherwise we may no longer be able to get its variables -- see: https://www.brainwy.com/tracker/PyDev/776).
            del thread.__pydevd_id__
# Don't let threads be collected (so that id(thread) is guaranteed to be unique).
_thread_id_to_thread_found = {}


def _get_or_compute_thread_id_with_lock(thread, is_current_thread):
    # Slow path of get_thread_id/get_current_thread_id: compute the id under the lock
    # and cache it on the thread object. `is_current_thread` is currently unused but
    # kept so both callers share this signature.
    with _thread_id_lock:
        # We do a new check with the lock in place just to be sure that nothing changed
        tid = getattr(thread, '__pydevd_id__', None)
        if tid is not None:
            return tid

        # Keep the thread alive so its id() is never reused (see dict above).
        _thread_id_to_thread_found[id(thread)] = thread

        # Note: don't use thread.ident because a new thread may have the
        # same id from an old thread.
        pid = get_pid()
        tid = 'pid_%s_id_%s' % (pid, id(thread))

        thread.__pydevd_id__ = tid

    return tid
def get_current_thread_id(thread):
    '''
    Note: the difference from get_current_thread_id to get_thread_id is that
    for the current thread we can get the thread id while the thread.ident
    is still not set in the Thread instance.
    '''
    try:
        # Fast path without getting lock.
        tid = thread.__pydevd_id__
        if tid is None:
            # Fix for https://www.brainwy.com/tracker/PyDev/645
            # if __pydevd_id__ is None, recalculate it... also, use an heuristic
            # that gives us always the same id for the thread (using thread.ident or id(thread)).
            raise AttributeError()
    except AttributeError:
        # Slow path: compute under the lock (also handles the never-computed case).
        tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=True)

    return tid
def get_thread_id(thread):
    # Return the pydevd id cached on ``thread``, computing it if needed
    # (same logic as get_current_thread_id but for a non-current thread).
    try:
        # Fast path without getting lock.
        tid = thread.__pydevd_id__
        if tid is None:
            # Fix for https://www.brainwy.com/tracker/PyDev/645
            # if __pydevd_id__ is None, recalculate it... also, use an heuristic
            # that gives us always the same id for the thread (using thread.ident or id(thread)).
            raise AttributeError()
    except AttributeError:
        # Slow path: compute under the lock (also handles the never-computed case).
        tid = _get_or_compute_thread_id_with_lock(thread, is_current_thread=False)

    return tid
def set_thread_id(thread, thread_id):
    # Explicitly pin the pydevd id cached on ``thread`` (e.g.: the console's 'console_main').
    with _thread_id_lock:
        thread.__pydevd_id__ = thread_id
#=======================================================================================================================
# Null
#=======================================================================================================================
class Null:
    """
    Null-object pattern: a falsy sink that absorbs calls, attribute access and
    item access (used e.g. as a placeholder for not-yet-available debugger objects).

    Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205
    """

    def __init__(self, *args, **kwargs):
        return None

    def __call__(self, *args, **kwargs):
        return self

    def __enter__(self, *args, **kwargs):
        return self

    def __exit__(self, *args, **kwargs):
        return self

    def __getattr__(self, mname):
        if len(mname) > 4 and mname[:2] == '__' and mname[-2:] == '__':
            # Don't pretend to implement special method names.
            raise AttributeError(mname)
        return self

    def __setattr__(self, name, value):
        return self

    def __delattr__(self, name):
        return self

    def __repr__(self):
        return "<Null>"

    def __str__(self):
        return "Null"

    def __len__(self):
        # Also makes bool(Null()) evaluate to False on Python 3.
        return 0

    def __getitem__(self, *args):
        # Bugfix: accept the subscript argument -- the previous signature took no
        # index, so `null[x]` raised TypeError instead of being absorbed.
        return self

    def __setitem__(self, *args, **kwargs):
        pass

    def write(self, *args, **kwargs):
        pass

    def __nonzero__(self):
        # Python 2 truthiness hook (Python 3 falls back to __len__).
        return 0

    def __iter__(self):
        return iter(())


# Default instance
NULL = Null()
class KeyifyList(object):
    '''
    Read-only sequence view where item i is key(inner[i]) -- handy to bisect a
    list by a derived key without materializing the keys.
    '''

    def __init__(self, inner, key):
        self.inner = inner
        self.key = key

    def __len__(self):
        return len(self.inner)

    def __getitem__(self, index):
        return self.key(self.inner[index])
def call_only_once(func):
    '''
    To be used as a decorator

    @call_only_once
    def func():
        print 'Calling func only this time'

    Actually, in PyDev it must be called as:

    func = call_only_once(func) to support older versions of Python.
    '''

    def new_func(*args, **kwargs):
        if new_func._called:
            # Subsequent calls are no-ops (returning None).
            return None
        new_func._called = True
        return func(*args, **kwargs)

    new_func._called = False
    return new_func
# Protocol where each line is a new message (text is quoted to prevent new lines).
# payload is xml
QUOTED_LINE_PROTOCOL = 'quoted-line'
ARGUMENT_QUOTED_LINE_PROTOCOL = 'protocol-quoted-line'

# Uses http protocol to provide a new message.
# i.e.: Content-Length:xxx\r\n\r\npayload
# payload is xml
HTTP_PROTOCOL = 'http'
ARGUMENT_HTTP_PROTOCOL = 'protocol-http'

# Message is sent without any header.
# payload is json
JSON_PROTOCOL = 'json'
ARGUMENT_JSON_PROTOCOL = 'json-dap'

# Same header as the HTTP_PROTOCOL
# payload is json
HTTP_JSON_PROTOCOL = 'http_json'
ARGUMENT_HTTP_JSON_PROTOCOL = 'json-dap-http'

# Command-line argument name used to pass the parent process id.
ARGUMENT_PPID = 'ppid'


class _GlobalSettings:
    # Currently selected wire protocol (see set_protocol/get_protocol below).
    protocol = QUOTED_LINE_PROTOCOL
def set_protocol(protocol):
    # Select the wire protocol (must be one of the *_PROTOCOL constants above).
    expected = (HTTP_PROTOCOL, QUOTED_LINE_PROTOCOL, JSON_PROTOCOL, HTTP_JSON_PROTOCOL)
    assert protocol in expected, 'Protocol (%s) should be one of: %s' % (
        protocol, expected)

    _GlobalSettings.protocol = protocol


def get_protocol():
    # Return the currently selected wire protocol.
    return _GlobalSettings.protocol


def is_json_protocol():
    # True when the selected protocol carries json (DAP) payloads.
    return _GlobalSettings.protocol in (JSON_PROTOCOL, HTTP_JSON_PROTOCOL)
class GlobalDebuggerHolder:
    '''
    Holder for the global debugger.
    '''
    global_dbg = None  # Note: don't rename (the name is used in our attach to process)


def get_global_debugger():
    # Accessor for the single global debugger instance (None before setup).
    return GlobalDebuggerHolder.global_dbg


GetGlobalDebugger = get_global_debugger  # Backward-compatibility


def set_global_debugger(dbg):
    # Store the debugger instance in the global holder.
    GlobalDebuggerHolder.global_dbg = dbg
if __name__ == '__main__':
    # Sanity check: Null() is falsy, so nothing should be printed.
    if Null():
        sys.stdout.write('here\n')

View File

@ -0,0 +1,116 @@
from _pydevd_bundle.pydevd_constants import get_current_thread_id, Null, ForkSafeLock
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
from _pydev_bundle._pydev_saved_modules import thread, threading
import sys
from _pydev_bundle import pydev_log
# Set to True to get stderr traces about custom-frame bookkeeping.
DEBUG = False
class CustomFramesContainer:
    # Actual Values initialized later on (by custom_frames_container_init below).
    custom_frames_lock = None  # : :type custom_frames_lock: threading.Lock

    # Maps frame-custom-thread-id (str) -> CustomFrame. Only access while holding custom_frames_lock.
    custom_frames = None

    # Counter used to generate unique custom frame ids.
    _next_frame_id = None

    # Event set whenever the registry changes (a real event is installed by the debugger later).
    _py_db_command_thread_event = None
def custom_frames_container_init():  # Note: no staticmethod on jython 2.1 (so, use free-function)
    '''(Re)initialize the CustomFramesContainer state (also called when handling a fork).'''
    CustomFramesContainer.custom_frames_lock = ForkSafeLock()

    # custom_frames can only be accessed if properly locked with custom_frames_lock!
    # Key is a string identifying the frame (as well as the thread it belongs to).
    # Value is a CustomFrame.
    #
    CustomFramesContainer.custom_frames = {}

    # Only to be used in this module
    CustomFramesContainer._next_frame_id = 0

    # This is the event we must set to release an internal process events. It's later set by the actual debugger
    # when we do create the debugger.
    CustomFramesContainer._py_db_command_thread_event = Null()


# Initialize it the first time (it may be reinitialized later on when dealing with a fork).
custom_frames_container_init()
class CustomFrame:
    '''Record describing one custom (virtual) frame presented as a paused thread.'''

    def __init__(self, name, frame, thread_id):
        # Representation of this frame shown in the UI.
        self.name = name
        # The topmost frame object to show.
        self.frame = frame
        # Incremented each time the frame is changed.
        self.mod_time = 0
        # Id of the real thread this frame belongs to.
        self.thread_id = thread_id
def add_custom_frame(frame, name, thread_id):
    '''
    It's possible to show paused frames by adding a custom frame through this API (it's
    intended to be used for coroutines, but could potentially be used for generators too).

    :param frame:
        The topmost frame to be shown paused when a thread with thread.ident == thread_id is paused.

    :param name:
        The name to be shown for the custom thread in the UI.

    :param thread_id:
        The thread id to which this frame is related (must match thread.ident).

    :return: str
        Returns the custom thread id which will be used to show the given frame paused.
    '''
    with CustomFramesContainer.custom_frames_lock:
        curr_thread_id = get_current_thread_id(threading.current_thread())
        next_id = CustomFramesContainer._next_frame_id = CustomFramesContainer._next_frame_id + 1

        # Note: the frame id kept contains an id and thread information on the thread where the frame was added
        # so that later on we can check if the frame is from the current thread by doing frame_id.endswith('|'+thread_id).
        frame_custom_thread_id = '__frame__:%s|%s' % (next_id, curr_thread_id)
        if DEBUG:
            sys.stderr.write('add_custom_frame: %s (%s) %s %s\n' % (
                frame_custom_thread_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name))

        CustomFramesContainer.custom_frames[frame_custom_thread_id] = CustomFrame(name, frame, thread_id)
        # Wake up the debugger command thread so it picks up the new frame.
        CustomFramesContainer._py_db_command_thread_event.set()
        return frame_custom_thread_id
def update_custom_frame(frame_custom_thread_id, frame, thread_id, name=None):
    '''Update the name/thread of a previously added custom frame and bump its mod_time.

    NOTE(review): the ``frame`` argument is currently unused here (the stored frame
    object is not replaced) -- confirm whether that is intended.
    '''
    with CustomFramesContainer.custom_frames_lock:
        if DEBUG:
            sys.stderr.write('update_custom_frame: %s\n' % frame_custom_thread_id)
        try:
            old = CustomFramesContainer.custom_frames[frame_custom_thread_id]
            if name is not None:
                old.name = name
            old.mod_time += 1
            old.thread_id = thread_id
        except:
            # Unknown id: report and keep going (best-effort update).
            sys.stderr.write('Unable to get frame to replace: %s\n' % (frame_custom_thread_id,))
            pydev_log.exception()

        # Wake up the debugger command thread so it notices the change.
        CustomFramesContainer._py_db_command_thread_event.set()
def remove_custom_frame(frame_custom_thread_id):
    '''Discard a previously added custom frame (no-op if the id is unknown).'''
    with CustomFramesContainer.custom_frames_lock:
        if DEBUG:
            sys.stderr.write('remove_custom_frame: %s\n' % frame_custom_thread_id)
        CustomFramesContainer.custom_frames.pop(frame_custom_thread_id, None)
        # Wake up the debugger command thread so it notices the removal.
        CustomFramesContainer._py_db_command_thread_event.set()

View File

@ -0,0 +1,27 @@
# Declarations for the cython-accelerated per-thread debugger state
# (the implementation lives in the matching pydevd_cython .pyx module).
cdef class PyDBAdditionalThreadInfo:
    cdef public int pydev_state
    cdef public object pydev_step_stop # Actually, it's a frame or None
    cdef public int pydev_original_step_cmd
    cdef public int pydev_step_cmd
    cdef public bint pydev_notify_kill
    cdef public object pydev_smart_step_stop # Actually, it's a frame or None
    cdef public bint pydev_django_resolve_frame
    cdef public object pydev_call_from_jinja2
    cdef public object pydev_call_inside_jinja2
    cdef public int is_tracing
    cdef public tuple conditional_breakpoint_exception
    cdef public str pydev_message
    cdef public int suspend_type
    cdef public int pydev_next_line
    cdef public str pydev_func_name
    cdef public bint suspended_at_unhandled
    cdef public str trace_suspend_type
    cdef public object top_level_thread_tracer_no_back_frames
    cdef public object top_level_thread_tracer_unhandled
    cdef public object thread_tracer
    cdef public object step_in_initial_location
    # Smart step-into bookkeeping (offsets and candidate variants).
    cdef public int pydev_smart_parent_offset
    cdef public int pydev_smart_child_offset
    cdef public tuple pydev_smart_step_into_variants
    cdef public dict target_id_to_smart_step_into_variant
    cdef public bint pydev_use_scoped_step_frame

View File

@ -0,0 +1,52 @@
# Loads the cython-accelerated debugger internals, falling back to
# platform/version-specific prebuilt module names when the plain
# `pydevd_cython` extension is not importable.
import sys

try:
    try:
        from _pydevd_bundle_ext import pydevd_cython as mod

    except ImportError:
        from _pydevd_bundle import pydevd_cython as mod

except ImportError:
    import struct

    try:
        # Pointer size discriminates 32-bit vs 64-bit builds.
        is_python_64bit = (struct.calcsize('P') == 8)
    except:
        # In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways.
        raise ImportError
    plat = '32'
    if is_python_64bit:
        plat = '64'

    # We also accept things as:
    #
    # _pydevd_bundle.pydevd_cython_win32_27_32
    # _pydevd_bundle.pydevd_cython_win32_34_64
    #
    # to have multiple pre-compiled pyds distributed along the IDE
    # (generated by build_tools/build_binaries_windows.py).
    mod_name = 'pydevd_cython_%s_%s%s_%s' % (sys.platform, sys.version_info[0], sys.version_info[1], plat)
    check_name = '_pydevd_bundle.%s' % (mod_name,)
    mod = getattr(__import__(check_name), mod_name)

# Regardless of how it was found, make sure it's later available as the
# initial name so that the expected types from cython in frame eval
# are valid.
sys.modules['_pydevd_bundle.pydevd_cython'] = mod

# Re-export the accelerated symbols under their expected names.
trace_dispatch = mod.trace_dispatch
PyDBAdditionalThreadInfo = mod.PyDBAdditionalThreadInfo
set_additional_thread_info = mod.set_additional_thread_info
global_cache_skips = mod.global_cache_skips
global_cache_frame_skips = mod.global_cache_frame_skips
_set_additional_thread_info_lock = mod._set_additional_thread_info_lock
fix_top_level_trace_and_get_trace_func = mod.fix_top_level_trace_and_get_trace_func
version = getattr(mod, 'version', 0)

Some files were not shown because too many files have changed in this diff Show More