This commit is contained in:
= 2024-07-16 08:29:17 -06:00
parent bb1f22d432
commit d22e7a5acc
2829 changed files with 88 additions and 464484 deletions

View File

@ -2,7 +2,13 @@
set -g default-terminal "screen-256color"
# Disable Right Click Menu
unbind -n MouseDown3Pane
# History
set-option -g history-limit 5000
bind \; command-prompt
# Set ZSH
set -g default-shell /bin/zsh
@ -20,14 +26,35 @@ bind -n M-Right select-pane -R
bind -n M-Up select-pane -U
bind -n M-Down select-pane -D
## Pane Management
# Marking
bind \` switch-client -t'{marked}' # Switch to marked Pane
## Pane/Window Management
# Pane Numbering
set -g base-index 1
setw -g pane-base-index 1
set -g renumber-windows on
unbind % #unbind Horizontal Split
unbind '"' # unbind Vertical Split
bind | split-window -h
# Splits
bind-key "|" split-window -h -c "#{pane_current_path}"
bind-key "\\" split-window -fh -c "#{pane_current_path}"
bind -n IC split-window -h # Split Horizontal with Insert
bind-key "-" split-window -v -c "#{pane_current_path}"
bind-key "_" split-window -fv -c "#{pane_current_path}"
bind j choose-window 'join-pane -h -s "%%"'
bind J choose-window 'join-pane -s "%%"'
bind END kill-pane # Kill active pane
bind Bspace kill-window # Kill Active Window
bind c new-window -c "#{pane_current_path}" # Keep Path
bind Space last-window
bind - split-window -v
@ -36,6 +63,18 @@ bind C-l send-keys 'C-l' # Fix Clear Screen
unbind r
bind r source-file ~/.tmux.conf\; display "Reloaded"
bind-key h split-window -h "vim ~/scratch/notes.md"
# Select Window (Mac)
bind-key -n ¡ select-window -t 1
bind-key -n ™ select-window -t 2
bind-key -n £ select-window -t 3
bind-key -n ¢ select-window -t 4
# Swap Window
bind -r "<" swap-window -d -t -1
bind -r ">" swap-window -d -t +1
set -g mouse on
@ -56,6 +95,7 @@ set -g @plugin 'tmux-plugins/tpm'
# list of tmux plugins
#set -g @plugin 'christoomey/vim-tmux-navigator'
set -g @plugin 'jimeh/tmux-themepack'
set-option -g @plugin 'b0o/tmux-autoreload'
#set -g @plugin 'tmux-plugins/tmux-resurrect'
#set -g @plugin 'tmux-plugins/tmux-continuum'

View File

@ -1 +1 @@
{"hashicorp.terraform-2.31.0-darwin-arm64":true,"ms-python.python-2024.8.0":true,"ms-python.debugpy-2024.6.0-darwin-arm64":true}
{"ms-python.vscode-pylance-2024.6.1":true}

File diff suppressed because one or more lines are too long

View File

@ -1,46 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<PackageManifest Version="2.0.0" xmlns="http://schemas.microsoft.com/developer/vsx-schema/2011" xmlns:d="http://schemas.microsoft.com/developer/vsx-schema-design/2011">
<Metadata>
<Identity Language="en-US" Id="terraform" Version="2.31.0" Publisher="hashicorp" TargetPlatform="darwin-arm64"/>
<DisplayName>HashiCorp Terraform</DisplayName>
<Description xml:space="preserve">Syntax highlighting and autocompletion for Terraform</Description>
<Tags>devops,terraform,hcl,snippet,Terraform,__ext_tf,terraform-vars,__ext_tfvars,terraform-stack,Terraform Stack,__ext_tfstackhcl,terraform-deploy,Terraform Deployment,__ext_tfdeployhcl,json,__ext_tfstate,__web_extension</Tags>
<Categories>Programming Languages,Other,Formatters,Linters</Categories>
<GalleryFlags>Public</GalleryFlags>
<Properties>
<Property Id="Microsoft.VisualStudio.Code.Engine" Value="^1.86.2" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionDependencies" Value="" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionPack" Value="" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionKind" Value="workspace,web" />
<Property Id="Microsoft.VisualStudio.Code.LocalizedLanguages" Value="" />
<Property Id="Microsoft.VisualStudio.Services.Links.Source" Value="https://github.com/hashicorp/vscode-terraform.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Getstarted" Value="https://github.com/hashicorp/vscode-terraform.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.GitHub" Value="https://github.com/hashicorp/vscode-terraform.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Support" Value="https://github.com/hashicorp/vscode-terraform/issues" />
<Property Id="Microsoft.VisualStudio.Services.Links.Learn" Value="https://github.com/hashicorp/vscode-terraform#readme" />
<Property Id="Microsoft.VisualStudio.Services.Branding.Color" Value="#FFF" />
<Property Id="Microsoft.VisualStudio.Services.Branding.Theme" Value="light" />
<Property Id="Microsoft.VisualStudio.Services.GitHubFlavoredMarkdown" Value="true" />
<Property Id="Microsoft.VisualStudio.Services.Content.Pricing" Value="Free"/>
<Property Id="Microsoft.VisualStudio.Services.CustomerQnALink" Value="https://discuss.hashicorp.com/c/terraform-core/terraform-editor-integrations/46" />
</Properties>
<License>extension/LICENSE.txt</License>
<Icon>extension/assets/icons/terraform_logo_mark_light_universal.png</Icon>
</Metadata>
<Installation>
<InstallationTarget Id="Microsoft.VisualStudio.Code"/>
</Installation>
<Dependencies/>
<Assets>
<Asset Type="Microsoft.VisualStudio.Code.Manifest" Path="extension/package.json" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.Details" Path="extension/README.md" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.Changelog" Path="extension/CHANGELOG.md" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.License" Path="extension/LICENSE.txt" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Icons.Default" Path="extension/assets/icons/terraform_logo_mark_light_universal.png" Addressable="true" />
</Assets>
</PackageManifest>

View File

@ -1,355 +0,0 @@
Copyright (c) 2016 HashiCorp, Inc.
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients'
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.

View File

@ -1,445 +0,0 @@
# Terraform Extension for Visual Studio Code
The HashiCorp [Terraform Extension for Visual Studio Code (VS Code)](https://marketplace.visualstudio.com/items?itemName=HashiCorp.terraform) with the [Terraform Language Server](https://github.com/hashicorp/terraform-ls) adds editing features for [Terraform](https://www.terraform.io) files such as syntax highlighting, IntelliSense, code navigation, code formatting, module explorer and much more!
## Quick Start
Get started writing Terraform configurations with VS Code in three steps:
- **Step 1:** If you haven't done so already, install [Terraform](https://www.terraform.io/downloads)
- **Step 2:** Install the [Terraform Extension for VS Code](https://marketplace.visualstudio.com/items?itemName=HashiCorp.terraform).
- **Step 3:** To activate the extension, open any folder or VS Code workspace containing Terraform files. Once activated, the Terraform language indicator will appear in the bottom right corner of the window.
New to Terraform? Read the [Terraform Learning guides](https://learn.hashicorp.com/terraform)
See [Usage](#usage) for more detailed getting started information.
Read the [Troubleshooting Guide](#troubleshooting) for answers to common questions.
## Features
- [IntelliSense](#intellisense-and-autocomplete) Edit your code with auto-completion of providers, resource names, data sources, attributes and more
- [Syntax validation](#syntax-validation) Provides inline diagnostics for invalid configuration as you type
- [Syntax highlighting](#syntax-highlighting) Highlighting syntax from Terraform 0.12 to 1.X
- [Code Navigation](#code-navigation) Navigate through your codebase with Go to Definition and Symbol support
- [Code Formatting](#code-formatting) Format your code with `terraform fmt` automatically
- [Code Snippets](#code-snippets) Shortcuts for common snippets like `for_each` and `variable`
- [HCP Terraform Integration](#hcp-terraform-integration) View HCP Terraform Workspaces and Run details inside VS Code
- [Terraform Module Explorer](#terraform-module-and-provider-explorer) View all modules and providers referenced in the currently open document.
- [Terraform commands](#terraform-commands) Directly execute commands like `terraform init` or `terraform plan` from the VS Code Command Palette.
### IntelliSense and Autocomplete
IntelliSense is a general term for a variety of code editing features including: code completion, parameter info, quick info, and member lists. IntelliSense features are sometimes called by other names such as autocomplete, code completion, and code hinting.
For Terraform constructs like resource and data, labels, blocks and attributes are auto completed both at the root of the document and inside other blocks. This also works for Terraform modules that are installed in the workspace, attributes and other constructs are autocompleted.
> **Note:** If there are syntax errors present in the document upon opening, intellisense may not provide all completions. Please fix the errors and reload the document and intellisense will return. See [hcl-lang#57](https://github.com/hashicorp/hcl-lang/issues/57) for more information.
Invoking intellisense is performed through the [keyboard combination](https://code.visualstudio.com/docs/getstarted/keybindings) for your platform and the results depend on where the cursor is placed.
If the cursor is at the beginning of a line and no other characters are present, then a list of constructs like `data`, `provider`, `resource`, etc are shown.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/intellisense1.png)
If inside a set of quotes or inside a block, the extension provides context specific completions appropriate for the location. For example, inside a `resource` block attributes for a given provider are listed.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/intellisense2.png)
Combining `editor.suggest.preview` with the [pre-fill required fields](#code-completion) setting will provide inline snippet suggestions for blocks of code:
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/intellisense3.png)
Completing the snippet allows you to tab complete through each attribute and block.
### Syntax Validation
Terraform configuration files are validated when opened and on change, and invalid code is marked with diagnostics.
HCL syntax is checked for e.g. missing control characters like `}`, `"` or others in the wrong place.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/validation-rule-hcl.png)
Enhanced validation of selected Terraform language constructs in both `*.tf` and `*.tfvars` files based on detected Terraform version and provider versions is also provided. This can highlight deprecations, missing required attributes or blocks, references to undeclared variables and more, [as documented](https://github.com/hashicorp/terraform-ls/blob/main/docs/validation.md#enhanced-validation).
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/validation-rule-missing-attribute.png)
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/validation-rule-invalid-ref.png)
The enhanced validation feature is enabled by default but can be disabled using the following setting:
```json
"terraform.validation.enableEnhancedValidation": false
```
The extension also provides validation through [`terraform validate`](https://www.terraform.io/cli/commands/validate). This can be triggered via command palette. Unlike the other validation methods, this one requires the Terraform CLI installed and a previous successful run of `terraform init` (i.e. local installation of all providers and modules) to function correctly. It is the slowest method, but the most thorough - i.e. it will catch the most mistakes.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/validation-cli-command.png)
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/validation-cli-diagnostic.png)
### Syntax Highlighting
Terraform syntax highlighting recognizes language constructs from Terraform version 0.12 to 1.X. Terraform providers, modules, variables and other high-level constructs are recognized, as well as more complex code statements like `for` loops, conditional expressions, and other complex expressions.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/syntax.png)
Some language constructs will highlight differently for older versions of Terraform that are incompatible with newer ways of expressing Terraform code. In these cases we lean toward ensuring the latest version of Terraform displays correctly and do our best with older versions.
### Code Navigation
While editing, you can right-click different identifiers to take advantage of several convenient commands
- `Go to Definition` (`F12`) navigates to the code that defines the construct where your cursor is. This command is helpful when you're working with Terraform modules and variables defined in other files than the currently opened document.
- `Peek Definition` (`Alt+F12`) displays the relevant code snippet for the construct where your cursor is directly in the current editor instead of navigating to another file.
- `Go to Declaration` navigates to the place where the variable or other construct is declared.
- `Peek Declaration` displays the declaration directly inside the current editor.
### Code Formatting
This extension utilizes [`terraform fmt`](https://www.terraform.io/cli/commands/fmt) to rewrite an open document to a canonical format and style. This command applies a subset of the [Terraform language style conventions](https://www.terraform.io/language/syntax/style), along with other minor adjustments for readability.
See the [Formatting](#formatting) Configuration section for information on how to configure this feature.
### Code Snippets
The extension provides several snippets to accelerate adding Terraform code to your configuration files:
- `fore` - For Each
- `module` - Module
- `output` - Output
- `provisioner` - Provisioner
- `vare` - Empty variable
- `varm` - Map Variable
### HCP Terraform Integration
Every time you have to switch away from your code, you risk losing momentum and the context about your tasks. Previously, Terraform users needed to have at least two windows open — their editor and a web page — to develop Terraform code. The editor contains all of the Terraform code they are working on, and the web page has the HCP Terraform workspace loaded. Switching back and forth between the HCP Terraform website and the text editor can be a frustrating and fragmented experience.
The HCP Terraform Visual Studio Code integration improves user experience by allowing users to view workspaces directly from within Visual Studio Code. Users can view the status of current and past runs and inspect detailed logs without ever leaving the comfort of their editor.
To start using HCP Terraform with VS Code, open the new HCP Terraform sidebar and click "Login to HCP Terraform". You can login using a stored token from the Terraform CLI, an existing token you provide, or open the HCP Terraform website to generate a new token.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/login_view.gif)
Once logged in, you are prompted to choose which Organization to view workspaces in.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/choose_org_view.png)
Now that your Organization is chosen, the Workspace view populates with all workspaces your token has permission to view. At a glance, you can see the last run status of each Workspace. Hovering over a workspace shows detailed information about each workspace.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/workspace_view.gif)
Selecting a workspace populates the Run view with a list of runs for that workspace. At a glance, you can see the status of each Run, and hover over each for more detailed information.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/workspace_run_view.gif)
If a Run has been Planned or Applied, you can view the raw log for each by expanding the Run then selecting the 'View Raw Log' button for either the Plan or Apply.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/plan_apply_view.gif)
To sign out or log out of your HCP Terraform session, click the Accounts icon next to the Settings icon in the Activity Bar and select "Sign Out":
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/tfc/log_out.png)
This will clear the currently saved token and allow you to login using a different token.
### Terraform Module and Provider Explorer
List Terraform modules used in the current open document in the Explorer Pane, or drag to the Side Bar pane for an expanded view.
Each item shows an icon indicating where the module comes from (local filesystem, git repository, or Terraform Registry).
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/module_calls.png)
If the module comes from the Terraform Registry, a link to open the documentation in a browser is provided.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/module_calls_doc_link.png)
List Terraform providers used in the current open document in the Explorer Pane, or drag to the Side Bar pane for an expanded view.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/module_providers.png)
### Terraform Commands
The extension provides access to several Terraform commands through the Command Palette:
- Terraform: init
- Terraform: init current folder
- Terraform: validate
- Terraform: plan
## Requirements
The Terraform VS Code extension bundles the [Terraform Language Server](https://github.com/hashicorp/terraform-ls) and is a self-contained install.
The extension does require the following to be installed before use:
- VS Code v1.86 or greater
- Terraform v0.12 or greater
## Platform Support
The extension should work anywhere VS Code itself and Terraform 0.12 or higher is supported. Our test matrix includes the following:
- Windows Server 2022 with Terraform v1.6
- macOS 12 with Terraform v1.6
- macOS 11 with Terraform v1.6
- Ubuntu 22.04 with Terraform v1.6
Intellisense, error checking and other language features are supported for Terraform v0.12 and greater.
Syntax highlighting targets Terraform v1.0 and greater. Highlighting 0.12-0.15 configuration is done on a best effort basis.
## Usage
### VS Code Workspace support
It is a common pattern to have separate folders containing related Terraform configuration that are not contained under one root folder. For example, you have a main Terraform folder containing the configuration for a single application and several module folders containing encapsulated code for configuring different parts of component pieces. You could open each folder in a separate VS Code window, and bounce between each window to author your changes.
A better approach is to use [VS Code Workspaces](https://code.visualstudio.com/docs/editor/workspaces). Using our example above, open the main Terraform folder first, then use Add Folder to workspace to add the dependent module folders. A single VS Code window is used and all Terraform files are available to author your changes. This uses a single terraform-ls process that has an understanding of your entire project, allowing you to use features like `Go to Symbol` and `Reference counts` across your project.
### Single file support
Opening a single Terraform configuration file inside VS Code is currently not supported. We see this approach most commonly attempted by users of terminal editors like vim, where it is common to edit a single file at a time.
The Terraform VS Code extension works best when it has the full context of a Terraform project where it can parse the referenced files and provide the expected advanced language features.
The recommended workflow is to instead open the containing folder for the desired Terraform file inside a single VS Code editor window, then navigate to the desired file. This seems counter-intuitive when you only want to edit a single file, but this allows the extension to understand the Terraform setup you are using and provide accurate and helpful intellisense, error checking, and other language features.
### Refresh Intellisense
The extension will pick up new schema for Terraform providers you reference in your configuration files automatically whenever anything changes inside `.terraform`.
To provide the extension with an up-to-date schema for the Terraform providers used in your configuration:
1. Open any folder or VS Code workspace containing Terraform files.
1. Open the Command Palette and run `Terraform: init current folder` or perform a `terraform init` from the terminal.
### Remote Extension support
The Visual Studio Code [Remote - WSL extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-wsl) lets you use the Windows Subsystem for Linux (WSL) as your full-time development environment right from VS Code. You can author Terraform configuration files in a Linux-based environment, use Linux-specific toolchains and utilities from the comfort of Windows.
The Remote WSL extension runs the [HashiCorp Extension](https://marketplace.visualstudio.com/items?itemName=HashiCorp.terraform) and other extensions directly in WSL so you can edit files located in WSL or the mounted Windows filesystem (for example /mnt/c) without worrying about pathing issues, binary compatibility, or other cross-OS challenges.
For a detailed walkthrough for how to get started using WSL and VS Code, see https://code.visualstudio.com/docs/remote/wsl-tutorial.
## Configuration
The extension does not require any initial configuration and should work out of the box. To take advantage of additional VS Code features or experimental extension features you can configure settings to customize behavior.
This extension offers several configuration options. To modify these open the [VS Code Settings Editor](https://code.visualstudio.com/docs/getstarted/settings#_settings-editor) in the UI or JSON view for [user and workspace level](https://code.visualstudio.com/docs/getstarted/settings#_creating-user-and-workspace-settings) settings, [scope your settings by language](https://code.visualstudio.com/docs/getstarted/settings#_languagespecific-editor-settings), or alternatively modify the `.vscode/settings.json` file in the root of your working directory.
### Code Completion
An experimental option can be enabled to prefill required fields when completing Terraform blocks with the following setting:
```json
"terraform.experimentalFeatures.prefillRequiredFields": true
```
For example, choosing `aws_alb_listener` in the following block inserts a snippet in the current line with the `resource` block entirely filled out, containing tab stops to fill in the required values.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/pre-fill.png)
Combine this with `editor.suggest.preview` and the editor will provide inline snippet suggestions for blocks of code:
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/intellisense3.png)
Completing the snippet allows you to tab complete through each attribute and block.
### Code Lens
Display reference counts above top level blocks and attributes
```json
"terraform.codelens.referenceCount": true
```
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/code_lens.png)
> **Note:** This feature impacts extension performance when opening folders with many modules present. If you experience slowness or high CPU utilization, open a smaller set of folders or disable this setting.
### Formatting
To enable automatic formatting, it is recommended that the following be added to the extension settings for the Terraform extension:
```json
"[terraform]": {
"editor.defaultFormatter": "hashicorp.terraform",
"editor.formatOnSave": true,
"editor.formatOnSaveMode": "file"
},
"[terraform-vars]": {
"editor.defaultFormatter": "hashicorp.terraform",
"editor.formatOnSave": true,
"editor.formatOnSaveMode": "file"
}
```
> It is recommended to set `editor.defaultFormatter` to ensure that VS Code knows which extension to use to format your files. It is possible to have more than one extension installed which claim a capability to format Terraform files.
When using the `editor.formatOnSaveMode` setting, only `file` is currently supported. The `modifications` or `modificationsIfAvailable` settings [use the currently configured SCM](https://code.visualstudio.com/updates/v1_49#_only-format-modified-text) to detect file line ranges that have changed and send those ranges to the formatter. The `file` setting works because `terraform fmt` was originally designed for formatting an entire file, not ranges. If you don't have an SCM enabled for the files you are editing, `modifications` won't work at all. The `modificationsIfAvailable` setting will fall back to `file` if there is no SCM and will appear to work sometimes.
If you want to use `editor.codeActionsOnSave` with `editor.formatOnSave` to automatically format Terraform files, use the following configuration:
```json
"editor.formatOnSave": true,
"[terraform]": {
"editor.defaultFormatter": "hashicorp.terraform",
"editor.formatOnSave": false,
"editor.codeActionsOnSave": {
"source.formatAll.terraform": true
},
},
"[terraform-vars]": {
"editor.defaultFormatter": "hashicorp.terraform",
"editor.formatOnSave": false,
"editor.codeActionsOnSave": {
"source.formatAll.terraform": true
},
}
```
This will keep the global `editor.formatOnSave` for other languages you use, and configure the Terraform extension to only format during a `codeAction` sweep.
> **Note**: Ensure that the terraform binary is present in the environment `PATH` variable. If the terraform binary cannot be found, formatting will silently fail.
### Validation
An experimental validate-on-save option can be enabled with the following setting:
```json
"terraform.experimentalFeatures.validateOnSave": true
```
This will create diagnostics for any elements that fail validation. You can also run `terraform validate` by issuing the `Terraform: validate` in the command palette.
### Multiple Workspaces
If you have multiple root modules in your workspace, you can configure the language server settings to identify them. Edit this through the VSCode Settings UI or add a `.vscode/settings.json` file using the following template:
```json
"terraform.languageServer.rootModules": [
"/module1",
"/module2"
]
```
If you want to automatically search root modules in your workspace and exclude some folders, you can configure the language server settings to identify them.
```json
"terraform.languageServer.excludeRootModules": [
"/module3",
"/module4"
]
```
If you want to automatically ignore certain directories when terraform-ls indexes files, add the folder names to this setting:
```json
"terraform.languageServer.ignoreDirectoryNames": [
"folder1",
"folder2"
]
```
### Terraform command options
You can configure the path to the Terraform binary used by terraform-ls to perform operations inside the editor by configuring this setting:
```json
"terraform.languageServer.terraform.path": "C:/some/folder/path"
```
You can override the Terraform execution timeout by configuring this setting:
```json
"terraform.languageServer.terraform.timeout": "30"
```
You can set the path Terraform logs (`TF_LOG_PATH`) by configuring this setting:
```json
"terraform.languageServer.terraform.logFilePath": "C:/some/folder/path/log-{{varname}}.log"
```
Supports variables (e.g. timestamp, pid, ppid) via Go template syntax `{{varname}}`
### Telemetry
We use telemetry to send error reports to our team, so we can respond more effectively. You can configure VS Code to send all telemetry, just crash telemetry, just errors or turn it off entirely by [configuring](https://code.visualstudio.com/docs/getstarted/telemetry#_disable-telemetry-reporting) `"telemetry.telemetryLevel"` to your desired value. You can also [monitor what's being sent](https://code.visualstudio.com/docs/getstarted/telemetry#_output-channel-for-telemetry-events) in your logs.
## Known Issues
- If there are syntax errors present in the document upon opening, intellisense may not provide all completions. Run `Terraform: validate` and fix validation errors, then reload the document and intellisense will work again. Potential solutions for this are being investigated; see [hcl-lang#57](https://github.com/hashicorp/hcl-lang/issues/57) for more information.
- Completion inside incomplete blocks, such as `resource "here` (without the closing quote and braces) is not supported. You can complete the 1st level blocks though and that will automatically trigger subsequent completion for e.g. resource types. See [terraform-ls#57](https://github.com/hashicorp/terraform-ls/issues/57) for more information.
- A number of different folder configurations (specifically when your root module is not a parent to any submodules) are not yet supported. More information available in ([terraform-ls#32](https://github.com/hashicorp/terraform-ls/issues/32#issuecomment-649707345))
### Terraform 0.11 compatibility
If you are using a Terraform version prior to 0.12.0, you can install the pre-transfer 1.4.0 version of this extension by following the instructions in the [pin version section](#pin-to-a-specific-version-of-the-extension).
The configuration has changed from 1.4.0 to v2.X. If you are having issues with the Language Server starting, you can reset the configuration to the following:
```json
"terraform.languageServer.enable": true,
"terraform.languageServer.args": ["serve"]
```
## Troubleshooting
- If you have a question about how to accomplish something with the extension, please ask on the [Terraform Editor Discuss site](https://discuss.hashicorp.com/c/terraform-core/terraform-editor-integrations/46)
- If you come across a problem with the extension, please file an [issue](https://github.com/hashicorp/vscode-terraform/issues/new/choose).
- If someone has already filed an issue that encompasses your feedback, please leave a 👍/👎 reaction on the issue
- Contributions are always welcome! Please see our [contributing guide](https://github.com/hashicorp/vscode-terraform/issues/new?assignees=&labels=enhancement&template=feature_request.md) for more details
- If you're interested in the development of the extension, you can read about our [development process](https://github.com/hashicorp/vscode-terraform/blob/HEAD/DEVELOPMENT.md)
### Settings Migration
Read more about [changes in settings options introduced in v2.24.0](https://github.com/hashicorp/vscode-terraform/blob/HEAD/docs/settings-migration.md).
### Generate a bug report
Experience a problem? You can have VS Code open a Github issue in our repo with all the information filled out for you. Open the Command Palette and invoke `Terraform: Generate Bug Report`. This will inspect the VS Code version, the Terraform extension version, the terraform-ls version and the list of installed extensions and open a browser window with GitHub loaded. You can then inspect the information provided, edit if desired, and submit the issue.
### Reload the extension
If you haven't seen the Problems Pane update in awhile, or hover and intellisense don't seem to be showing up, you might not know what to do. Sometimes the Terraform extension can experience problems which cause the editor to not respond. The extension has a way of [reporting the problem](#generate-a-bug-report), but there is something you can do to get right back to working after reporting the problem: reload the Terraform extension.
You can reload the Terraform extension by opening the command palette and starting to type `Reload`. A list of commands will appear, select `Reload Window`. This will reload the Visual Studio Code window without closing down the entire editor, and without losing any work currently open in the editor.
### Pin to a specific version of the extension
If you wish to install a specific version of the extension, you can choose 'Install Another version' option in the Extensions pane. This will bring up a list of prior versions for the selected extension. Choose the version you want to install from the list.
![](https://github.com/hashicorp/vscode-terraform/raw/HEAD/docs/pin_version.png)
## Code of Conduct
HashiCorp Community Guidelines apply to you when interacting with the community here on GitHub and contributing code to this repository.
Please read the full text at https://www.hashicorp.com/community-guidelines
## Contributing
We are an open source project on GitHub and would enjoy your contributions! Consult our [development guide](https://github.com/hashicorp/vscode-terraform/blob/HEAD/DEVELOPMENT.md) for steps on how to get started. Please [open a new issue](https://github.com/hashicorp/vscode-terraform/issues) before working on a PR that requires significant effort. This will allow us to make sure the work is in line with the project's goals.
## Release History
**v2.0.0** is the first official release from HashiCorp, prior releases were by [Mikael Olenfalk](https://github.com/mauve).
The 2.0.0 release integrates a new [Language Server package from HashiCorp](https://github.com/hashicorp/terraform-ls). The extension will install and upgrade terraform-ls to continue to add new functionality around code completion and formatting. See the [terraform-ls CHANGELOG](https://github.com/hashicorp/terraform-ls/blob/main/CHANGELOG.md) for details.
In addition, this new version brings the syntax highlighting up to date with all HCL2 features, as needed for Terraform 0.12 and above.
> **Configuration Changes** Please note that in 2.x, the configuration differs from 1.4.0, see [Known Issues](#known-issues) for more information.
See the [CHANGELOG](https://github.com/hashicorp/vscode-terraform/blob/main/CHANGELOG.md) for more detailed release notes.
## Credits
- [Mikael Olenfalk](https://github.com/mauve) - creating and supporting the [vscode-terraform](https://github.com/mauve/vscode-terraform) extension, which was used as a starting point and inspiration for this extension.

View File

@ -1,13 +0,0 @@
<svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_0_1)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M23 11L40 21.3349V42L23 31.6697V11Z" fill="#7B42BC"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M42 21.3349V42L60 31.6697V11L42 21.3349Z" fill="#7B42BC"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M3 0V20L21 30V10L3 0Z" fill="#7B42BC"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M23 53.9985L40 64V44.1353V44.0015L23 34V53.9985Z" fill="#7B42BC"/>
</g>
<defs>
<clipPath id="clip0_0_1">
<rect width="64" height="64" fill="white"/>
</clipPath>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 653 B

View File

@ -1,11 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="64" height="64" fill="none" viewBox="1 1 62 62">
<g fill="#a074c4" fill-rule="evenodd" clip-path="url(#a) translate(1,0)" clip-rule="evenodd">
<path
d="m23 11 17 10.335V42L23 31.67zM42 21.335V42l18-10.33V11zM3 0v20l18 10V10zM23 53.999 40 64V44.002L23 34z" />
</g>
<defs>
<clipPath id="a">
<path fill="#fff" d="M0 0h64v64H0z" />
</clipPath>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 434 B

View File

@ -1,14 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_876_66097)">
<path d="M5.33333 4.3999V9.19798L9.47487 11.5976V6.79957L5.33333 4.3999Z" fill="black"/>
<path d="M14.0714 9.47665L9.92863 7.07825V11.8763L14.0714 14.276V9.47665Z" fill="black"/>
<path d="M14.5251 14.276V9.47665L18.6667 7.07825V11.8763L14.5251 14.276Z" fill="black"/>
<path d="M14.0702 19.5999L9.92863 17.2002V12.4022L14.0702 14.8018V19.5999Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M19.2 1H4.8C3.94342 1 3.36113 1.00078 2.91104 1.03755C2.47262 1.07337 2.24842 1.1383 2.09202 1.21799C1.71569 1.40973 1.40973 1.71569 1.21799 2.09202C1.1383 2.24842 1.07337 2.47262 1.03755 2.91104C1.00078 3.36113 1 3.94342 1 4.8V19.2C1 20.0566 1.00078 20.6389 1.03755 21.089C1.07337 21.5274 1.1383 21.7516 1.21799 21.908C1.40973 22.2843 1.71569 22.5903 2.09202 22.782C2.24842 22.8617 2.47262 22.9266 2.91104 22.9625C3.36113 22.9992 3.94342 23 4.8 23H19.2C20.0566 23 20.6389 22.9992 21.089 22.9625C21.5274 22.9266 21.7516 22.8617 21.908 22.782C22.2843 22.5903 22.5903 22.2843 22.782 21.908C22.8617 21.7516 22.9266 21.5274 22.9624 21.089C22.9992 20.6389 23 20.0566 23 19.2V4.8C23 3.94342 22.9992 3.36113 22.9624 2.91104C22.9266 2.47262 22.8617 2.24842 22.782 2.09202C22.5903 1.71569 22.2843 1.40973 21.908 1.21799C21.7516 1.1383 21.5274 1.07337 21.089 1.03755C20.6389 1.00078 20.0566 1 19.2 1ZM0.32698 1.63803C0 2.27976 0 3.11984 0 4.8V19.2C0 20.8802 0 21.7202 0.32698 22.362C0.614601 22.9265 1.07354 23.3854 1.63803 23.673C2.27976 24 3.11984 24 4.8 24H19.2C20.8802 24 21.7202 24 22.362 23.673C22.9265 23.3854 23.3854 22.9265 23.673 22.362C24 21.7202 24 20.8802 24 19.2V4.8C24 3.11984 24 2.27976 23.673 1.63803C23.3854 1.07354 22.9265 0.614601 22.362 0.32698C21.7202 0 20.8802 0 19.2 0H4.8C3.11984 0 2.27976 0 1.63803 0.32698C1.07354 0.614601 0.614601 1.07354 0.32698 1.63803Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_876_66097">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 2.0 KiB

View File

@ -1,15 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_876_66101)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.68755 6.6936L13.3125 8.79221V12.9916L9.68755 10.8919V6.6936Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M13.7095 8.79221V12.9916L17.3333 10.8919V6.6936L13.7095 8.79221Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M5.66667 4.3501V8.54841L9.29051 10.6481V6.44981L5.66667 4.3501Z" fill="black"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.68755 15.5504L13.3114 17.6501V13.4793V13.4518L9.68755 11.3521V15.5504Z" fill="black"/>
<path d="M24 20.75C24 22.5449 22.5449 24 20.75 24C20.7314 24 20.7129 23.9998 20.6944 23.9995L16.25 24.0001C15.0074 24.0001 14 22.9926 14 21.75C14 20.5074 15.0074 19.5 16.25 19.5C16.7562 19.5 17.2234 19.6672 17.5994 19.9494C17.956 18.5416 19.2314 17.5 20.75 17.5C22.5449 17.5 24 18.9551 24 20.75Z" fill="black"/>
<path d="M4.8 1H18.2C19.0566 1 19.6389 1.00078 20.089 1.03755C20.5274 1.07337 20.7516 1.1383 20.908 1.21799C21.2843 1.40973 21.5903 1.71569 21.782 2.09202C21.8617 2.24842 21.9266 2.47262 21.9624 2.91104C21.9992 3.36113 22 3.94342 22 4.8V16.1662C22.3512 16.2617 22.6862 16.3966 23 16.5657V4.8C23 3.11984 23 2.27976 22.673 1.63803C22.3854 1.07354 21.9265 0.614601 21.362 0.32698C20.7202 0 19.8802 0 18.2 0H4.8C3.11984 0 2.27976 0 1.63803 0.32698C1.07354 0.614601 0.614601 1.07354 0.32698 1.63803C0 2.27976 0 3.11984 0 4.8V18.2C0 19.8802 0 20.7202 0.32698 21.362C0.614601 21.9265 1.07354 22.3854 1.63803 22.673C2.27976 23 3.11984 23 4.8 23H12.7134C12.6017 22.684 12.5311 22.3485 12.5082 22H4.8C3.94342 22 3.36113 21.9992 2.91104 21.9624C2.47262 21.9266 2.24842 21.8617 2.09202 21.782C1.71569 21.5903 1.40973 21.2843 1.21799 20.908C1.1383 20.7516 1.07337 20.5274 1.03755 20.089C1.00078 19.6389 1 19.0566 1 18.2V4.8C1 3.94342 1.00078 3.36113 1.03755 2.91104C1.07337 2.47262 1.1383 2.24842 1.21799 2.09202C1.40973 1.71569 1.71569 1.40973 2.09202 1.21799C2.24842 1.1383 2.47262 1.07337 2.91104 1.03755C3.36113 1.00078 3.94342 1 4.8 1Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_876_66101">
<rect width="24" height="24" fill="white"/>
</clipPath>
</defs>
</svg>

Before

Width:  |  Height:  |  Size: 2.2 KiB

View File

@ -1,356 +0,0 @@
Copyright (c) 2020 HashiCorp, Inc.
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
   modifications of Covered Software, or (ii) the combination of its
   Contributions with other software (except as part of its Contributor
   Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients' rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
   or sublicense it under different terms, provided that the license for
   the Executable Form does not attempt to limit or alter the recipients'
   rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party's negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party's ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.

View File

@ -1,81 +0,0 @@
{
"comments": {
"lineComment": "#",
"blockComment": [
"/*",
"*/"
]
},
"brackets": [
[
"{",
"}"
],
[
"[",
"]"
],
[
"(",
")"
]
],
"autoClosingPairs": [
{
"open": "{",
"close": "}"
},
{
"open": "[",
"close": "]"
},
{
"open": "(",
"close": ")"
},
{
"open": "\"",
"close": "\"",
"notIn": [
"string"
]
},
{
"open": "'",
"close": "'",
"notIn": [
"string",
"comment"
]
}
],
"autoCloseBefore": ";:.,=}])> \n\t\"",
"surroundingPairs": [
[
"{",
"}"
],
[
"[",
"]"
],
[
"(",
")"
],
[
"\"",
"\""
],
[
"'",
"'"
]
],
"folding": {
"markers": {
"start": "^\\s*#region",
"end": "^\\s*#endregion"
}
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,13 +0,0 @@
/*!
* mime-db
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015-2022 Douglas Christopher Wilson
* MIT Licensed
*/
/*!
* mime-types
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2015 Douglas Christopher Wilson
* MIT Licensed
*/

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,968 +0,0 @@
{
"icon": "assets/icons/terraform_logo_mark_light_universal.png",
"name": "terraform",
"displayName": "HashiCorp Terraform",
"description": "Syntax highlighting and autocompletion for Terraform",
"version": "2.31.0",
"publisher": "hashicorp",
"appInsightsKey": "885372d2-6f3c-499f-9d25-b8b219983a52",
"license": "MPL-2.0",
"preview": false,
"private": true,
"extensionKind": [
"workspace"
],
"engines": {
"npm": "~10.X",
"node": "~18.X",
"vscode": "^1.86.2"
},
"langServer": {
"version": "0.33.3"
},
"syntax": {
"version": "0.5.0"
},
"qna": "https://discuss.hashicorp.com/c/terraform-core/terraform-editor-integrations/46",
"bugs": {
"url": "https://github.com/hashicorp/vscode-terraform/issues",
"email": "terraform-vscode@hashicorp.com"
},
"categories": [
"Programming Languages",
"Other",
"Formatters",
"Linters"
],
"keywords": [
"devops",
"terraform",
"hcl"
],
"galleryBanner": {
"color": "#FFF",
"theme": "light"
},
"repository": {
"type": "git",
"url": "https://github.com/hashicorp/vscode-terraform.git"
},
"activationEvents": [
"onAuthenticationRequest:terraform",
"onView:terraform-modules",
"workspaceContains:**/*.tf",
"workspaceContains:**/*.tfvars"
],
"main": "./out/extension",
"browser": "./out/web/extension",
"contributes": {
"languages": [
{
"id": "terraform",
"aliases": [
"Terraform",
"terraform"
],
"extensions": [
".tf"
],
"configuration": "./language-configuration.json"
},
{
"id": "terraform-vars",
"extensions": [
".tfvars"
],
"configuration": "./language-configuration.json"
},
{
"id": "terraform-stack",
"aliases": [
"Terraform Stack"
],
"extensions": [
".tfstack.hcl"
],
"configuration": "./language-configuration.json",
"icon": {
"dark": "assets/icons/terraform_stacks.svg",
"light": "assets/icons/terraform_stacks.svg"
}
},
{
"id": "terraform-deploy",
"aliases": [
"Terraform Deployment"
],
"extensions": [
".tfdeploy.hcl"
],
"configuration": "./language-configuration.json",
"icon": {
"dark": "assets/icons/terraform_stacks.svg",
"light": "assets/icons/terraform_stacks.svg"
}
},
{
"id": "json",
"extensions": [
".tfstate"
]
}
],
"grammars": [
{
"language": "terraform",
"scopeName": "source.hcl.terraform",
"path": "./syntaxes/terraform.tmGrammar.json"
},
{
"language": "terraform-vars",
"scopeName": "source.hcl.terraform",
"path": "./syntaxes/terraform.tmGrammar.json"
},
{
"language": "terraform-stack",
"scopeName": "source.hcl",
"path": "./syntaxes/hcl.tmGrammar.json"
},
{
"language": "terraform-deploy",
"scopeName": "source.hcl",
"path": "./syntaxes/hcl.tmGrammar.json"
}
],
"semanticTokenTypes": [
{
"id": "hcl-attrName",
"superType": "property",
"description": "Attribute name"
},
{
"id": "hcl-blockType",
"superType": "type",
"description": "Block type"
},
{
"id": "hcl-blockLabel",
"superType": "enumMember",
"description": "Block label"
},
{
"id": "hcl-bool",
"superType": "keyword",
"description": "Boolean"
},
{
"id": "hcl-string",
"superType": "string",
"description": "String"
},
{
"id": "hcl-number",
"superType": "number",
"description": "Number"
},
{
"id": "hcl-objectKey",
"superType": "parameter",
"description": "Object key"
},
{
"id": "hcl-mapKey",
"superType": "parameter",
"description": "Map key"
},
{
"id": "hcl-keyword",
"superType": "variable",
"description": "Keyword"
},
{
"id": "hcl-referenceStep",
"superType": "variable",
"description": "Reference step"
},
{
"id": "hcl-typeComplex",
"superType": "keyword",
"description": "Type (complex)"
},
{
"id": "hcl-typePrimitive",
"superType": "keyword",
"description": "Type (primitive)"
},
{
"id": "hcl-functionName",
"superType": "function",
"description": "Function name"
}
],
"semanticTokenScopes": [
{
"scopes": {
"hcl-attrName": [
"variable.other.property"
],
"hcl-blockType": [
"entity.name.type"
],
"hcl-blockLabel": [
"variable.other.enummember"
],
"hcl-bool": [
"keyword.control"
],
"hcl-string": [
"string"
],
"hcl-number": [
"constant.numeric"
],
"hcl-objectKey": [
"variable.parameter"
],
"hcl-mapKey": [
"variable.parameter"
],
"hcl-keyword": [
"keyword.control"
],
"hcl-referenceStep": [
"variable.other.readwrite"
],
"hcl-typeComplex": [
"keyword.control"
],
"hcl-typePrimitive": [
"keyword.control"
],
"hcl-functionName": [
"support.function"
]
}
}
],
"semanticTokenModifiers": [
{
"id": "hcl-dependent",
"description": "Dependent"
},
{
"id": "terraform-data",
"description": "Data source"
},
{
"id": "terraform-locals",
"description": "Locals"
},
{
"id": "terraform-module",
"description": "Module"
},
{
"id": "terraform-output",
"description": "Output"
},
{
"id": "terraform-provider",
"description": "Provider"
},
{
"id": "terraform-resource",
"description": "Resource"
},
{
"id": "terraform-provisioner",
"description": "Provisioner"
},
{
"id": "terraform-connection",
"description": "Connection"
},
{
"id": "terraform-variable",
"description": "Variable"
},
{
"id": "terraform-terraform",
"description": "Terraform"
},
{
"id": "terraform-backend",
"description": "Backend"
},
{
"id": "terraform-name",
"description": "Name"
},
{
"id": "terraform-type",
"description": "Type"
},
{
"id": "terraform-requiredProviders",
"description": "Required providers"
}
],
"icons": {
"run-status-running": {
"description": "Run Status icon",
"default": {
"fontPath": "./assets/icons/running.woff",
"fontCharacter": "D"
}
}
},
"snippets": [
{
"language": "terraform",
"path": "./snippets/terraform.json"
}
],
"configuration": [
{
"title": "General",
"order": 0,
"properties": {
"terraform.codelens.referenceCount": {
"scope": "window",
"type": "boolean",
"default": false,
"description": "Display reference counts above top level blocks and attributes."
},
"terraform.validation.enableEnhancedValidation": {
"scope": "window",
"type": "boolean",
"default": true,
"description": "Enable enhanced validation of Terraform files and modules"
}
}
},
{
"title": "Language Server",
"order": 1,
"properties": {
"terraform.languageServer.enable": {
"scope": "window",
"order": "0",
"type": "boolean",
"default": true,
"description": "Enable Terraform Language Server"
},
"terraform.languageServer.path": {
"order": "1",
"scope": "machine",
"type": "string",
"default": "",
"description": "Path to the Terraform Language Server binary (optional)"
},
"terraform.languageServer.args": {
"order": "2",
"scope": "machine",
"type": "array",
"items": {
"type": "string"
},
"default": [
"serve"
],
"description": "Arguments to pass to language server binary"
},
"terraform.languageServer.tcp.port": {
"order": 4,
"type": [
"number",
null
],
"scope": "machine",
"default": null,
"markdownDescription": "Language server TCP port to connect to. This is not compatible with `#terraform.languageServer.path#`. This is used when you want the extension to connect via TCP to an already running language server process."
},
"terraform.languageServer.ignoreSingleFileWarning": {
"order": "3",
"scope": "window",
"type": "boolean",
"default": false,
"description": "Enable warning when opening a single Terraform file instead of a Terraform folder. Enabling this will prevent the message being sent"
},
"terraform.languageServer.indexing.ignorePaths": {
"scope": "machine",
"type": "array",
"items": {
"type": "string"
},
"default": [],
"description": "Per-workspace list of paths for the language server to ignore when indexing files"
},
"terraform.languageServer.indexing.ignoreDirectoryNames": {
"scope": "machine",
"type": "array",
"items": {
"type": "string"
},
"default": [],
"description": "Per-workspace list of directory names for the language server to ignore when indexing files"
}
}
},
{
"title": "Terraform Features",
"order": 3,
"properties": {
"terraform.languageServer.terraform.path": {
"order": 0,
"scope": "machine",
"type": "string",
"description": "Path to the Terraform binary used by the Terraform Language Server"
},
"terraform.languageServer.terraform.timeout": {
"order": 1,
"scope": "machine",
"type": "string",
"description": "Overrides Terraform execution timeout (e.g. 30s) used by the Terraform Language Server"
},
"terraform.languageServer.terraform.logFilePath": {
"order": 2,
"scope": "machine",
"type": "string",
"markdownDescription": "Path to a file (`TF_LOG_PATH`) for Terraform executions to be logged used by the the Terraform Language Server. Support for variables (e.g. timestamp, pid, ppid) via Go template syntax `{{varName}}`"
}
}
},
{
"title": "Experimental Features",
"order": 4,
"properties": {
"terraform.experimentalFeatures.validateOnSave": {
"description": "Enable validating the currently open file on save",
"scope": "window",
"type": "boolean",
"default": false
},
"terraform.experimentalFeatures.prefillRequiredFields": {
"markdownDescription": "Enable autocompletion for required fields when completing Terraform blocks. Combine with `#editor.suggest.preview#` and the editor will provide inline snippet suggestions for blocks of code",
"scope": "window",
"type": "boolean",
"default": false
}
}
}
],
"commands": [
{
"command": "terraform.generateBugReport",
"title": "HashiCorp Terraform: Generate Bug Report"
},
{
"command": "terraform.enableLanguageServer",
"title": "HashiCorp Terraform: Enable Language Server"
},
{
"command": "terraform.disableLanguageServer",
"title": "HashiCorp Terraform: Disable Language Server"
},
{
"command": "terraform.init",
"title": "HashiCorp Terraform: init"
},
{
"command": "terraform.initCurrent",
"title": "HashiCorp Terraform: init current folder",
"icon": "$(cloud-download)"
},
{
"command": "terraform.validate",
"title": "HashiCorp Terraform: validate"
},
{
"command": "terraform.plan",
"title": "HashiCorp Terraform: plan"
},
{
"command": "terraform.modules.refreshList",
"title": "Refresh Module Calls",
"icon": "$(refresh)"
},
{
"command": "terraform.providers.refreshList",
"title": "Refresh Module Providers",
"icon": "$(refresh)"
},
{
"command": "terraform.modules.openDocumentation",
"title": "Open Documentation",
"icon": "$(book)"
},
{
"command": "terraform.providers.openDocumentation",
"title": "Open Documentation",
"icon": "$(book)"
},
{
"command": "terraform.cloud.login",
"title": "HCP Terraform: Login",
"enablement": "terraform.cloud.signed-in === false"
},
{
"command": "terraform.cloud.workspaces.refresh",
"title": "Refresh",
"icon": "$(refresh)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.workspaces.viewInBrowser",
"title": "View workspace",
"icon": "$(globe)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.runs.refresh",
"title": "Refresh",
"icon": "$(refresh)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.run.viewInBrowser",
"title": "View run",
"icon": "$(globe)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.run.plan.downloadLog",
"title": "View raw plan log",
"icon": "$(console)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.run.viewPlan",
"title": "View plan output",
"icon": "$(output)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.run.apply.downloadLog",
"title": "View raw apply log",
"icon": "$(output)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.run.viewApply",
"title": "View apply output",
"icon": "$(output)"
},
{
"command": "terraform.cloud.organization.picker",
"title": "HCP Terraform: Pick Organization",
"icon": "$(account)",
"enablement": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.workspaces.filterByProject",
"title": "Filter by project",
"icon": "$(filter)",
"enablement": "terraform.cloud.signed-in"
}
],
"menus": {
"commandPalette": [
{
"command": "terraform.enableLanguageServer",
"when": "config.terraform.languageServer.enable == false"
},
{
"command": "terraform.disableLanguageServer",
"when": "config.terraform.languageServer.enable == true"
},
{
"command": "terraform.init",
"when": "config.terraform.languageServer.enable == true"
},
{
"command": "terraform.initCurrent",
"when": "config.terraform.languageServer.enable == true"
},
{
"command": "terraform.validate",
"when": "config.terraform.languageServer.enable == true"
},
{
"command": "terraform.plan",
"when": "config.terraform.languageServer.enable == true"
},
{
"command": "terraform.modules.refreshList",
"when": "false"
},
{
"command": "terraform.providers.refreshList",
"when": "false"
},
{
"command": "terraform.modules.openDocumentation",
"when": "false"
},
{
"command": "terraform.providers.openDocumentation",
"when": "false"
},
{
"command": "terraform.cloud.login",
"when": "terraform.cloud.signed-in === false && terraform.cloud.views.visible"
},
{
"command": "terraform.cloud.organization.picker",
"when": "terraform.cloud.signed-in"
},
{
"command": "terraform.cloud.workspaces.viewInBrowser",
"when": "false"
},
{
"command": "terraform.cloud.run.viewInBrowser",
"when": "false"
},
{
"command": "terraform.cloud.run.plan.downloadLog",
"when": "false"
},
{
"command": "terraform.cloud.run.viewPlan",
"when": "false"
},
{
"command": "terraform.cloud.runs.refresh",
"when": "false"
},
{
"command": "terraform.cloud.run.apply.downloadLog",
"when": "false"
},
{
"command": "terraform.cloud.run.viewApply",
"when": "false"
},
{
"command": "terraform.cloud.workspaces.filterByProject",
"when": "false"
},
{
"command": "terraform.cloud.workspaces.refresh",
"when": "false"
}
],
"view/title": [
{
"command": "terraform.modules.refreshList",
"when": "view == terraform.modules",
"group": "navigation"
},
{
"command": "terraform.providers.refreshList",
"when": "view == terraform.providers",
"group": "navigation"
},
{
"command": "terraform.initCurrent",
"when": "view == terraform.providers",
"group": "navigation"
},
{
"command": "terraform.cloud.workspaces.filterByProject",
"when": "view == terraform.cloud.workspaces",
"group": "navigation"
},
{
"command": "terraform.cloud.organization.picker",
"when": "view == terraform.cloud.workspaces",
"group": "navigation"
},
{
"command": "terraform.cloud.workspaces.refresh",
"when": "view == terraform.cloud.workspaces",
"group": "navigation"
},
{
"command": "terraform.cloud.runs.refresh",
"when": "view == terraform.cloud.runs",
"group": "navigation"
}
],
"view/item/context": [
{
"command": "terraform.modules.openDocumentation",
"when": "view == terraform.modules"
},
{
"command": "terraform.providers.openDocumentation",
"when": "view == terraform.providers && viewItem == moduleProviderHasDocs"
},
{
"command": "terraform.cloud.workspaces.viewInBrowser",
"when": "view == terraform.cloud.workspaces && viewItem =~ /hasLink/",
"group": "inline"
},
{
"command": "terraform.cloud.run.viewInBrowser",
"when": "view == terraform.cloud.runs && viewItem =~ /isRun/",
"group": "inline"
},
{
"command": "terraform.cloud.run.plan.downloadLog",
"when": "view == terraform.cloud.runs && viewItem =~ /hasRawPlan/",
"group": "inline"
},
{
"command": "terraform.cloud.run.viewPlan",
"when": "view == terraform.cloud.runs && viewItem =~ /hasStructuredPlan/",
"group": "inline"
},
{
"command": "terraform.cloud.run.apply.downloadLog",
"when": "view == terraform.cloud.runs && viewItem =~ /hasRawApply/",
"group": "inline"
},
{
"command": "terraform.cloud.run.viewApply",
"when": "view == terraform.cloud.runs && viewItem =~ /hasStructuredApply/",
"group": "inline"
}
]
},
"views": {
"terraform": [
{
"id": "terraform.providers",
"name": "Providers"
},
{
"id": "terraform.modules",
"name": "Module Calls"
}
],
"terraform-cloud": [
{
"id": "terraform.cloud.workspaces",
"name": "Workspaces",
"contextualTitle": "HCP Terraform workspaces"
},
{
"id": "terraform.cloud.runs",
"name": "Runs",
"contextualTitle": "HCP Terraform runs"
}
],
"terraform-cloud-panel": [
{
"id": "terraform.cloud.run.plan",
"name": "Plan",
"when": "terraform.cloud.run.viewingPlan"
},
{
"id": "terraform.cloud.run.apply",
"name": "Apply",
"when": "terraform.cloud.run.viewingApply"
}
]
},
"viewsContainers": {
"activitybar": [
{
"id": "terraform",
"title": "HashiCorp Terraform",
"icon": "assets/icons/vs_code_terraform.svg"
},
{
"id": "terraform-cloud",
"title": "HCP Terraform",
"icon": "assets/icons/vs_code_terraform_cloud.svg"
}
],
"panel": [
{
"id": "terraform-cloud-panel",
"title": "HCP Terraform",
"icon": "assets/icons/vs_code_terraform_cloud.svg"
}
]
},
"viewsWelcome": [
{
"view": "terraform.providers",
"contents": "This view requires the language server to be enabled:\n[Open Settings](command:terraform.openSettingsJson)",
"when": "config.terraform.languageServer.enable === false"
},
{
"view": "terraform.providers",
"contents": "There are no open Terraform files. Please open a Terraform configuration file to see installed providers.",
"when": "config.terraform.languageServer.enable && terraform.providers.documentOpened === false"
},
{
"view": "terraform.providers",
"contents": "The active document is not a Terraform file. Please open a Terraform configuration file to see installed providers.",
"when": "config.terraform.languageServer.enable && terraform.providers.documentIsTerraform === false"
},
{
"view": "terraform.providers",
"contents": "There are no installed providers found for the current open file.\n[Learn more about providers](https://www.terraform.io/docs/language/providers/index.html)",
"when": "config.terraform.languageServer.enable && terraform.providers.noProviders === true"
},
{
"view": "terraform.providers",
"contents": "The active editor cannot provide information about installed providers.\n[Learn more about providers](https://www.terraform.io/docs/language/providers/index.html)",
"when": "config.terraform.languageServer.enable && terraform.providers.noResponse === true"
},
{
"view": "terraform.modules",
"contents": "This view requires the language server to be enabled:\n[Open Settings](command:terraform.openSettingsJson)",
"when": "config.terraform.languageServer.enable === false"
},
{
"view": "terraform.modules",
"contents": "There are no open Terraform files. Please open a Terraform configuration file to see installed modules.",
"when": "config.terraform.languageServer.enable && terraform.modules.documentOpened === false"
},
{
"view": "terraform.modules",
"contents": "The active document is not a Terraform file. Please open a Terraform configuration file to see installed modules.",
"when": "config.terraform.languageServer.enable && terraform.modules.documentIsTerraform === false"
},
{
"view": "terraform.modules",
"contents": "There are no installed modules found for the current open file.\n[Learn more about modules](https://www.terraform.io/docs/language/modules/develop/index.html)",
"when": "config.terraform.languageServer.enable && terraform.modules.noModules === true"
},
{
"view": "terraform.modules",
"contents": "The active editor cannot provide information about installed modules.\n[Learn more about modules](https://www.terraform.io/docs/language/modules/develop/index.html)",
"when": "config.terraform.languageServer.enable && terraform.modules.noResponse === true"
},
{
"view": "terraform.cloud.workspaces",
      "contents": "In order to use HCP Terraform features, you need to be logged in\n[Log in to HCP Terraform](command:terraform.cloud.login)",
"when": "terraform.cloud.signed-in === false"
},
{
"view": "terraform.cloud.workspaces",
"contents": "No organizations found for this token. Please create a new HCP Terraform organization to get started\n[Create or select an organization](command:terraform.cloud.organization.picker)",
"when": "terraform.cloud.signed-in && !terraform.cloud.organizationsExist && !terraform.cloud.organizationsChosen"
},
{
"view": "terraform.cloud.workspaces",
      "contents": "Please choose an organization:\n[Select an organization](command:terraform.cloud.organization.picker)",
"when": "terraform.cloud.signed-in && terraform.cloud.organizationsExist && !terraform.cloud.organizationsChosen"
},
{
"view": "terraform.cloud.workspaces",
"contents": "There are no workspaces in this organization.\n[Create a new workspace](command:terraform.cloud.workspaces.picker)\n[Choose a different organization](command:terraform.cloud.organization.picker)",
"when": "terraform.cloud.signed-in && terraform.cloud.organizationsExist && terraform.cloud.organizationsChosen && !terraform.cloud.projectFilterUsed && !terraform.cloud.workspacesExist && !terraform.cloud.pendingOrgMembership"
},
{
"view": "terraform.cloud.workspaces",
"contents": "You have not yet accepted the invitation to this organization\n[See pending invitations](command:terraform.cloud.organization.viewInBrowser)\n[Choose a different organization](command:terraform.cloud.organization.picker)",
"when": "terraform.cloud.signed-in && terraform.cloud.organizationsExist && terraform.cloud.organizationsChosen && !terraform.cloud.projectFilterUsed && !terraform.cloud.workspacesExist && terraform.cloud.pendingOrgMembership"
},
{
"view": "terraform.cloud.workspaces",
"contents": "There are no workspaces in this project\n[Create a new workspace](command:terraform.cloud.workspaces.picker)\n[Clear filter](command:terraform.cloud.workspaces.resetProjectFilter)",
"when": "terraform.cloud.signed-in && terraform.cloud.organizationsExist && terraform.cloud.organizationsChosen && terraform.cloud.projectFilterUsed && !terraform.cloud.workspacesExist"
},
{
"view": "terraform.cloud.runs",
"contents": "Select a workspace to view a list of runs"
},
{
"view": "terraform.cloud.run.plan",
"contents": "Select a run to view a plan"
},
{
"view": "terraform.cloud.run.apply",
"contents": "Select a run to view an apply"
}
]
},
"scripts": {
"prepare": "npm run download:artifacts && cd src/test/e2e && npm install",
"compile": "webpack",
"compile:test": "tsc --project ./",
"compile:test:watch": "tsc --project ./ --watch",
"compile:prod": "webpack --mode production --devtool hidden-source-map",
"watch": "webpack --watch --mode development",
"web": "npm run compile && vscode-test-web --browserType=chromium --extensionDevelopmentPath=. ./testFixture",
"web:serve": "npx serve --cors -l 5001",
"web:tunnel": "npx localtunnel -p 5001",
"download:artifacts": "ts-node ./build/downloader.ts",
"vscode:prepublish": "npm run compile:prod",
"package": "vsce package",
"test": "npm run compile:test && vscode-test",
"lint": "eslint src --ext ts",
"prettier": "prettier \"**/*.+(js|json|ts)\"",
"format": "npm run prettier -- --write",
"check-format": "npm run prettier -- --check",
"wdio": "npm run compile && wdio run ./src/test/e2e/wdio.conf.ts",
"changelog:build": "changie batch auto && changie merge && npm i --package-lock-only"
},
"dependencies": {
"@vscode/extension-telemetry": "^0.4.9",
"@zodios/core": "^10.9.2",
"@zodios/plugins": "^10.6.0",
"axios": "^1.4.0",
"semver": "^7.5.4",
"vscode-languageclient": "^9.0.1",
"vscode-uri": "^3.0.7",
"which": "^3.0.1",
"zod": "^3.21.4"
},
"devDependencies": {
"@hashicorp/js-releases": "^1.7.1",
"@playwright/browser-chromium": "^1.39.0",
"@types/chai": "^4.3.5",
"@types/glob": "^8.1.0",
"@types/mocha": "^10.0.1",
"@types/node": "^18.x",
"@types/vscode": "~1.86",
"@types/webpack-env": "^1.18.0",
"@types/which": "^3.0.0",
"@typescript-eslint/eslint-plugin": "^5.59.5",
"@typescript-eslint/parser": "^5.59.5",
"@vscode/test-cli": "^0.0.9",
"@vscode/test-electron": "^2.3.9",
"@vscode/test-web": "^0.0.50",
"@vscode/vsce": "^2.19.0",
"@wdio/cli": "^8.38.1",
"@wdio/local-runner": "^8.38.0",
"@wdio/mocha-framework": "^8.38.0",
"@wdio/spec-reporter": "^8.38.0",
"assert": "^2.0.0",
"chai": "^4.3.7",
"esbuild-loader": "^4.1.0",
"eslint": "^8.40.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-prettier": "^4.2.1",
"mocha": "^10.2.0",
"msw": "^2.0.11",
"prettier": "^2.8.8",
"process": "^0.11.10",
"ts-loader": "^9.4.2",
"ts-node": "^10.9.2",
"typescript": "^5.1.3",
"wdio-vscode-service": "^6.0.3",
"wdio-wait-for": "^3.0.11",
"webpack": "^5.82.0",
"webpack-cli": "^5.1.1"
},
"__metadata": {
"id": "4a23294b-fd16-4c51-9759-da9936474cf8",
"publisherId": "a8dd781d-e144-400a-943c-417165981be2",
"publisherDisplayName": "HashiCorp",
"targetPlatform": "darwin-arm64",
"isApplicationScoped": false,
"isPreReleaseVersion": false,
"hasPreReleaseVersion": false,
"installedTimestamp": 1719534015362,
"pinned": false,
"preRelease": false,
"source": "gallery"
}
}

View File

@ -1,61 +0,0 @@
{
"For Each": {
"prefix": "fore",
"body": [
"for_each = {",
"\t${1:key} = \"${2:value}\"",
"}"
],
"description": "The for_each meta-argument accepts a map or a set of strings, and creates an instance for each item in that map or set."
},
"Module": {
"prefix": "module",
"body": [
"module \"${1:name}\" {",
"\tsource = \"$2\"",
"\t$3",
"}"
]
},
"Output": {
"prefix": "output",
"body": [
"output \"${1:name}\" {",
"\tvalue = \"$2\"",
"}"
]
},
"Provisioner": {
"prefix": "provisioner",
"body": [
"provisioner \"${1:name}\" {",
"${2}",
"}"
],
"description": "Provisioners can be used to model specific actions on the local machine or on a remote machine in order to prepare servers or other infrastructure objects for service."
},
"Empty variable": {
"prefix": "vare",
"body": [
"variable \"${1:name}\" {",
"\ttype = ${2|string,number,bool|}",
"\t${3:description = \"${4:(optional) describe your variable}\"}",
"}"
],
"description": "Variable (empty)"
},
"Map variable": {
"prefix": "varm",
"body": [
"variable \"${1:name}\" {",
"\ttype = map(${2|string,number,bool|})",
"\t${3:description = \"${4:(optional) describe your variable}\"}",
"\tdefault = {",
"\t\t${5:key1} = \"${6:val1}\"",
"\t\t${7:key2} = \"${8:val2}\"",
"\t}",
"}"
],
"description": "Variable (map)"
}
}

View File

@ -1,791 +0,0 @@
{
"scopeName": "source.hcl",
"name": "HashiCorp HCL",
"uuid": "a14187be-98d8-42c1-ac89-bb5eaccf911e",
"fileTypes": [
"hcl"
],
"patterns": [
{
"include": "#comments"
},
{
"include": "#attribute_definition"
},
{
"include": "#block"
},
{
"include": "#expressions"
}
],
"repository": {
"attribute_access": {
"begin": "\\.(?!\\*)",
"end": "[[:alpha:]][\\w-]*|\\d*",
"comment": "Matches traversal attribute access such as .attr",
"beginCaptures": {
"0": {
"name": "keyword.operator.accessor.hcl"
}
},
"endCaptures": {
"0": {
"patterns": [
{
"match": "(?!null|false|true)[[:alpha:]][\\w-]*",
"comment": "Attribute name",
"name": "variable.other.member.hcl"
},
{
"match": "\\d+",
"comment": "Optional attribute index",
"name": "constant.numeric.integer.hcl"
}
]
}
}
},
"attribute_definition": {
"name": "variable.declaration.hcl",
"match": "(\\()?(\\b(?!null\\b|false\\b|true\\b)[[:alpha:]][[:alnum:]_-]*)(\\))?\\s*(\\=(?!\\=|\\>))\\s*",
"comment": "Identifier \"=\" with optional parens",
"captures": {
"1": {
"name": "punctuation.section.parens.begin.hcl"
},
"2": {
"name": "variable.other.readwrite.hcl"
},
"3": {
"name": "punctuation.section.parens.end.hcl"
},
"4": {
"name": "keyword.operator.assignment.hcl"
}
}
},
"attribute_splat": {
"begin": "\\.",
"end": "\\*",
"comment": "Legacy attribute-only splat",
"beginCaptures": {
"0": {
"name": "keyword.operator.accessor.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.operator.splat.hcl"
}
}
},
"block": {
"name": "meta.block.hcl",
"begin": "([\\w][\\-\\w]*)([^?{\\r\\n]*)(\\{)",
"end": "\\}",
"comment": "This will match HCL blocks like `thing1 \"one\" \"two\" {` or `thing2 {`",
"beginCaptures": {
"1": {
"patterns": [
{
"match": "\\b(?!null|false|true)[[:alpha:]][[:alnum:]_-]*\\b",
"comment": "Block type",
"name": "entity.name.type.hcl"
}
]
},
"2": {
"patterns": [
{
"match": "\\\"[^\\\"\\r\\n]*\\\"",
"comment": "Block label (String Literal)",
"name": "variable.other.enummember.hcl"
},
{
"match": "[[:alpha:]][[:alnum:]_-]*",
              "comment": "Block label (Identifier)",
"name": "variable.other.enummember.hcl"
}
]
},
"3": {
"name": "punctuation.section.block.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.block.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#attribute_definition"
},
{
"include": "#expressions"
},
{
"include": "#block"
}
]
},
"block_inline_comments": {
"name": "comment.block.hcl",
"begin": "/\\*",
"end": "\\*/",
"comment": "Inline comments start with the /* sequence and end with the */ sequence, and may have any characters within except the ending sequence. An inline comment is considered equivalent to a whitespace sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"brackets": {
"begin": "\\[",
"end": "\\]",
"beginCaptures": {
"0": {
"name": "punctuation.section.brackets.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.brackets.end.hcl"
}
},
"patterns": [
{
"name": "keyword.operator.splat.hcl",
"match": "\\*",
"comment": "Splat operator"
},
{
"include": "#comma"
},
{
"include": "#comments"
},
{
"include": "#inline_for_expression"
},
{
"include": "#inline_if_expression"
},
{
"include": "#expressions"
},
{
"include": "#local_identifiers"
}
]
},
"char_escapes": {
"name": "constant.character.escape.hcl",
"match": "\\\\[nrt\"\\\\]|\\\\u(\\h{8}|\\h{4})",
"comment": "Character Escapes"
},
"comma": {
"name": "punctuation.separator.hcl",
"match": "\\,",
"comment": "Commas - used in certain expressions"
},
"comments": {
"patterns": [
{
"include": "#hash_line_comments"
},
{
"include": "#double_slash_line_comments"
},
{
"include": "#block_inline_comments"
}
]
},
"double_slash_line_comments": {
"name": "comment.line.double-slash.hcl",
"begin": "//",
"end": "$\\n?",
"comment": "Line comments start with // sequence and end with the next newline sequence. A line comment is considered equivalent to a newline sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"expressions": {
"patterns": [
{
"include": "#literal_values"
},
{
"include": "#operators"
},
{
"include": "#tuple_for_expression"
},
{
"include": "#object_for_expression"
},
{
"include": "#brackets"
},
{
"include": "#objects"
},
{
"include": "#attribute_access"
},
{
"include": "#attribute_splat"
},
{
"include": "#functions"
},
{
"include": "#parens"
}
]
},
"for_expression_body": {
"patterns": [
{
"name": "keyword.operator.word.hcl",
"match": "\\bin\\b",
"comment": "in keyword"
},
{
"name": "keyword.control.conditional.hcl",
"match": "\\bif\\b",
"comment": "if keyword"
},
{
"name": "keyword.operator.hcl",
"match": "\\:"
},
{
"include": "#expressions"
},
{
"include": "#comments"
},
{
"include": "#comma"
},
{
"include": "#local_identifiers"
}
]
},
"functions": {
"name": "meta.function-call.hcl",
"begin": "([:\\-\\w]+)(\\()",
"end": "\\)",
"comment": "Built-in function calls",
"beginCaptures": {
"1": {
"patterns": [
{
"match": "\\b[[:alpha:]][\\w_-]*::([[:alpha:]][\\w_-]*::)?[[:alpha:]][\\w_-]*\\b",
"name": "support.function.namespaced.hcl"
},
{
"match": "\\b[[:alpha:]][\\w_-]*\\b",
"name": "support.function.builtin.hcl"
}
]
},
"2": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.parens.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#expressions"
},
{
"include": "#comma"
}
]
},
"hash_line_comments": {
"name": "comment.line.number-sign.hcl",
"begin": "#",
"end": "$\\n?",
"comment": "Line comments start with # sequence and end with the next newline sequence. A line comment is considered equivalent to a newline sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"hcl_type_keywords": {
"name": "storage.type.hcl",
"match": "\\b(any|string|number|bool|list|set|map|tuple|object)\\b",
"comment": "Type keywords known to HCL."
},
"heredoc": {
"name": "string.unquoted.heredoc.hcl",
"begin": "(\\<\\<\\-?)\\s*(\\w+)\\s*$",
"end": "^\\s*\\2\\s*$",
"comment": "String Heredoc",
"beginCaptures": {
"1": {
"name": "keyword.operator.heredoc.hcl"
},
"2": {
"name": "keyword.control.heredoc.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.control.heredoc.hcl"
}
},
"patterns": [
{
"include": "#string_interpolation"
}
]
},
"inline_for_expression": {
"match": "(for)\\b(.*)\\n",
"captures": {
"1": {
"name": "keyword.control.hcl"
},
"2": {
"patterns": [
{
"match": "\\=\\>",
"name": "storage.type.function.hcl"
},
{
"include": "#for_expression_body"
}
]
}
}
},
"inline_if_expression": {
"begin": "(if)\\b",
"end": "\\n",
"beginCaptures": {
"1": {
"name": "keyword.control.conditional.hcl"
}
},
"patterns": [
{
"include": "#expressions"
},
{
"include": "#comments"
},
{
"include": "#comma"
},
{
"include": "#local_identifiers"
}
]
},
"language_constants": {
"name": "constant.language.hcl",
"match": "\\b(true|false|null)\\b",
"comment": "Language Constants"
},
"literal_values": {
"patterns": [
{
"include": "#numeric_literals"
},
{
"include": "#language_constants"
},
{
"include": "#string_literals"
},
{
"include": "#heredoc"
},
{
"include": "#hcl_type_keywords"
}
]
},
"local_identifiers": {
"name": "variable.other.readwrite.hcl",
"match": "\\b(?!null|false|true)[[:alpha:]][[:alnum:]_-]*\\b",
"comment": "Local Identifiers"
},
"numeric_literals": {
"patterns": [
{
"name": "constant.numeric.float.hcl",
"match": "\\b\\d+([Ee][+-]?)\\d+\\b",
"comment": "Integer, no fraction, optional exponent",
"captures": {
"1": {
"name": "punctuation.separator.exponent.hcl"
}
}
},
{
"name": "constant.numeric.float.hcl",
"match": "\\b\\d+(\\.)\\d+(?:([Ee][+-]?)\\d+)?\\b",
"comment": "Integer, fraction, optional exponent",
"captures": {
"1": {
"name": "punctuation.separator.decimal.hcl"
},
"2": {
"name": "punctuation.separator.exponent.hcl"
}
}
},
{
"name": "constant.numeric.integer.hcl",
"match": "\\b\\d+\\b",
"comment": "Integers"
}
]
},
"object_for_expression": {
"begin": "(\\{)\\s?(for)\\b",
"end": "\\}",
"beginCaptures": {
"1": {
"name": "punctuation.section.braces.begin.hcl"
},
"2": {
"name": "keyword.control.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.braces.end.hcl"
}
},
"patterns": [
{
"name": "storage.type.function.hcl",
"match": "\\=\\>"
},
{
"include": "#for_expression_body"
}
]
},
"object_key_values": {
"patterns": [
{
"include": "#comments"
},
{
"include": "#literal_values"
},
{
"include": "#operators"
},
{
"include": "#tuple_for_expression"
},
{
"include": "#object_for_expression"
},
{
"include": "#heredoc"
},
{
"include": "#functions"
}
]
},
"objects": {
"name": "meta.braces.hcl",
"begin": "\\{",
"end": "\\}",
"beginCaptures": {
"0": {
"name": "punctuation.section.braces.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.braces.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#objects"
},
{
"include": "#inline_for_expression"
},
{
"include": "#inline_if_expression"
},
{
"match": "\\b((?!null|false|true)[[:alpha:]][[:alnum:]_-]*)\\s*(\\=(?!=))\\s*",
"comment": "Literal, named object key",
"captures": {
"1": {
"name": "meta.mapping.key.hcl variable.other.readwrite.hcl"
},
"2": {
"name": "keyword.operator.assignment.hcl"
}
}
},
{
"match": "^\\s*((\").*(\"))\\s*(\\=)\\s*",
"comment": "String object key",
"captures": {
"1": {
"name": "meta.mapping.key.hcl string.quoted.double.hcl"
},
"2": {
"name": "punctuation.definition.string.begin.hcl"
},
"3": {
"name": "punctuation.definition.string.end.hcl"
},
"4": {
"name": "keyword.operator.hcl"
}
}
},
{
"name": "meta.mapping.key.hcl",
"begin": "^\\s*\\(",
"end": "(\\))\\s*(=|:)\\s*",
"comment": "Computed object key (any expression between parens)",
"beginCaptures": {
"0": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"1": {
"name": "punctuation.section.parens.end.hcl"
},
"2": {
"name": "keyword.operator.hcl"
}
},
"patterns": [
{
"include": "#attribute_access"
},
{
"include": "#attribute_splat"
}
]
},
{
"include": "#object_key_values"
}
]
},
"operators": {
"patterns": [
{
"name": "keyword.operator.hcl",
"match": "\\>\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\<\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\=\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\!\\="
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\+"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\-"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\*"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\/"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\%"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\&\\&"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\|\\|"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\!"
},
{
"name": "keyword.operator.hcl",
"match": "\\>"
},
{
"name": "keyword.operator.hcl",
"match": "\\<"
},
{
"name": "keyword.operator.hcl",
"match": "\\?"
},
{
"name": "keyword.operator.hcl",
"match": "\\.\\.\\."
},
{
"name": "keyword.operator.hcl",
"match": "\\:"
},
{
"name": "keyword.operator.hcl",
"match": "\\=\\>"
}
]
},
"parens": {
"begin": "\\(",
"end": "\\)",
"comment": "Parens - matched *after* function syntax",
"beginCaptures": {
"0": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.parens.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#expressions"
}
]
},
"string_interpolation": {
"name": "meta.interpolation.hcl",
"begin": "(?<![%$])([%$]{)",
"end": "\\}",
"comment": "String interpolation",
"beginCaptures": {
"1": {
"name": "keyword.other.interpolation.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.other.interpolation.end.hcl"
}
},
"patterns": [
{
"name": "keyword.operator.template.left.trim.hcl",
"match": "\\~\\s",
"comment": "Trim left whitespace"
},
{
"name": "keyword.operator.template.right.trim.hcl",
"match": "\\s\\~",
"comment": "Trim right whitespace"
},
{
"name": "keyword.control.hcl",
"match": "\\b(if|else|endif|for|in|endfor)\\b",
"comment": "if/else/endif and for/in/endfor directives"
},
{
"include": "#expressions"
},
{
"include": "#local_identifiers"
}
]
},
"string_literals": {
"name": "string.quoted.double.hcl",
"begin": "\"",
"end": "\"",
"comment": "Strings",
"beginCaptures": {
"0": {
"name": "punctuation.definition.string.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.definition.string.end.hcl"
}
},
"patterns": [
{
"include": "#string_interpolation"
},
{
"include": "#char_escapes"
}
]
},
"tuple_for_expression": {
"begin": "(\\[)\\s?(for)\\b",
"end": "\\]",
"beginCaptures": {
"1": {
"name": "punctuation.section.brackets.begin.hcl"
},
"2": {
"name": "keyword.control.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.brackets.end.hcl"
}
},
"patterns": [
{
"include": "#for_expression_body"
}
]
}
}
}

View File

@ -1,813 +0,0 @@
{
"scopeName": "source.hcl.terraform",
"name": "HashiCorp Terraform",
"uuid": "d9db10d3-db70-48aa-8d44-f96ccbaa29f3",
"fileTypes": [
"tf",
"tfvars"
],
"patterns": [
{
"include": "#comments"
},
{
"include": "#attribute_definition"
},
{
"include": "#block"
},
{
"include": "#expressions"
}
],
"repository": {
"attribute_access": {
"begin": "\\.(?!\\*)",
"end": "[[:alpha:]][\\w-]*|\\d*",
"comment": "Matches traversal attribute access such as .attr",
"beginCaptures": {
"0": {
"name": "keyword.operator.accessor.hcl"
}
},
"endCaptures": {
"0": {
"patterns": [
{
"match": "(?!null|false|true)[[:alpha:]][\\w-]*",
"comment": "Attribute name",
"name": "variable.other.member.hcl"
},
{
"match": "\\d+",
"comment": "Optional attribute index",
"name": "constant.numeric.integer.hcl"
}
]
}
}
},
"attribute_definition": {
"name": "variable.declaration.hcl",
"match": "(\\()?(\\b(?!null\\b|false\\b|true\\b)[[:alpha:]][[:alnum:]_-]*)(\\))?\\s*(\\=(?!\\=|\\>))\\s*",
"comment": "Identifier \"=\" with optional parens",
"captures": {
"1": {
"name": "punctuation.section.parens.begin.hcl"
},
"2": {
"name": "variable.other.readwrite.hcl"
},
"3": {
"name": "punctuation.section.parens.end.hcl"
},
"4": {
"name": "keyword.operator.assignment.hcl"
}
}
},
"attribute_splat": {
"begin": "\\.",
"end": "\\*",
"comment": "Legacy attribute-only splat",
"beginCaptures": {
"0": {
"name": "keyword.operator.accessor.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.operator.splat.hcl"
}
}
},
"block": {
"name": "meta.block.hcl",
"begin": "([\\w][\\-\\w]*)([\\s\\\"\\-\\w]*)(\\{)",
"end": "\\}",
"comment": "This will match Terraform blocks like `resource \"aws_instance\" \"web\" {` or `module {`",
"beginCaptures": {
"1": {
"patterns": [
{
"match": "\\bdata|check|import|locals|module|output|provider|resource|terraform|variable\\b",
"comment": "Known block type",
"name": "entity.name.type.terraform"
},
{
"match": "\\b(?!null|false|true)[[:alpha:]][[:alnum:]_-]*\\b",
"comment": "Unknown block type",
"name": "entity.name.type.hcl"
}
]
},
"2": {
"patterns": [
{
"match": "[\\\"\\-\\w]+",
"comment": "Block label",
"name": "variable.other.enummember.hcl"
}
]
},
"3": {
"name": "punctuation.section.block.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.block.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#attribute_definition"
},
{
"include": "#block"
},
{
"include": "#expressions"
}
]
},
"block_inline_comments": {
"name": "comment.block.hcl",
"begin": "/\\*",
"end": "\\*/",
"comment": "Inline comments start with the /* sequence and end with the */ sequence, and may have any characters within except the ending sequence. An inline comment is considered equivalent to a whitespace sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"brackets": {
"begin": "\\[",
"end": "\\]",
"beginCaptures": {
"0": {
"name": "punctuation.section.brackets.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.brackets.end.hcl"
}
},
"patterns": [
{
"name": "keyword.operator.splat.hcl",
"match": "\\*",
"comment": "Splat operator"
},
{
"include": "#comma"
},
{
"include": "#comments"
},
{
"include": "#inline_for_expression"
},
{
"include": "#inline_if_expression"
},
{
"include": "#expressions"
},
{
"include": "#local_identifiers"
}
]
},
"char_escapes": {
"name": "constant.character.escape.hcl",
"match": "\\\\[nrt\"\\\\]|\\\\u(\\h{8}|\\h{4})",
"comment": "Character Escapes"
},
"comma": {
"name": "punctuation.separator.hcl",
"match": "\\,",
"comment": "Commas - used in certain expressions"
},
"comments": {
"patterns": [
{
"include": "#hash_line_comments"
},
{
"include": "#double_slash_line_comments"
},
{
"include": "#block_inline_comments"
}
]
},
"double_slash_line_comments": {
"name": "comment.line.double-slash.hcl",
"begin": "//",
"end": "$\\n?",
"comment": "Line comments start with // sequence and end with the next newline sequence. A line comment is considered equivalent to a newline sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"expressions": {
"patterns": [
{
"include": "#literal_values"
},
{
"include": "#operators"
},
{
"include": "#tuple_for_expression"
},
{
"include": "#object_for_expression"
},
{
"include": "#brackets"
},
{
"include": "#objects"
},
{
"include": "#attribute_access"
},
{
"include": "#attribute_splat"
},
{
"include": "#functions"
},
{
"include": "#parens"
}
]
},
"for_expression_body": {
"patterns": [
{
"name": "keyword.operator.word.hcl",
"match": "\\bin\\b",
"comment": "in keyword"
},
{
"name": "keyword.control.conditional.hcl",
"match": "\\bif\\b",
"comment": "if keyword"
},
{
"name": "keyword.operator.hcl",
"match": "\\:"
},
{
"include": "#expressions"
},
{
"include": "#comments"
},
{
"include": "#comma"
},
{
"include": "#local_identifiers"
}
]
},
"functions": {
"name": "meta.function-call.hcl",
"begin": "([:\\-\\w]+)(\\()",
"end": "\\)",
"comment": "Built-in function calls",
"beginCaptures": {
"1": {
"patterns": [
{
"match": "\\b(core::)?(abs|abspath|alltrue|anytrue|base64decode|base64encode|base64gzip|base64sha256|base64sha512|basename|bcrypt|can|ceil|chomp|chunklist|cidrhost|cidrnetmask|cidrsubnet|cidrsubnets|coalesce|coalescelist|compact|concat|contains|csvdecode|dirname|distinct|element|endswith|file|filebase64|filebase64sha256|filebase64sha512|fileexists|filemd5|fileset|filesha1|filesha256|filesha512|flatten|floor|format|formatdate|formatlist|indent|index|join|jsondecode|jsonencode|keys|length|log|lookup|lower|matchkeys|max|md5|merge|min|nonsensitive|one|parseint|pathexpand|plantimestamp|pow|range|regex|regexall|replace|reverse|rsadecrypt|sensitive|setintersection|setproduct|setsubtract|setunion|sha1|sha256|sha512|signum|slice|sort|split|startswith|strcontains|strrev|substr|sum|templatefile|textdecodebase64|textencodebase64|timeadd|timecmp|timestamp|title|tobool|tolist|tomap|tonumber|toset|tostring|transpose|trim|trimprefix|trimspace|trimsuffix|try|upper|urlencode|uuid|uuidv5|values|yamldecode|yamlencode|zipmap)\\b",
"name": "support.function.builtin.terraform"
},
{
"match": "\\bprovider::[[:alpha:]][\\w_-]*::[[:alpha:]][\\w_-]*\\b",
"name": "support.function.provider.terraform"
}
]
},
"2": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.parens.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#expressions"
},
{
"include": "#comma"
}
]
},
"hash_line_comments": {
"name": "comment.line.number-sign.hcl",
"begin": "#",
"end": "$\\n?",
"comment": "Line comments start with # sequence and end with the next newline sequence. A line comment is considered equivalent to a newline sequence",
"captures": {
"0": {
"name": "punctuation.definition.comment.hcl"
}
}
},
"hcl_type_keywords": {
"name": "storage.type.hcl",
"match": "\\b(any|string|number|bool|list|set|map|tuple|object)\\b",
"comment": "Type keywords known to HCL."
},
"heredoc": {
"name": "string.unquoted.heredoc.hcl",
"begin": "(\\<\\<\\-?)\\s*(\\w+)\\s*$",
"end": "^\\s*\\2\\s*$",
"comment": "String Heredoc",
"beginCaptures": {
"1": {
"name": "keyword.operator.heredoc.hcl"
},
"2": {
"name": "keyword.control.heredoc.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.control.heredoc.hcl"
}
},
"patterns": [
{
"include": "#string_interpolation"
}
]
},
"inline_for_expression": {
"match": "(for)\\b(.*)\\n",
"captures": {
"1": {
"name": "keyword.control.hcl"
},
"2": {
"patterns": [
{
"match": "\\=\\>",
"name": "storage.type.function.hcl"
},
{
"include": "#for_expression_body"
}
]
}
}
},
"inline_if_expression": {
"begin": "(if)\\b",
"end": "\\n",
"beginCaptures": {
"1": {
"name": "keyword.control.conditional.hcl"
}
},
"patterns": [
{
"include": "#expressions"
},
{
"include": "#comments"
},
{
"include": "#comma"
},
{
"include": "#local_identifiers"
}
]
},
"language_constants": {
"name": "constant.language.hcl",
"match": "\\b(true|false|null)\\b",
"comment": "Language Constants"
},
"literal_values": {
"patterns": [
{
"include": "#numeric_literals"
},
{
"include": "#language_constants"
},
{
"include": "#string_literals"
},
{
"include": "#heredoc"
},
{
"include": "#hcl_type_keywords"
},
{
"include": "#named_value_references"
}
]
},
"local_identifiers": {
"name": "variable.other.readwrite.hcl",
"match": "\\b(?!null|false|true)[[:alpha:]][[:alnum:]_-]*\\b",
"comment": "Local Identifiers"
},
"named_value_references": {
"name": "variable.other.readwrite.terraform",
"match": "\\b(var|local|module|data|path|terraform)\\b",
"comment": "Constant values available only to Terraform."
},
"numeric_literals": {
"patterns": [
{
"name": "constant.numeric.float.hcl",
"match": "\\b\\d+([Ee][+-]?)\\d+\\b",
              "comment": "Integer, no fraction, with exponent",
"captures": {
"1": {
"name": "punctuation.separator.exponent.hcl"
}
}
},
{
"name": "constant.numeric.float.hcl",
"match": "\\b\\d+(\\.)\\d+(?:([Ee][+-]?)\\d+)?\\b",
"comment": "Integer, fraction, optional exponent",
"captures": {
"1": {
"name": "punctuation.separator.decimal.hcl"
},
"2": {
"name": "punctuation.separator.exponent.hcl"
}
}
},
{
"name": "constant.numeric.integer.hcl",
"match": "\\b\\d+\\b",
"comment": "Integers"
}
]
},
"object_for_expression": {
"begin": "(\\{)\\s?(for)\\b",
"end": "\\}",
"beginCaptures": {
"1": {
"name": "punctuation.section.braces.begin.hcl"
},
"2": {
"name": "keyword.control.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.braces.end.hcl"
}
},
"patterns": [
{
"name": "storage.type.function.hcl",
"match": "\\=\\>"
},
{
"include": "#for_expression_body"
}
]
},
"object_key_values": {
"patterns": [
{
"include": "#comments"
},
{
"include": "#literal_values"
},
{
"include": "#operators"
},
{
"include": "#tuple_for_expression"
},
{
"include": "#object_for_expression"
},
{
"include": "#heredoc"
},
{
"include": "#functions"
}
]
},
"objects": {
"name": "meta.braces.hcl",
"begin": "\\{",
"end": "\\}",
"beginCaptures": {
"0": {
"name": "punctuation.section.braces.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.braces.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#objects"
},
{
"include": "#inline_for_expression"
},
{
"include": "#inline_if_expression"
},
{
"match": "\\b((?!null|false|true)[[:alpha:]][[:alnum:]_-]*)\\s*(\\=\\>?)\\s*",
"comment": "Literal, named object key",
"captures": {
"1": {
"name": "meta.mapping.key.hcl variable.other.readwrite.hcl"
},
"2": {
"name": "keyword.operator.assignment.hcl",
"patterns": [
{
"match": "\\=\\>",
"name": "storage.type.function.hcl"
}
]
}
}
},
{
"match": "\\b((\").*(\"))\\s*(\\=)\\s*",
"comment": "String object key",
"captures": {
"0": {
"patterns": [
{
"include": "#named_value_references"
}
]
},
"1": {
"name": "meta.mapping.key.hcl string.quoted.double.hcl"
},
"2": {
"name": "punctuation.definition.string.begin.hcl"
},
"3": {
"name": "punctuation.definition.string.end.hcl"
},
"4": {
"name": "keyword.operator.hcl"
}
}
},
{
"name": "meta.mapping.key.hcl",
"begin": "^\\s*\\(",
"end": "(\\))\\s*(=|:)\\s*",
"comment": "Computed object key (any expression between parens)",
"beginCaptures": {
"0": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"1": {
"name": "punctuation.section.parens.end.hcl"
},
"2": {
"name": "keyword.operator.hcl"
}
},
"patterns": [
{
"include": "#named_value_references"
},
{
"include": "#attribute_access"
}
]
},
{
"include": "#object_key_values"
}
]
},
"operators": {
"patterns": [
{
"name": "keyword.operator.hcl",
"match": "\\>\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\<\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\=\\="
},
{
"name": "keyword.operator.hcl",
"match": "\\!\\="
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\+"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\-"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\*"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\/"
},
{
"name": "keyword.operator.arithmetic.hcl",
"match": "\\%"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\&\\&"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\|\\|"
},
{
"name": "keyword.operator.logical.hcl",
"match": "\\!"
},
{
"name": "keyword.operator.hcl",
"match": "\\>"
},
{
"name": "keyword.operator.hcl",
"match": "\\<"
},
{
"name": "keyword.operator.hcl",
"match": "\\?"
},
{
"name": "keyword.operator.hcl",
"match": "\\.\\.\\."
},
{
"name": "keyword.operator.hcl",
"match": "\\:"
},
{
"name": "keyword.operator.hcl",
"match": "\\=\\>"
}
]
},
"parens": {
"begin": "\\(",
"end": "\\)",
"comment": "Parens - matched *after* function syntax",
"beginCaptures": {
"0": {
"name": "punctuation.section.parens.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.parens.end.hcl"
}
},
"patterns": [
{
"include": "#comments"
},
{
"include": "#expressions"
}
]
},
"string_interpolation": {
"name": "meta.interpolation.hcl",
"begin": "(?<![%$])([%$]{)",
"end": "\\}",
"comment": "String interpolation",
"beginCaptures": {
"1": {
"name": "keyword.other.interpolation.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "keyword.other.interpolation.end.hcl"
}
},
"patterns": [
{
"name": "keyword.operator.template.left.trim.hcl",
"match": "\\~\\s",
"comment": "Trim left whitespace"
},
{
"name": "keyword.operator.template.right.trim.hcl",
"match": "\\s\\~",
"comment": "Trim right whitespace"
},
{
"name": "keyword.control.hcl",
"match": "\\b(if|else|endif|for|in|endfor)\\b",
"comment": "if/else/endif and for/in/endfor directives"
},
{
"include": "#expressions"
},
{
"include": "#local_identifiers"
}
]
},
"string_literals": {
"name": "string.quoted.double.hcl",
"begin": "\"",
"end": "\"",
"comment": "Strings",
"beginCaptures": {
"0": {
"name": "punctuation.definition.string.begin.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.definition.string.end.hcl"
}
},
"patterns": [
{
"include": "#string_interpolation"
},
{
"include": "#char_escapes"
}
]
},
"tuple_for_expression": {
"begin": "(\\[)\\s?(for)\\b",
"end": "\\]",
"beginCaptures": {
"1": {
"name": "punctuation.section.brackets.begin.hcl"
},
"2": {
"name": "keyword.control.hcl"
}
},
"endCaptures": {
"0": {
"name": "punctuation.section.brackets.end.hcl"
}
},
"patterns": [
{
"include": "#for_expression_body"
}
]
}
}
}

View File

@ -1,14 +0,0 @@
[flake8]
ignore = W,BLK,
E24,E121,E123,E125,E126,E221,E226,E266,E704,
E265,E722,E501,E731,E306,E401,E302,E222,E303,
E402,E305,E261,E262,E203
exclude =
__pycache__,
.eggs,
.git,
.tox,
.nox,
build,
dist,
src/test/python_tests/test_data

View File

@ -1,45 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<PackageManifest Version="2.0.0" xmlns="http://schemas.microsoft.com/developer/vsx-schema/2011" xmlns:d="http://schemas.microsoft.com/developer/vsx-schema-design/2011">
<Metadata>
<Identity Language="en-US" Id="debugpy" Version="2024.6.0" Publisher="ms-python" TargetPlatform="darwin-arm64"/>
<DisplayName>Python Debugger</DisplayName>
<Description xml:space="preserve">Python Debugger extension using debugpy.</Description>
<Tags>python,debugger,debugpy,debuggers</Tags>
<Categories>Debuggers</Categories>
<GalleryFlags>Public</GalleryFlags>
<Properties>
<Property Id="Microsoft.VisualStudio.Code.Engine" Value="^1.87.0" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionDependencies" Value="ms-python.python" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionPack" Value="" />
<Property Id="Microsoft.VisualStudio.Code.ExtensionKind" Value="workspace" />
<Property Id="Microsoft.VisualStudio.Code.LocalizedLanguages" Value="" />
<Property Id="Microsoft.VisualStudio.Services.Links.Source" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Getstarted" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.GitHub" Value="https://github.com/microsoft/vscode-python-debugger.git" />
<Property Id="Microsoft.VisualStudio.Services.Links.Support" Value="https://github.com/Microsoft/vscode-python-debugger/issues" />
<Property Id="Microsoft.VisualStudio.Services.Links.Learn" Value="https://github.com/Microsoft/vscode-python-debugger" />
<Property Id="Microsoft.VisualStudio.Services.GitHubFlavoredMarkdown" Value="true" />
<Property Id="Microsoft.VisualStudio.Services.Content.Pricing" Value="Free"/>
</Properties>
<License>extension/LICENSE.txt</License>
<Icon>extension/icon.png</Icon>
</Metadata>
<Installation>
<InstallationTarget Id="Microsoft.VisualStudio.Code"/>
</Installation>
<Dependencies/>
<Assets>
<Asset Type="Microsoft.VisualStudio.Code.Manifest" Path="extension/package.json" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.Details" Path="extension/README.md" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Content.License" Path="extension/LICENSE.txt" Addressable="true" />
<Asset Type="Microsoft.VisualStudio.Services.Icons.Default" Path="extension/icon.png" Addressable="true" />
</Assets>
</PackageManifest>

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) Microsoft Corporation.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,55 +0,0 @@
# Python Debugger extension for Visual Studio Code
A [Visual Studio Code](https://code.visualstudio.com/) [extension](https://marketplace.visualstudio.com/VSCode) that supports Python debugging with debugpy. Python Debugger provides a seamless debugging experience by allowing you to set breakpoints, step through code, inspect variables, and perform other essential debugging tasks. The debugpy extension offers debugging support for various types of Python applications including scripts, web applications, remote processes, and multi-threaded processes.
Note:
- The Python extension offers the python debugger extension as an optional installation, including it during the setup process.
- This extension is supported for all [actively supported versions](https://devguide.python.org/#status-of-python-branches) of the Python language (i.e., Python >= 3.7).
## Purpose
The main intent of this extension is to offer:
1. **Independence and Compatibility:** The Python Debugger extension aims to separate the debugging functionality from the main Python extension to prevent compatibility issues. This ensures that even as the Python extension drops support for older Python versions (e.g., Python 3.7), you can continue debugging projects with those versions without downgrading your Python extension. This allows you to access new features and bug fixes while keeping your debugging capabilities intact.
2. **Platform-Specific Builds:** Unlike the main Python extension, which bundles all debugpy builds for various platforms into a single extension package, the Python Debugger extension provides a more streamlined approach: it delivers platform-specific builds, ensuring you only receive the components relevant to your specific operating system. This reduces download times and unnecessary overhead.
3. **Feature Parity and Ongoing Updates:** This extension replicates all the functionality available in the main Python extension, and more. Going forward, any new debugger features will be added to this extension. In the future, the Python extension will no longer offer debugging support on its own, and we will transition all debugging support to this extension for all debugging functionality.
## Usage
Once installed in Visual Studio Code, python-debugger will be automatically activated when you open a Python file.
## Disabling the Python Debugger extension
If you want to disable the Python Debugger extension, you can [disable this extension](https://code.visualstudio.com/docs/editor/extension-marketplace#_disable-an-extension) per workspace in Visual Studio Code.
## Commands
| Command | Description |
| ---------------------- | --------------------------------- |
| Python Debugger: viewOutput | Show the Python Debugger Extension output. |
| Python Debugger: clearCacheAndReload | Allows you to clear the global values set in the extension. |
| Python Debugger: debugInTerminal | Allows you to debug a simple Python file in the terminal. |
## Limited support for deprecated Python versions
Older versions of the Python Debugger extension are available for debugging Python projects that use outdated Python versions like Python 2.7 and Python 3.6. However, it's important to note that our team is no longer maintaining these extension versions. We strongly advise you to update your project to a supported Python version if possible.
You can reference the table below to find the most recent Python Debugger extension version that offers debugging support for projects using deprecated Python versions, as well as the debugpy version that is shipped in each extension version.
> **Note**: If you do not see older extension versions to install (<=`2024.0.0`), try opting-in to pre-releases. You can do so on the extension page by clicking `Switch to Pre-Release Version`.
| Python version | Latest supported Python Debugger extension version | debugpy version |
| -------------- | -------------------------------------------------- | ---------------- |
| 2.7, >= 3.5 | 2023.1.XXX | 1.5.1 |
| >= 3.7 | 2024.0.XXX | 1.7.0 |
| >= 3.8 | 2024.2.XXX | 1.8.1 |
> **Note**: Once you install an older version of the Python Debugger extension in VS Code, you may want to disable auto update by changing the value of the `"extensions.autoUpdate"` setting in your `settings.json` file.
## Data and telemetry
The Debugpy Extension for Visual Studio Code collects usage data and sends it to Microsoft to help improve our products and services. Read our [privacy statement](https://privacy.microsoft.com/privacystatement) to learn more. This extension respects the `telemetry.enableTelemetry` setting which you can learn more about at https://code.visualstudio.com/docs/supporting/faq#_how-to-disable-telemetry-reporting.

View File

@ -1,41 +0,0 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.8 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->

View File

@ -1,25 +0,0 @@
# TODO: The maintainer of this repo has not yet edited this file
**REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project?
- **No CSS support:** Fill out this template with information about how to file issues and get help.
- **Yes CSS support:** Fill out an intake form at [aka.ms/onboardsupport](https://aka.ms/onboardsupport). CSS will work with/help you to determine next steps.
- **Not sure?** Fill out an intake as though the answer were "Yes". CSS will help you decide.
*Then remove this first heading from this SUPPORT.MD file before publishing your repo.*
# Support
## How to file issues and get help
This project uses GitHub Issues to track bugs and feature requests. Please search the existing
issues before filing new issues to avoid duplicates. For new issues, file your bug or
feature request as a new Issue.
For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE
FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER
CHANNEL. WHERE WILL YOU HELP PEOPLE?**.
## Microsoft Support Policy
Support for this **PROJECT or PRODUCT** is limited to the resources listed above.

View File

@ -1,24 +0,0 @@
debugpy
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,27 +0,0 @@
Metadata-Version: 2.1
Name: debugpy
Version: 1.8.1
Summary: An implementation of the Debug Adapter Protocol for Python
Home-page: https://aka.ms/debugpy
Author: Microsoft Corporation
Author-email: ptvshelp@microsoft.com
License: MIT
Project-URL: Source, https://github.com/microsoft/debugpy
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Software Development :: Debuggers
Classifier: Operating System :: Microsoft :: Windows
Classifier: Operating System :: MacOS
Classifier: Operating System :: POSIX
Classifier: License :: OSI Approved :: MIT License
Requires-Python: >=3.8
Description-Content-Type: text/markdown
License-File: LICENSE
debugpy is an implementation of the Debug Adapter Protocol for Python.
The source code and the issue tracker are [hosted on GitHub](https://github.com/microsoft/debugpy/).

View File

@ -1,289 +0,0 @@
debugpy/ThirdPartyNotices.txt,sha256=WzmT853BlHOtkEiV_xtk96iazYeVDKWB2tqKMtcm_jo,34345
debugpy/__init__.py,sha256=B621TRbcw1Pr4LrqSZB8Qr0CW9QfpbpcEz2eaN16jns,1018
debugpy/__main__.py,sha256=feuxCZgFCWXu9rVRgySgWvbrS_HECrm_BvaSG8VPdDc,1829
debugpy/_version.py,sha256=Og6fOzAgJgUMiP5eMIOBvYWbxh7LSapJZiAq2O66fMs,497
debugpy/public_api.py,sha256=17qhti1Y4dOR8YSM6y-kKyztjk7TSLLUvlvkxlf0KHs,6320
debugpy/_vendored/__init__.py,sha256=cQGcZObOjPcKFDQk06oWNgqBHh2rXDCv0JTNISv3_rg,3878
debugpy/_vendored/_pydevd_packaging.py,sha256=cYo9maxM8jNPj-vdvCtweaYAPX210Pg8-NHS8ZU02Mg,1245
debugpy/_vendored/_util.py,sha256=E5k-21l2RXQHxl0C5YF5ZmZr-Fzgd9eNl1c6UYYzjfg,1840
debugpy/_vendored/force_pydevd.py,sha256=spQVMTUrxCRoNiSKKIkK96AhEi0JX6vHxcBIJNcNQE0,3172
debugpy/_vendored/pydevd/pydev_app_engine_debug_startup.py,sha256=-gA1UJ8pRY3VE8bRc7JhWRmxlRanQ8QG3724O5ioeKA,691
debugpy/_vendored/pydevd/pydev_coverage.py,sha256=qmd5XNE8Hwtem9m5eDtwbVIxi6U9XvtXIcur2xDP2Uk,3200
debugpy/_vendored/pydevd/pydev_pysrc.py,sha256=LKtwQyDYYry3lhL7YalgmehgWD82-NDpqQYYi1bTYj8,100
debugpy/_vendored/pydevd/pydev_run_in_console.py,sha256=bnrvimUb2pj9Gtcu09qNmSQFGY-lAcu1Yz9M6IJsqGM,4709
debugpy/_vendored/pydevd/pydevconsole.py,sha256=Y4a1Kq3Y8qug4Y6PjFnM46RbMcpsy5OxdTIBTBHaY6M,21094
debugpy/_vendored/pydevd/pydevd.py,sha256=4g912FKxopwibrlL7Z50Hlp-qWXHwz5RNyyA7YHphy8,147607
debugpy/_vendored/pydevd/pydevd_file_utils.py,sha256=_Mk1ugrnQ99YMWnV0Tlu_JW9H52oUWXDOBOPM249mPU,36326
debugpy/_vendored/pydevd/pydevd_tracing.py,sha256=u2xjF1vg4XhuqkZR5elKyiy-MqZ1-DZYKwr1YfM01Y8,15142
debugpy/_vendored/pydevd/setup_pydevd_cython.py,sha256=KcRQ-SPbua2b0AZEiFExT0PiQiVce7FSLsdWoqBBvAI,10410
debugpy/_vendored/pydevd/_pydev_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_calltip_util.py,sha256=shNNj8qVCRge6yKmRDbCthzsG71MYtjc4KsaIr1glK0,4687
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_completer.py,sha256=Vk1jAIrXKT5-i49QqC0UL6wvCCSag8J-lu_NoIV-hAI,8544
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_execfile.py,sha256=Y1tsEGh18bIyF4gfIM84RDk8LUOpC4uYcB0uekv74O8,483
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py,sha256=VyYZXhesz-t_frcGmzm6b5K2ccv7yYRTXWcdbqxwpzM,1095
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_getopt.py,sha256=YYphxNwTDyaD73sWznWqc41Clw5vHF85-rIALwHqXB0,4458
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_imports_tipper.py,sha256=cuQ0YXVeBryJBAf3dU4CDcXVyIruYSAX0-vdMgneUpE,12350
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py,sha256=2FIRkXu71bTvaAHQDTRsVVONFuyd5otTX1kM_RC7XJY,17063
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_log.py,sha256=RrRri4IPsTWYPNA-EvTWOSXKoflIFAHxcERQppngJVM,555
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_saved_modules.py,sha256=D93V9B1C3qA9PoL0dV5bosenu5ncVQR2b-RAoxpvWTc,4573
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_sys_patch.py,sha256=7w1l1AAwvglYAAqydScAvUzOulnv59RLVM7ODsio6Cw,2076
debugpy/_vendored/pydevd/_pydev_bundle/_pydev_tipper_common.py,sha256=hEt5I9k_kxxaJWwinODOEzA_je5uJovcN4iUEYrwXg0,1227
debugpy/_vendored/pydevd/_pydev_bundle/pydev_console_utils.py,sha256=3jXJZ-tObnlWS5JnHhrI5eCBbtQnt-DcqYd1zsHgNZM,23769
debugpy/_vendored/pydevd/_pydev_bundle/pydev_import_hook.py,sha256=85M_hwEqlMuazvG5Nk8WFn3SpjDIQ0aO5ahAA7lPBsk,1322
debugpy/_vendored/pydevd/_pydev_bundle/pydev_imports.py,sha256=N6EMJg371s1SiA1wAFc_zBV9ZYbGBs0oFvTTvnW8q5Y,404
debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console.py,sha256=P3tZYpjILr_jyITP39wXLwhQmmwfzVjcBEPOcG73BTc,3821
debugpy/_vendored/pydevd/_pydev_bundle/pydev_ipython_console_011.py,sha256=P7WvuETdIVxIYZPdmd7c7vJ8DIZ0FCGVppGz9PIBcIE,21354
debugpy/_vendored/pydevd/_pydev_bundle/pydev_is_thread_alive.py,sha256=bOvk_nVIxgZH6EuUb4pkWh5L9JVZAw_IlQJVH75tL9o,696
debugpy/_vendored/pydevd/_pydev_bundle/pydev_localhost.py,sha256=7NTqaf3nAzfkbj1niqZ4xhgPzx9W9SDhgSkwqCazQIo,2070
debugpy/_vendored/pydevd/_pydev_bundle/pydev_log.py,sha256=irP3s1xg55sF6DR3SAyD-wzF2LmlLEeMhAmTq9Ge-hU,9174
debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey.py,sha256=lt2R1Kt5HoNC88488rEZTK4TDIkfgxMmYSzNkfywieg,42252
debugpy/_vendored/pydevd/_pydev_bundle/pydev_monkey_qt.py,sha256=7FLTYfeF9YCmzxxki7zcbGt3ieWUVwYG4SkCNGjsEvk,7306
debugpy/_vendored/pydevd/_pydev_bundle/pydev_override.py,sha256=lL3tGSnQsziztbyePyrk9eu-xmgAWU2YQwxtv2t3to4,872
debugpy/_vendored/pydevd/_pydev_bundle/pydev_umd.py,sha256=0kfbdk22O6_wAN9b2dGRNyhJVsv2P2VZs9dJHh6Fxl8,6279
debugpy/_vendored/pydevd/_pydev_bundle/pydev_versioncheck.py,sha256=VpGp1SZeDMPimt--5dq7LDfB6mKyPGRcAsQUCwuFHuw,510
debugpy/_vendored/pydevd/_pydev_bundle/fsnotify/__init__.py,sha256=X_j7-4Z4v6o5aSI9UW92eUWITNeM-qkvXrQbEOUJn7A,12704
debugpy/_vendored/pydevd/_pydev_runfiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles.py,sha256=oKip8qMSr9rwIFgxuRY9mKRxZbLSkpQM6hk0tUDfNlA,31550
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py,sha256=sWjARGt1-4SZXYDNVML20bWd3IBG5stPgs7Q3Cm4ub8,3499
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_nose.py,sha256=wMCy67DodvPIwyADbrSkV3FGp1ZXyixJ4lWc0o_ugJU,7549
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py,sha256=Qgn7uUlngUlvw21xCr9D_nj8oVNY_9MVPGzMYhsXKCs,9472
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py,sha256=wwl8UBpGRSqTvw8VCdl2S7G0O-SyXtOmVFIPKz4oh9E,7722
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py,sha256=Fny0ZPakVcx5ed0tqrtrR9WNor542br0lavONh2XnTs,9845
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py,sha256=U1jbD9cvqx2gpeKV-XuYc0AoETohg31Jv1oDTudlu4M,6685
debugpy/_vendored/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py,sha256=BznpLOP0enjZ4aZus2nQvQbcoj0ThH0JJu-qBbZeXKA,10594
debugpy/_vendored/pydevd/_pydevd_bundle/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/pydevconsole_code.py,sha256=m2z3GfCLJbbS0_V6_OSlOns21LruvVg1Zkdm2QYtS6Y,19014
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py,sha256=FP57omz4K2irhuf5zlRStGoFLglsw9mcwSS-isJ1S8I,1166
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py,sha256=VnRPfq6xsWsKBmzjMvCoS98lgt7LPUTJ-hW1KKssAgo,6239
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_api.py,sha256=kr0qDV2JRHFj_3Q6QaGdtUPAdtLCR-abcOAe_tvNx7U,50739
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_breakpoints.py,sha256=UAgyAAIiwqLxe-NswVieGiCpuDl2dvb_VUxg4jkZQGw,6010
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_bytecode_utils.py,sha256=Ub1XpTtxqx14P84WriKaEI40Qa6PW4x70Gsc8UJA4ng,26277
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_code_to_source.py,sha256=B30xbup05xjZrYBjA0xP9w5H72lLW7qO6U0jgJlMd7Q,17622
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_collect_bytecode_info.py,sha256=gMsWsDVb5f6GAQlWn-MhPKJWNKUPd8EZyBP-y2ru_gY,37237
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm.py,sha256=iuPfHw1MaF40fnwS2uDFoV_eTrTvsAL5P0OJoRTGqbw,76130
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_comm_constants.py,sha256=S2aiyPQnH-uUdP1hitV2P2dqM9G8t3Ix2CWd3xPF084,6084
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_command_line_handling.py,sha256=WCIU5RPg0jIgCaGeQTiCkF45ztMxaTJFcANpKXQB5ok,6126
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_console.py,sha256=hIxX9k9ZNR7k9mi4Rtl-2ArzpTap3-hMrgUJgtFobmg,10179
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_constants.py,sha256=lzjrv-3KHx9h2twacRxk214RXj7mi1XVk_dOsgm6Ajg,27218
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_custom_frames.py,sha256=g69QtSQ5MjMlxARXbY95U3Q5t9REt0WPRk4pbvELB5k,4399
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c,sha256=IAt_8ZZMxkImzuZ1pyWKqvqm91dx7N-TL72KWsVSPYc,2095445
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.cpython-311-darwin.so,sha256=weDrL6jrdXQ4yfwmEvUd2SeXwx9Hl3EWVniuZeFI4qU,1088053
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pxd,sha256=OzA3y2aGOd7ego0_ParOpQ5mzxj0iy38cNEVy1yKDco,1242
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.pyx,sha256=wY2dk_xgaVmHeM4ZIuVl8lENsEqfuHcKC_c0wfLMTBI,92487
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py,sha256=hVfzhgMunBeuVkGl9qVnE6uMFFSX4DdhxPl0K2-Tcd4,1600
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_daemon_thread.py,sha256=JYINrkA8Tz0K8VfMofsIy95ipQrBQrh03BKyFy3D1KA,7964
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_defaults.py,sha256=anhqPJQu1sNI6h3V62povNEupCoGTmYFal6ezTsoDdk,2316
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace.py,sha256=vOYpH4i6tWq-DX8oCtW2UNu1BrI7Px-UysiXjLPxjUs,3567
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py,sha256=hHFDIIDFQ9EHi0lz5NSTJ4qGncT2XB2NhOg-rwWSeDU,5814
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_exec2.py,sha256=8q6dcEHDfJE6bdwNyrRFXa_vYOlTrHz3UVBtYzhnJJo,159
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_api.py,sha256=Ef8g1TihaUOVR-ed9C0a2u0L6dhUJOzb-GJld9sKf4s,3907
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_extension_utils.py,sha256=DZYt56rfRm_4wjSXG7cUSFqgJBUKvYOnpHPxIWWpQR4,2369
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_filtering.py,sha256=yKtcV0GyyO0bs6LR6BJElEstCP7yZ5O-aba50aIHdl8,12831
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame.py,sha256=Yia39BovL20CruHp2ZKVqSutpMGHpV0XwCzySwNF0xU,64386
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_frame_utils.py,sha256=MdpmpRVl7t9WHHs2-sX4UMHTUyFZE3mP6vk5sSpjsGw,13133
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_gevent_integration.py,sha256=cUOOzjdQQ2geKjnSkJdnR894DVLkrtpltBDLmQoqnUc,3877
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_import_class.py,sha256=XigIMYbVg6IsNY3shHca-R26vGG5wMTGygdyh38vaU8,1838
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_io.py,sha256=Kkk-SrYQK8-oNZRbuiDOebRlhx0r8VblBwQgy2ndASs,8117
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_json_debug_options.py,sha256=2xrwawOQFtJNQWLNwGrnpiBKFBpFdevLSUPpcBjFP3U,6199
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command.py,sha256=wnKwGD2DcxSrTf_l5XsJPuUwgnnjzzdRoJPuHPyPFqg,4588
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_json.py,sha256=k3tUA_ysyKF_KNsGAq20uyAYXTZW9WK_c025VZfSt_A,22408
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_net_command_factory_xml.py,sha256=Y05qSjj0TF6nzOvIepzSXX-fQUCQn4Xht9edYnbGlDs,23269
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_plugin_utils.py,sha256=5azIt1MGftKwB7Y5YTtvGPC9agQt7xM23zjvKqGjeig,2484
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command.py,sha256=YNdXxz9ArTbxS4RQ5n5X2A2yYu5NXxcGO-2GrSQUhLk,35186
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_process_net_command_json.py,sha256=_mTzfSWyboxqCv_IjqfyzPf0embVuTn_NlUC1rzavQ0,57057
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_referrers.py,sha256=WVZbrenmO4WKVTMdBFEOfO7JjICIDURT-5EeCy-lu10,9756
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_reload.py,sha256=J4JylvGaK34_DhMNBpqD0YMWgUtWn9LkgShpxzQJ_iI,15773
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_resolver.py,sha256=yGVZrTFZM12z9BbmafUs3suQGD4HtX5SXXIpLRlWedk,29616
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_runpy.py,sha256=EMN-0GQ242o41a9PG_SakVQfox7K-FK2jSV4yPbcdMs,13521
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_safe_repr.py,sha256=S8VBwT0KYhrcATdFz7AZck1mvOZbpm_yGLb8YRa98B8,14556
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_save_locals.py,sha256=WyKCgiqNQEWBm8YDZy2e6E_DkM_9sEQz3IJKKtbqD_M,3465
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_signature.py,sha256=cwiP2JIzmHrnyzDOL4dZcKRPWMmcCqFZ-Knwl4x8Ieg,6883
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_source_mapping.py,sha256=K8hROuOdVJhXTZGFDi5RP40uHDWK4ZWbG0ix0gNX_oc,6428
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_stackless.py,sha256=P9H6T_jIeukGbhY6TJ-BhPRty3neqP0CiAf65XC-5pg,16909
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_suspended_frames.py,sha256=gA223r51LB8KslU61gOjBLkEuahAwX8XHkGe5HAc9Vk,20864
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_thread_lifecycle.py,sha256=I-U_-urNLaxavillhZFfWSEFdX7QMdgq0dMUtiOL29E,3408
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_timeout.py,sha256=V1-pX6yhik_aa1bk9ClkMsN5RaUXFGWRlHawtmTCSrA,8366
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_api.py,sha256=nlf56i9hKz6DikH5Txc_fwZxqN1gSV60Qps0ODWzc4g,1397
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py,sha256=bIDRhihoBHfcVU9jVlK2TuS-sEIoHEYejTkdHsSXagM,3265
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py,sha256=3Q_iAqFyoOd6qMcKg5Dvw8aFqqtRYQ7QJdffGGZmRAc,22202
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_traceproperty.py,sha256=oY3O9UJwf8EIUngMLsJnOQAiyeuOt9Y2SEgm6MsMJ6o,3279
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_utils.py,sha256=vjEUSAke7-K-bV0lq3jFDYZo2jzNw3-gUSxPiqEMdJY,17844
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vars.py,sha256=0pksSWxEVQ38Q3r0o7CiYGX0ajFTGM1KMwNJoXVNU7U,31106
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_vm_type.py,sha256=osPhgiDRcU66KFUQffURSEt1GxLsS5B-DuV1ThHR_sY,1578
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_xml.py,sha256=XZsERMeJDDOQjN4NZKT7-YNKXJyB5SzKnzH2gYnAREw,15485
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py,sha256=7rcDvKv4OweHbJJVUIrVNaq7nRzc2NnbRCWY_wqGeXg,23085
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json,sha256=F6Myi84FtlbjechZbCpVsWRwB-Q_z5ixJn7vCJrYy3s,157930
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocolCustom.json,sha256=NmKM_bN5s6ozUPXIFc_Q3w6unk6Zf1hb1pgyjvxv4QM,10616
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_base_schema.py,sha256=Ke2Ff_8SRKGSDamE0eZxx8vAYNx4Ka9oJqRmNX5DfQA,3998
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py,sha256=ViAsceebMsN4ZE8F3nwvcZN_0ABr8gkKdXEl_T5BFsM,763496
debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema_log.py,sha256=aMICrBzLWCaynZy2TysEvz3_sdHXfrQlFBHJNKl7j7k,1255
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_concurrency_logger.py,sha256=nhYHTjhodOdBQ5ds2_kyyAF7kpqSvOjflPLacyx0SOw,16764
debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_concurrency_analyser/pydevd_thread_wrappers.py,sha256=Ky_yDpWcMfVTOAA0uwyEgAvPQZS7apJCrMM4OAo6BO4,2039
debugpy/_vendored/pydevd/_pydevd_frame_eval/.gitignore,sha256=EKhbR-PpYtbvIYhIYeDDdUKDcU_kZf4kBzDgARh1Fgg,87
debugpy/_vendored/pydevd/_pydevd_frame_eval/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py,sha256=fsjgGg07ahVCdIMe38savTyAEgQtEzb7vVIldjgLhfE,1343
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py,sha256=XzgU2jey-apeQd9NCV40YXOkiwsGJdYdrUSv_mdNu8U,2105
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c,sha256=4fh_u7VP3gA6Xsb_m46U5GoGBVUx3CXHnboAXGcboss,935795
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd,sha256=0pKrUK3YwQtlJk7vvhZzqnDCJvB6JOb4V23Zl8UrqYI,5324
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.template.pyx,sha256=YOHqtbizcUtC_jMeBlNQ6FLQZn-CZ3fcoqChyHF4h4g,24550
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py,sha256=HkS8B9I0YbUVsHxRDfc6kYstUTlKc41HqZyD-N5ifQA,4219
debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py,sha256=cTwAD7Y2HBwZGZWe-M6AnO-Fs5spFuHXIwue1R8RRIw,13545
debugpy/_vendored/pydevd/_pydevd_frame_eval/release_mem.h,sha256=MbMCNJQXkcJZ8UU7xoH44MwqE3QRWZX5WCAz7zCju6Y,79
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/README.txt,sha256=jgSPsMO3Gc8ncNUe5RwdxdVB-YHyAioWMPXHcD6KbQE,700
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/pydevd_fix_code.py,sha256=0IMNqViUb9NM5gPw9Nv9KUay99XkLoW4TnklupYkdDs,1801
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/__init__.py,sha256=9hCplBAGV2ZNbI6TkkkC-Zefk_SxbesAVwe2iXtdSPQ,4152
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/bytecode.py,sha256=UACCPg0CuM9RIzNMJLqvLlCzeMBNI1UZ-WAspzO7rQM,6983
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/cfg.py,sha256=_ngtd6LTdF74Q7eM0kv-jY70Y-1m2dYOVTdL3LABi6U,15391
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/concrete.py,sha256=0CoEZ5sxVK6BLytJ1KI2qZgSb9-UAMkXwa1GiPaC3ag,22299
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/flags.py,sha256=ipQqlQuvHn_zwjWkaPlf_-LPTjOTcayut3T-sAijYQU,6020
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/instr.py,sha256=z5zgc6dJLWlaOxpmiErmR3qoWK-pbsqFQ5DnZhYxp9w,11721
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/peephole_opt.py,sha256=O7q19q0sDiwN4zVkFGdmceThKK4bQYP_13DFIbH8o8M,15740
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/__init__.py,sha256=qlpJ7nivOajEtN7mIjg_wpiQPBgjnkNypVy1KdbmlEw,4996
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_bytecode.py,sha256=oxR5LHyY7Mp8AnNd5gB9US1E2HLP1_ikKDZ9O1ybB9g,15909
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_cfg.py,sha256=rDlSsB9STCggvl79pv1q6uKUxElzat99z8_KcObcbb8,28547
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_code.py,sha256=5p98FDCpHG2GxUpoMnaw5S6SBakyeMqa5eX29nrOmuo,2425
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_concrete.py,sha256=HKFAg96iZKEjDMpaPwa2LrwERctdWUCTCcnvo-sKnEc,49634
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_flags.py,sha256=vBeWGBHWMvJ4yN7RdJKImSZurjuyeGQz7pHpeBBAKDI,6009
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_instr.py,sha256=hQ6EyqSddjJeUv4vhZFrEiy1xxMiqpyDuCK0xfKbgf8,11676
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_misc.py,sha256=z0K5O16SkAf81IljdDlKumS-x76HSrf5tnNGtsTLuIU,7149
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/test_peephole_opt.py,sha256=r6-xIPkRHhQlnTWXkt0sU0p0r1A80EgDKoFJTxE2J2A,32993
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode/tests/util_annotation.py,sha256=wKq6yPWrzkNlholl5Y10b3VjuCkoiYVgvcIjk_8jzf8,485
debugpy/_vendored/pydevd/pydev_ipython/README,sha256=rvIWDUoNsPxITSg6EUu3L9DihmZUCwx68vQsqo_FSQg,538
debugpy/_vendored/pydevd/pydev_ipython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/pydev_ipython/inputhook.py,sha256=GjhfMC0wvhSGsDKTOTEOt0Gl6FleaS81yUn4AxGIlZY,19554
debugpy/_vendored/pydevd/pydev_ipython/inputhookglut.py,sha256=J5QUoxuqZLwvb8nBjIkrmBr-pTULVLp86UsdmImhYY8,5675
debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk.py,sha256=LDLg0tDXbmv-YoODReQKtOu5Aa3ECBr9HUaNJV1UYk0,1107
debugpy/_vendored/pydevd/pydev_ipython/inputhookgtk3.py,sha256=Sh382xr25ztKAQfChBNm4D-NC5UoY-Ho0LTIj3i4Wi8,1104
debugpy/_vendored/pydevd/pydev_ipython/inputhookpyglet.py,sha256=8lVbOG3Lucf_bX97HLNHXbq-wekKs1BUh79rMmh_bYg,3255
debugpy/_vendored/pydevd/pydev_ipython/inputhookqt4.py,sha256=DKOobEvH6bIkkvOF581Vj-rQXWSUU6f7cEc9fzXr3_g,7242
debugpy/_vendored/pydevd/pydev_ipython/inputhookqt5.py,sha256=82p6mLMradWAyzHh9faHb_eQJRigIgaW74WRbNLaoFc,7289
debugpy/_vendored/pydevd/pydev_ipython/inputhooktk.py,sha256=bW7hLVv2JOuP00TSeqIw9O3KKNVHtBBpE5bHASW-bSo,748
debugpy/_vendored/pydevd/pydev_ipython/inputhookwx.py,sha256=h804ZBfWPLy5ITbQSkzwELkGO4BqZtZB2b9izZXcpQk,6517
debugpy/_vendored/pydevd/pydev_ipython/matplotlibtools.py,sha256=bZSIYY09Cny96-NpxOdbmU0lueNEaCfIhJP87D-dbFc,5378
debugpy/_vendored/pydevd/pydev_ipython/qt.py,sha256=Ley-7H_Fn40za6lJKmekC-r0uOTeOgnH6FIGUBaGqP8,785
debugpy/_vendored/pydevd/pydev_ipython/qt_for_kernel.py,sha256=HF68WlXR08W6-4B3DjuF-5AbNEMLq-NXIVscoSUTEFc,3619
debugpy/_vendored/pydevd/pydev_ipython/qt_loaders.py,sha256=ZxyWPGfCtdYbGrtJ49BkAWu7CEW6w38VjSnoeEzvWjM,8413
debugpy/_vendored/pydevd/pydev_ipython/version.py,sha256=lLBSR8mtlF1eCzwwOejljchAyfSy0opD8b0w_QjR97Q,1227
debugpy/_vendored/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt,sha256=hnkTAuxSFW_Tilgw0Bt1RVLrfGRE3hYjAmTPm1k-sc8,21
debugpy/_vendored/pydevd/pydev_sitecustomize/sitecustomize.py,sha256=kHFVCILQngtbNTgiaTu0icWDh7hRzqIpMlASeULI8Wo,9473
debugpy/_vendored/pydevd/pydevd_attach_to_process/README.txt,sha256=JibLodU4lzwvGyI8TNbfYhu626MPpYN3z4YAw82zTPU,960
debugpy/_vendored/pydevd/pydevd_attach_to_process/_always_live_program.py,sha256=I8Cbq2TR04ferhN1DMds9agFTqF8RST6Pw6diDFHr6o,679
debugpy/_vendored/pydevd/pydevd_attach_to_process/_check.py,sha256=9AE9SHJNK7bwNjIsaadqhfOM11tvgZm64KgDPFtLSDY,135
debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process.py,sha256=kM9LFgzX_qQofDb_X0hYMSb8Qfoga-exKBlng3__KZw,297
debugpy/_vendored/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py,sha256=ATL8WvNcGPi3wTHtTQL23UhKxnbtG9dwB3MTZEiC-2E,2523
debugpy/_vendored/pydevd/pydevd_attach_to_process/add_code_to_python_process.py,sha256=qcW2vIIxQG_-PP04N4HYOR0wwlmdZThhp7uL6GgeivY,22334
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_pydevd.py,sha256=0zce8nPiEccG6JOQV5Va7BkMTibuTJfsrUAMbCfNT1g,2479
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_script.py,sha256=iI9em6cK60PzfVkS0dcqUQgACvOGhnMLPYyJnpYRLyY,7873
debugpy/_vendored/pydevd/pydevd_attach_to_process/attach_x86_64.dylib,sha256=Zz3NWFM3_oEJ3p5PyUW5K-yXm-4VGZ-rG0ynlf8ynGM,55296
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace.hpp,sha256=GCjlTIQW1wRoEpp1yCbeJiUaa9JDTbeOypJiZBRtxPE,8399
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_310.hpp,sha256=mNr6LVLPDoCRyxLPTdYb0JWDXSfRn7xuAzPOzZWvoFs,4062
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_311.hpp,sha256=_tpO9I0U0f2RqCYM8DIOPQJTLv8sL2NCxwKE2BnR0NE,4269
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_custom_pyeval_settrace_common.hpp,sha256=r1ch6UgwF4rxW8ehiNnAvJE18VCoUl2TujP7nTCy0vQ,1870
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_settrace.hpp,sha256=IuObk2NpXdBYvJwoz8p9ZZpUtamRwXIudTORVFpA_84,7822
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_utils.hpp,sha256=hxR-qpxpXQTupO-AgnasBq1j69ztvTdFUF8xWuiEdWA,3811
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/py_version.hpp,sha256=tn11Wl2u0aLBj7Z0mcrYSCSOH6JrZ4e9P3jXSCZxySo,2617
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/python.h,sha256=ueIyBiClInxfKMlgAVMuxTvmhESadiWeA4uSwZAROeo,21631
debugpy/_vendored/pydevd/pydevd_attach_to_process/common/ref_utils.hpp,sha256=8wDFQk9XoAnK2EdM4J-RErKfBYZn93uG6Rw5OCbLsA0,1475
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/.gitignore,sha256=r3rDatBumb9cRDhx35hsdJp9URPUmjgkynAQViLIoR4,82
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/attach.cpp,sha256=xG8NmOwfuhXN93spuG_uEfe0tOn32hnljCqY5f1z354,3703
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_linux.sh,sha256=zLx_fpShueHqBioStvOE0hm1aBuvIkc5EygxXSSh8mQ,275
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_mac.sh,sha256=6szfH260KYg9ZuPusllOBx4vfPXU6ZrOrOrEdh-2bOM,232
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/compile_manylinux.cmd,sha256=W93C4jG-fGn27rd1Yes3neP2upJTO9qAdKZPf8cvSQE,633
debugpy/_vendored/pydevd/pydevd_attach_to_process/linux_and_mac/lldb_prepare.py,sha256=eSmL1KLLOrG8r4RJyVOd3USUjsplWXdKSCCnnGyGVdo,1691
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/__init__.py,sha256=2VU5wHMC1RElLHJa5cwPVo6bK8sRDics9cFMtqx3mq4,7917
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py,sha256=49N-AlYBS8S8ZC_lOXqQKq0ttdWOKjfmRpq8ESfNn84,168168
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/compat.py,sha256=0KEHUJM9HVPJkzEa_f6cWb6LB7ickr7xOqKvuOVjjeA,5230
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/crash.py,sha256=bAwe0hjMepdZkfIESfCJSxQJOnCxC7yp046nd6bJTI0,65394
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/debug.py,sha256=TI59UyIasBZF6iln8OMxtQCkIv4t6hRXqYaED8bRTzg,58709
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/disasm.py,sha256=e45vidMFkVl2s5nu2YJW4iE5Ng1KnWobNWBWnbaZskQ,24409
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/event.py,sha256=QzHPCcGvEZ1LrL33ddVegMsb6BURRgVBSi2nV20aOTs,67241
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/interactive.py,sha256=Nmm9hGyn5XnxFccS0vmLmHl25ReTXPxZSgIiWQCaPQg,83555
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/module.py,sha256=aQOXcKztzBzIhBNuEbSvaNZ-xtDERviWynRzbSVpesw,70615
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/process.py,sha256=ViXvPwwEz2_EKrG-myicFCrtUjirrOmCon36AjeUI78,183635
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/registry.py,sha256=BmThLf5TaXsPEXzk16PqtZHIArZKpSK7f1srG8oFpJ4,21569
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/search.py,sha256=NuUoyoU7lMZovibgZIjkTKtRsxWVuY3FRYfOJicM7-k,23798
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/sql.py,sha256=vHnOHabuFHMq4JBu7qf9O6omUrSTOsOGxdxjg81fvuw,34997
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/system.py,sha256=L5oZqyn68kkoDpFnOYJkaIdTjFle1_tBPwaTPfcKzvo,45884
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/textio.py,sha256=osUFHqxoZBdGQLmfvPXEFikvNPY_JbiQub5N2_tgee0,62691
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/thread.py,sha256=uEuXm8xSq4iwZNfwW6wXnrTvF4fpvkYTz--VQUYQ2mg,75478
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/util.py,sha256=9GFD52PGOg0czJc0oUlpE8sTJVHZnmam84teq938xeg,36223
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/window.py,sha256=E7zOrJKXXm2OnRp_Xvt868_BwEVdh_39etqDQgZYLjQ,24309
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py,sha256=LjPLQ0pv6rcHQmb32jV12bGUw_CTsi0atfNuOvkg2nc,2845
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py,sha256=2EFvqeuxUGiiDGTzkHMV4fEeDZvMxmrByD5hu9ClM8A,120809
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py,sha256=8eT1_GvmfpRI2HWWqdNDRJjA6TvpwVl8ZQcVvSZFCOY,25137
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py,sha256=CLD5RAQi686FfCs0bszEgh_ZVFIj-YrUEt8HRvHE5HE,16108
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py,sha256=6mZA5sDvGrIfP76-Jv6bvIYON3cjTruYt29Cxu2QKdQ,46705
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py,sha256=Verc00KnY3XSxI0KMpb81U4P6k6MUHCe-NgsxlG7Ie8,22799
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py,sha256=5i8RpG43wEMAYbcBjEwJaemrVhrlFwGKhkL947CR-7E,16829
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py,sha256=TeRsADg7eZN1dw7aGepbOIZobGmiKzddT-bKEZ97bQI,164818
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py,sha256=7PSunl1ixZo5y-bol-w53iE4GhKEPYv7KRiI9_CiOms,22847
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py,sha256=KjCX0Yte_g7ty240hehVBbadxmhI2M--bVu0SfhKS9E,159230
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py,sha256=V5n9HSLn0fv5qwe83zMjGGBcTeJ2woJzS_hdjGQhps4,13762
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py,sha256=T2MAPhKCqGfg_JxYGZkWChzlDtDsRAeczsL8fGhSYDA,14007
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py,sha256=x7jV5_99GrZU0wlD-ePUeM09Uq_PJ7L1M9YwucIMUEw,25807
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py,sha256=-YYdVrMVUZWpzMIBUbeLkNlhQ7VCBhvjWLdZtN9u0Vo,57177
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py,sha256=uBtrPoubwMv_1CLSf42kpPQOiOrinCtYJkxWqzOl09I,36813
debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py,sha256=P-4JIP38rtbQ-iHtdjSBst656mfBeFEBWPZyOrCD_c0,11164
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.cpp,sha256=qRJVdptEyvZtM70x0XmSkGhl3U28EYBZ9zCB2-GW3pE,27447
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/attach.h,sha256=rWBA3kdzfyHaE9X9Diub3cojoJlXjC3TKnLQv-nGCeA,1846
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/compile_windows.bat,sha256=I70DCACraBiPsZRb0oi9DaPEZCZFqWeIuxu4RJwTqYI,2031
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/inject_dll.cpp,sha256=GQmZbpNBRMMW1WFcDFOlJaGLy1-oZ4qCCIyo5exPLBM,4792
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/py_win_helpers.hpp,sha256=45pO-c1ofub4nn0XY9kMsTm3s48_EPE-VWAhld3704I,2479
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_in_memory.hpp,sha256=zvDC9cVGZ6BXEsz_Im-QMIubdw6vX3BCIg2T11cVfvg,3355
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/run_code_on_dllmain.cpp,sha256=ApQza8ZJrfe2U4jTJPa8ItzvnHc7w2G6Yrfq4BT_56g,2516
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.cpp,sha256=NO8qlc7sKU6oHA_AnXJSodHupZLizp3npBTc-EGpBj8,999
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/stdafx.h,sha256=l0tEIxxiv0XrIeiJ2zedTd4fYrdlxVTK8ECpBk881WM,1162
debugpy/_vendored/pydevd/pydevd_attach_to_process/windows/targetver.h,sha256=fqw-iPopG_V-pFRxf33lVFL0uvWfDUB2ky0bibFblK0,1013
debugpy/_vendored/pydevd/pydevd_plugins/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/django_debug.py,sha256=VIBdRwe3Ia3m4LwdyCmnm93ybPXoOwXIE8HUCy9JMcw,22430
debugpy/_vendored/pydevd/pydevd_plugins/jinja2_debug.py,sha256=z6XdAlP0Wf9PyluABWF2GICHnTfKaN3gRKvmuh6uymU,19141
debugpy/_vendored/pydevd/pydevd_plugins/pydevd_line_validation.py,sha256=Kl48sEmnu7cn8P4GTflgsqhfOh3UUNCp_qfyjN3B8FI,6286
debugpy/_vendored/pydevd/pydevd_plugins/extensions/README.md,sha256=cxu8F295snUVNgqE5xXKnZTpbqbR3LTmm60pgK0IOTs,1183
debugpy/_vendored/pydevd/pydevd_plugins/extensions/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/__init__.py,sha256=TAxMvzzecHUbYSemZ7gBbqi-3hYmsODLKrbmxrtbaUQ,66
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py,sha256=0fFR63gGOCVNWHp-e8El6OPlBlljTJwCe1oEJWXPv5M,639
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py,sha256=xFgk-9qUwGV7IIraROYeT7ve-oZekcaabzG37FM6giU,3248
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_pandas_types.py,sha256=s-vvx_QhZeSd_OLJ3u4YHGf82Uw-0XIe7cG1D1POAnU,6581
debugpy/_vendored/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py,sha256=quqrRUKuKEPoT37MBla7wJCSuWHio1dI1cEK1G13qO0,538
debugpy/adapter/__init__.py,sha256=ewTjlS3VAb6lypFWfGZjY7j2wiW6ApS3KSPJrm0xsEk,346
debugpy/adapter/__main__.py,sha256=MBXkVyKOTc0QwB-vrUAI_al7EMaGIC3k7mXtfBv7U78,8257
debugpy/adapter/clients.py,sha256=qzv-eHGZvSW2LpiglWyHQr-Oym5-cSuocWlHt74uSDs,31341
debugpy/adapter/components.py,sha256=UGZg2cTDq4oHi3OrJ5bW2zdFe5MAw-lTZ9lkjRbAwgM,6081
debugpy/adapter/launchers.py,sha256=VWMB5i8GJX6CW1FP3OtT1a-iGqMcTabJ3UowNiWq02A,7000
debugpy/adapter/servers.py,sha256=pvIW2wloRQUQTr6WGrOHB1jct5_v12jbpKQeJfdYdCU,23418
debugpy/adapter/sessions.py,sha256=KzkGF3hWP59siooY3UbnATQY2wqIbCHFpqJHO7aR8jo,11229
debugpy/common/__init__.py,sha256=b9YhaTxBfLJ1VqVI33KM-uqiNWAu34RI2GVVN1IH0_8,608
debugpy/common/json.py,sha256=NoinXsMZHybYGNiSbq3_OWPJxtmYbDew6RkqF3zMyZ8,9674
debugpy/common/log.py,sha256=NqGC5AIo9zdls1FKn5Gv0n1EvLmaB0d5s12CRY7ZpLs,11706
debugpy/common/messaging.py,sha256=-kvJRS7uYUoh4MFa5EnlVRYe8xIXwOca9YrSg3OGYos,56576
debugpy/common/singleton.py,sha256=bSTqWB9bLp1SpP1W9-LH_OlU9Arbd7pqu4OcjYKDQ_0,7666
debugpy/common/sockets.py,sha256=RsRPizhPy6SRsK5DSEiUWIBX2fw-D90ch-KnSfQwmZk,4224
debugpy/common/stacks.py,sha256=czZjqyY_5ntvOSpelZlJkpH4Gqq9JyZY7tcUqz4sWXA,1526
debugpy/common/timestamp.py,sha256=ZocK6sWz2JUD1hBAKj672ta8D3ze0Z3zJD_CWjeDq7A,410
debugpy/common/util.py,sha256=CPWCyS757aIcGISxf_SbaGlewSCFOgou0raEKfCZ56I,4646
debugpy/launcher/__init__.py,sha256=L7aoLf-CaGikoiEJokF5JmSL0Y7FWIn2EOJFV89KbbY,890
debugpy/launcher/__main__.py,sha256=yLvc7PNl8YulPLINLZVBGY-38hClmkgec0LmkEQna_Q,3812
debugpy/launcher/debuggee.py,sha256=uUOkA8E-67FrrtOrarI_yEfDBsnn76opcNW1PmFxb9M,8574
debugpy/launcher/handlers.py,sha256=vsMHp4SKqbR1ar27RW7t-SbApXdWPfhS7qLlgr_cw8g,5728
debugpy/launcher/output.py,sha256=R8YWa7ccb9Sy_ighQqN9R5W1OdMNcnmizmTLYc6yTsg,3748
debugpy/launcher/winapi.py,sha256=7ACn2Hxf8Kx5bBmxmuC-0A_hG60kEfrDtrZW_BnnjV4,3129
debugpy/server/__init__.py,sha256=mmPRoui4PkSeZxG3r5Gep6YB0MLMq_lIJru1pfGmKUY,323
debugpy/server/api.py,sha256=oxecs9s0cATf8K8MCd6vVEeZjTgUTuWVIIT0vdfOtF4,11789
debugpy/server/attach_pid_injected.py,sha256=mx8G8CDw5HGOPBM5XGkjkLka_LHVQJEuMJ4tUJHQqX8,2734
debugpy/server/cli.py,sha256=-RTBelnNYN_IdnpqNLcU4ynZnf8RbOa0Ey-vLLjfk3Q,13289
debugpy-1.8.1.dist-info/LICENSE,sha256=g8PtOU5gAGfBe30cCb0-og8HA7jAcW5qn0FLmfy9-BA,1176
debugpy-1.8.1.dist-info/METADATA,sha256=gF_sEN7FW-2DCsY9ls2X8Y1RlRAoEaaXaqR231wI-_Y,1093
debugpy-1.8.1.dist-info/WHEEL,sha256=FC3UK38D2pRZOMRrvghdrfbp2ik06raaw67nwqnEhoM,115
debugpy-1.8.1.dist-info/top_level.txt,sha256=6Om6JTEaqkWnj-9-7kJOJr988sTO6iSuiK4N9X6RLpg,8
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/COPYING,sha256=baWkm-Te2LLURwK7TL0zOkMSVjVCU_ezvObHBo298Tk,1074
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/METADATA,sha256=9XadDK6YTQ-FPowYI5DS4ieA7hRGnRP_fM5Z9ioPkEQ,2929
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/RECORD,sha256=2udHTtpgQXukzLaj7MVfrJhBa40hV7SjP8vyZ5vNqMU,2995
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/direct_url.json,sha256=s58Rb4KXRlMKxk-mzpvr_tJRQ-Hx8-DHsU6NdohCnAg,93
debugpy/_vendored/pydevd/_pydevd_frame_eval/vendored/bytecode-0.13.0.dev0.dist-info/top_level.txt,sha256=9BhdB7HqYZ-PvHNoWX6ilwLYWQqcgEOLwdb3aXm5Gys,9
debugpy-1.8.1.dist-info/RECORD,,

View File

@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: false
Tag: cp311-cp311-macosx_11_0_universal2

View File

@ -1,499 +0,0 @@
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
debugpy incorporates third party material from the projects listed below.
1. PyDev.Debugger (https://github.com/fabioz/PyDev.Debugger)
Includes:File copyright Brainwy Software Ltda.
Includes:File(s) related to Python, Cpython
Includes:File authored by Yuli Fitterman
Includes:File copyright Brainwy software Ltda
Includes:File with methods from Spyder
Includes:File(s) related to IPython
Includes:Files copyright Microsoft Corporation
Includes:six
Includes:WinAppDbg
Includes:XML-RPC client interface for Python
%% PyDev.Debugger NOTICES, INFORMATION, AND LICENSE BEGIN HERE
=========================================
The source code for the PyDev.Debugger files are provided with debugpy, or you may send a check or money order for US $5.00, including the product name (debugpy), the open source component name (PyDev.Debugger) and version number, to: Source Code Compliance Team, Microsoft Corporation, One Microsoft Way, Redmond, WA 98052, USA.
Eclipse Public License, Version 1.0 (EPL-1.0)
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
1. DEFINITIONS
"Contribution" means:
a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
b) in the case of each subsequent Contributor:
i) changes to the Program, and
ii) additions to the Program;
where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
"Contributor" means any person or entity that distributes the Program.
"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
"Program" means the Contributions distributed in accordance with this Agreement.
"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
2. GRANT OF RIGHTS
a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
3. REQUIREMENTS
A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
a) it complies with the terms and conditions of this Agreement; and
b) its license agreement:
i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
When the Program is made available in source code form:
a) it must be made available under this Agreement; and
b) a copy of this Agreement must be included with each copy of the Program.
Contributors may not remove or alter any copyright notices contained within the Program.
Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
4. COMMERCIAL DISTRIBUTION
Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
5. NO WARRANTY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
6. DISCLAIMER OF LIABILITY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
7. GENERAL
If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation.
=========================================
Includes File copyright Brainwy Software Ltda.
File includes the following notice:
Copyright: Brainwy Software Ltda.
License: EPL.
=========================================
Includes file(s) from Python, Python xreload, Cpython and an ActiveState.com Recipe on "NULL OBJECT DESIGN PATTERN (PYTHON RECIPE)"
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights
Reserved" are retained in Python alone or in any derivative version prepared by
Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (C) 2006-2010 Python Software Foundation
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
=========================================
Includes File authored by Yuli Fitterman
Copyright (c) Yuli Fitterman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================
Includes file(s): * Copyright (c) Brainwy software Ltda.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
=========================================
Includes file(s): Copyright (c) 2009-2012 Pierre Raybaut
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes file(s) from Ipython
Copyright (c) 2008-2010, IPython Development Team
Copyright (c) 2001-2007, Fernando Perez. <fernando.perez@colorado.edu>
Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the IPython Development Team nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================
Includes file(s): * Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
=========================================
Includes file(s) from https://pythonhosted.org/six/
Copyright (c) 2010-2018 Benjamin Peterson
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes WinAppDbg
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
=========================================
Includes XML-RPC client interface for Python
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
=========================================
Includes https://github.com/vstinner/bytecode e3e77fb690ed05ac171e15694e1c5d0e0dc34e86 - MIT
Copyright (c) 2016 Red Hat.
The MIT License (MIT)
Copyright (c) 2016 Red Hat.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
Includes https://github.com/benhoyt/scandir 6ed381881bc2fb9de05804e892eeeeb3601a3af2 - BSD 3-Clause "New" or "Revised" License
Copyright (c) 2012, Ben Hoyt
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Ben Hoyt nor the names of its contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================
END OF PyDev.Debugger NOTICES, INFORMATION, AND LICENSE

View File

@ -1,38 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
"""An implementation of the Debug Adapter Protocol (DAP) for Python.

https://microsoft.github.io/debug-adapter-protocol/
"""

# debugpy stable public API consists solely of members of this module that are
# enumerated below.
__all__ = [  # noqa
    "__version__",
    "breakpoint",
    "configure",
    "connect",
    "debug_this_thread",
    "is_client_connected",
    "listen",
    "log_to",
    "trace_this_thread",
    "wait_for_client",
]

import sys

# Fail fast with an explicit message on interpreters this version cannot run on.
assert sys.version_info >= (3, 7), (
    "Python 3.6 and below is not supported by this version of debugpy; "
    "use debugpy 1.5.1 or earlier."
)

# Actual definitions are in a separate file to work around parsing issues causing
# SyntaxError on Python 2 and preventing the above version check from executing.
from debugpy.public_api import *  # noqa
from debugpy.public_api import __version__

# Drop the helper import so it does not leak into the public namespace.
del sys

View File

@ -1,39 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

import sys

if __name__ == "__main__":
    # debugpy can also be invoked directly rather than via -m. In this case, the first
    # entry on sys.path is the one added automatically by Python for the directory
    # containing this file. This means that import debugpy will not work, since we need
    # the parent directory of debugpy/ to be in sys.path, rather than debugpy/ itself.
    #
    # The other issue is that many other absolute imports will break, because they
    # will be resolved relative to debugpy/ - e.g. `import debugger` will then try
    # to import debugpy/debugger.py.
    #
    # To fix both, we need to replace the automatically added entry such that it points
    # at parent directory of debugpy/ instead of debugpy/ itself, import debugpy with that
    # in sys.path, and then remove the first entry entry altogether, so that it doesn't
    # affect any further imports we might do. For example, suppose the user did:
    #
    #   python /foo/bar/debugpy ...
    #
    # At the beginning of this script, sys.path will contain "/foo/bar/debugpy" as the
    # first entry. What we want is to replace it with "/foo/bar', then import debugpy
    # with that in effect, and then remove the replaced entry before any more
    # code runs. The imported debugpy module will remain in sys.modules, and thus all
    # future imports of it or its submodules will resolve accordingly.
    if "debugpy" not in sys.modules:
        # Do not use dirname() to walk up - this can be a relative path, e.g. ".".
        sys.path[0] = sys.path[0] + "/../"
        import debugpy  # noqa
        del sys.path[0]

    from debugpy.server import cli

    cli.main()

View File

@ -1,126 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
import contextlib
from importlib import import_module
import os
import sys
from . import _util
# Directory that holds the vendored projects (normally debugpy/_vendored).
VENDORED_ROOT = os.path.dirname(os.path.abspath(__file__))

# TODO: Move the "pydevd" git submodule to the debugpy/_vendored directory
# and then drop the following fallback.
if "pydevd" not in os.listdir(VENDORED_ROOT):
    # Fallback: the pydevd checkout currently lives one directory up.
    VENDORED_ROOT = os.path.dirname(VENDORED_ROOT)
def list_all(resolve=False):
    """Return the vendored project names (or their root dirs if *resolve*)."""
    # TODO: Derive from os.listdir(VENDORED_ROOT)?
    projects = ["pydevd"]
    return [project_root(name) for name in projects] if resolve else projects
def project_root(project):
    """Return the root directory of the named vendored project.

    If "project" is an empty string then the path prefix for vendored
    projects (e.g. "debugpy/_vendored/") will be returned.
    """
    return os.path.join(VENDORED_ROOT, project or "")
def iter_project_files(project, relative=False, **kwargs):
    """Yield (dirname, basename, filename) for all files in the project.

    With relative=True the walk happens from inside VENDORED_ROOT so the
    yielded paths are relative to it.
    """
    if relative:
        with _util.cwd(VENDORED_ROOT):
            yield from _util.iter_all_files(project, **kwargs)
    else:
        yield from _util.iter_all_files(project_root(project), **kwargs)
def iter_packaging_files(project):
    """Yield VENDORED_ROOT-relative filenames for every file in the project.

    This is most useful for the "package data" in a setup.py. Per-project
    filters are picked up from an optional sibling ``_<project>_packaging``
    module (its prune_dir/exclude_file attributes), when such a module exists.
    """
    # TODO: Use default filters? __pycache__ and .pyc?
    prune_dir = None
    exclude_file = None
    try:
        mod = import_module("._{}_packaging".format(project), __name__)
    except ImportError:
        pass
    else:
        prune_dir = getattr(mod, "prune_dir", prune_dir)
        exclude_file = getattr(mod, "exclude_file", exclude_file)
    walker = iter_project_files(
        project, relative=True, prune_dir=prune_dir, exclude_file=exclude_file
    )
    for _, _, filename in walker:
        yield filename
def prefix_matcher(*prefixes):
    """Return a module-match function that accepts any of the given prefixes."""
    assert prefixes

    def match(name, module):
        # The module object is accepted for API compatibility but unused.
        return any(name.startswith(prefix) for prefix in prefixes)

    return match
def check_modules(project, match, root=None):
    """Verify that only vendored copies of matching modules were imported.

    Returns (unvendored, extensions): a dict of {modname: filename} for
    matching modules loaded from outside *root*, and a list of matching
    modules that have no __file__ at all (extension/builtin modules).
    """
    if root is None:
        root = project_root(project)
    unvendored = {}
    extensions = []
    for mod_name, module in list(sys.modules.items()):
        if not match(mod_name, module):
            continue
        try:
            mod_file = getattr(module, "__file__", None)
        except:  # In theory any error may be raised when accessing __file__
            mod_file = None
        if not mod_file:
            # No file: an extension (or builtin) module.
            extensions.append(mod_name)
        elif not mod_file.startswith(root):
            unvendored[mod_name] = mod_file
    return unvendored, extensions
@contextlib.contextmanager
def vendored(project, root=None):
    """Context manager under which the vendored project can be imported.

    The project's root directory is pushed to the front of sys.path for the
    duration of the block (so it wins over any installed copy) and removed
    again on exit.
    """
    if root is None:
        root = project_root(project)
    sys.path.insert(0, root)
    try:
        yield root
    finally:
        sys.path.remove(root)
def preimport(project, modules, **kwargs):
    """Import each named module while the vendored project is on sys.path."""
    with vendored(project, **kwargs):
        for mod_name in modules:
            import_module(mod_name)

View File

@ -1,48 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
from . import VENDORED_ROOT
from ._util import cwd, iter_all_files
# Files directly under the pydevd root that must ship even though they do not
# follow the pydev*/_pydev* naming scheme (see exclude_file below).
INCLUDES = [
    'setup_pydevd_cython.py',
]
def iter_files():
    # From the root of pydevd repo, we want only scripts and
    # subdirectories that constitute the package itself (not helper
    # scripts, tests etc). But when walking down into those
    # subdirectories, we want everything below.
    #
    # NOTE(review): iter_all_files is a generator, so cwd() has already been
    # exited (working directory restored) by the time the caller iterates the
    # returned generator — confirm callers iterate from VENDORED_ROOT or only
    # use the relative names.
    with cwd(VENDORED_ROOT):
        return iter_all_files('pydevd', prune_dir, exclude_file)
def prune_dir(dirname, basename):
    """Directory filter: True means "do not walk into this directory".

    __pycache__ is always pruned; directly under the pydevd root, only
    pydev*/_pydev* package directories are kept.
    """
    if basename == '__pycache__':
        return True
    if dirname != 'pydevd':
        return False
    return not basename.startswith(('pydev', '_pydev'))
def exclude_file(dirname, basename):
    """File filter: True means "do not package this file".

    Directly under the pydevd root only whitelisted files (INCLUDES) and
    pydev*-named .py scripts are kept; everywhere else only .pyc files are
    dropped.
    """
    if dirname == 'pydevd':
        if basename in INCLUDES:
            return False
        if not basename.endswith('.py') or 'pydev' not in basename:
            return True
        return False
    return basename.endswith('.pyc')

View File

@ -1,59 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.
import contextlib
import os
@contextlib.contextmanager
def cwd(dirname):
    """Temporarily chdir into *dirname*; yields the previous working directory.

    The original directory is restored even if the body raises.
    """
    previous = os.getcwd()
    os.chdir(dirname)
    try:
        yield previous
    finally:
        os.chdir(previous)
def iter_all_files(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, basename, filename) for each file in the tree.

    This is an alternative to os.walk() that flattens out the tree and
    supports filtering (breadth-first; _iter_files appends subdirectories
    to the queue as it goes).
    """
    queue = [root]
    while queue:
        current = queue.pop(0)
        yield from _iter_files(current, queue, prune_dir, exclude_file)
def iter_tree(root, prune_dir=None, exclude_file=None):
    """Yield (dirname, files) for each directory in the tree (breadth-first).

    The files value is a list of (basename, filename) pairs. This is an
    alternative to os.walk() with filtering.
    """
    queue = [root]
    while queue:
        current = queue.pop(0)
        entries = [
            (basename, filename)
            for _, basename, filename in _iter_files(current, queue, prune_dir, exclude_file)
        ]
        yield current, entries
def _iter_files(dirname, subdirs, prune_dir, exclude_file):
    """Yield (dirname, basename, filename) for the files directly in dirname.

    Subdirectories are appended to *subdirs* (the caller's traversal queue)
    unless prune_dir rejects them; files are skipped when exclude_file says so.
    """
    for basename in os.listdir(dirname):
        filename = os.path.join(dirname, basename)
        if os.path.isdir(filename):
            if prune_dir is None or not prune_dir(dirname, basename):
                subdirs.append(filename)
            continue
        # TODO: Use os.path.isfile() to narrow it down?
        if exclude_file is not None and exclude_file(dirname, basename):
            continue
        yield dirname, basename, filename

View File

@ -1,81 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See LICENSE in the project root
# for license information.

from importlib import import_module
import os
import warnings

from . import check_modules, prefix_matcher, preimport, vendored

# Ensure that pydevd is our vendored copy.
_unvendored, _ = check_modules('pydevd',
                               prefix_matcher('pydev', '_pydev'))
if _unvendored:
    _unvendored = sorted(_unvendored.values())
    msg = 'incompatible copy of pydevd already imported'
    # raise ImportError(msg)
    warnings.warn(msg + ':\n {}'.format('\n '.join(_unvendored)))

# If debugpy logging is enabled, enable it for pydevd as well
if "DEBUGPY_LOG_DIR" in os.environ:
    os.environ[str("PYDEVD_DEBUG")] = str("True")
    os.environ[str("PYDEVD_DEBUG_FILE")] = os.environ["DEBUGPY_LOG_DIR"] + str("/debugpy.pydevd.log")

# Disable pydevd frame-eval optimizations only if unset, to allow opt-in.
if "PYDEVD_USE_FRAME_EVAL" not in os.environ:
    os.environ[str("PYDEVD_USE_FRAME_EVAL")] = str("NO")

# Constants must be set before importing any other pydevd module
# # due to heavy use of "from" in them.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    with vendored('pydevd'):
        pydevd_constants = import_module('_pydevd_bundle.pydevd_constants')
# We limit representation size in our representation provider when needed.
pydevd_constants.MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 2 ** 32

# Now make sure all the top-level modules and packages in pydevd are
# loaded. Any pydevd modules that aren't loaded at this point, will
# be loaded using their parent package's __path__ (i.e. one of the
# following).
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=DeprecationWarning)
    preimport('pydevd', [
        '_pydev_bundle',
        '_pydev_runfiles',
        '_pydevd_bundle',
        '_pydevd_frame_eval',
        'pydev_ipython',
        'pydevd_plugins',
        'pydevd',
    ])

# When pydevd is imported it sets the breakpoint behavior, but it needs to be
# overridden because by default pydevd will connect to the remote debugger using
# its own custom protocol rather than DAP.
import pydevd  # noqa
import debugpy  # noqa


def debugpy_breakpointhook():
    # Route breakpoint() calls into debugpy rather than pydevd's default hook.
    debugpy.breakpoint()


pydevd.install_breakpointhook(debugpy_breakpointhook)

# Ensure that pydevd uses JSON protocol
from _pydevd_bundle import pydevd_constants
from _pydevd_bundle import pydevd_defaults
pydevd_defaults.PydevdCustomization.DEFAULT_PROTOCOL = pydevd_constants.HTTP_JSON_PROTOCOL

# Enable some defaults related to debugpy such as sending a single notification when
# threads pause and stopping on any exception.
pydevd_defaults.PydevdCustomization.DEBUG_MODE = 'debugpy-dap'

# This is important when pydevd attaches automatically to a subprocess. In this case, we have to
# make sure that debugpy is properly put back in the game for users to be able to use it.
pydevd_defaults.PydevdCustomization.PREIMPORT = '%s;%s' % (
    os.path.dirname(os.path.dirname(debugpy.__file__)),
    'debugpy._vendored.force_pydevd'
)

View File

@ -1,155 +0,0 @@
'''
License: Apache 2.0
Author: Yuli Fitterman
'''
import types
from _pydevd_bundle.pydevd_constants import IS_JYTHON
try:
import inspect
except:
import traceback;
traceback.print_exc() # Ok, no inspect available (search will not work)
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
def is_bound_method(obj):
    """True when *obj* is a method bound to an instance.

    Checks __self__ (im_self is the Python 2/Jython fallback name).
    """
    if not isinstance(obj, types.MethodType):
        return False
    return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None
def get_class_name(instance):
    """Return instance.__class__.__name__, tolerating missing attributes."""
    cls = getattr(instance, "__class__", None)
    if cls is None:
        return None
    return getattr(cls, "__name__", None)
def get_bound_class_name(obj):
    """Class name of the instance *obj* is bound to, or None if unbound."""
    bound_to = getattr(obj, '__self__', getattr(obj, 'im_self', None))
    return None if bound_to is None else get_class_name(bound_to)
def get_description(obj):
    """Build a source-like calltip stub for *obj*.

    Picks the most informative callable (constructor for classes, __call__
    for callable instances, the object itself otherwise), extracts its
    signature and docstring, and delegates to create_method_stub.
    """
    try:
        ob_call = obj.__call__
    except:
        ob_call = None

    if isinstance(obj, type) or type(obj).__name__ == 'classobj':
        # Classes: describe the constructor, falling back to the class itself
        # when __init__ is not a plain function/method (e.g. object.__init__).
        fob = getattr(obj, '__init__', lambda: None)
        if not isinstance(fob, (types.FunctionType, types.MethodType)):
            fob = obj
    elif is_bound_method(ob_call):
        fob = ob_call
    else:
        fob = obj

    argspec = ""
    fn_name = None
    fn_class = None
    if isinstance(fob, (types.FunctionType, types.MethodType)):
        spec_info = inspect.getfullargspec(fob)
        # BUG FIX: inspect.formatargspec was deprecated since 3.5 and removed
        # in Python 3.11; fall back to inspect.signature, which renders an
        # equivalent "(a, b=1)" string.
        if hasattr(inspect, 'formatargspec'):
            argspec = inspect.formatargspec(*spec_info)
        else:
            argspec = str(inspect.signature(fob))
        fn_name = getattr(fob, '__name__', None)
        if isinstance(obj, type) or type(obj).__name__ == 'classobj':
            fn_name = "__init__"
            fn_class = getattr(obj, "__name__", "UnknownClass")
        elif is_bound_method(obj) or is_bound_method(ob_call):
            fn_class = get_bound_class_name(obj) or "UnknownClass"
    else:
        fn_name = getattr(fob, '__name__', None)
        fn_self = getattr(fob, '__self__', None)
        if fn_self is not None and not isinstance(fn_self, types.ModuleType):
            fn_class = get_class_name(fn_self)

    doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj)
    return create_method_stub(fn_name, fn_class, argspec, doc_string)
def create_method_stub(fn_name, fn_class, argspec, doc_string):
    """Assemble stub source text for a callable.

    With a name and signature, emits a function (optionally nested inside a
    class) stub plus a trailing expression referencing it. With only a
    docstring, tries to recover the signature from the docstring, and finally
    falls back to a generic "unknown(*args, **kwargs)" stub.
    """
    if fn_name and argspec:
        if doc_string is None:
            doc_string = ""
        fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0)
        if not fn_class:
            return fn_stub + "\n" + fn_name
        expr = fn_class if fn_name == '__init__' else fn_class + '().' + fn_name
        return create_class_stub(fn_class, fn_stub) + "\n" + expr
    if doc_string:
        if fn_name:
            restored_signature, _ = signature_from_docstring(doc_string, fn_name)
            if restored_signature:
                return create_method_stub(fn_name, fn_class, restored_signature, doc_string)
        return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown'
    return ''
def get_docstring(obj):
    """Best-effort docstring lookup for *obj*; never raises.

    Falls back to repr(obj), then str(obj.__class__), then '' when nothing
    can be obtained.
    """
    if obj is not None:
        try:
            if IS_JYTHON:
                # Jython
                doc = obj.__doc__
                if doc is not None:
                    return doc
                from _pydev_bundle import _pydev_jy_imports_tipper
                is_method, infos = _pydev_jy_imports_tipper.ismethod(obj)
                ret = ''
                if is_method:
                    for info in infos:
                        ret += info.get_as_doc()
                    return ret
            else:
                doc = inspect.getdoc(obj)
                if doc is not None:
                    return doc
        except:
            # Deliberately best-effort: any failure falls through to repr().
            pass
    else:
        return ''
    try:
        # if no attempt succeeded, try to return repr()...
        return repr(obj)
    except:
        try:
            # otherwise the class
            return str(obj.__class__)
        except:
            # if all fails, go to an empty string
            return ''
def create_class_stub(class_name, contents):
    """Wrap already-indented *contents* in a ``class <name>(object):`` header."""
    return "class {}(object):\n{}".format(class_name, contents)
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
    """Render a fake ``def`` statement with the given signature and docstring.

    *indent* shifts the whole stub right (used when nesting inside a class
    stub). NOTE(review): the exact whitespace inside the template/prefix
    string literals was reconstructed — confirm against upstream pydevd.
    """

    def shift_right(string, prefix):
        # Prefix every line; splitlines(True) keeps the newline characters.
        return ''.join(prefix + line for line in string.splitlines(True))

    # Indent the docstring one level deeper than the def itself.
    fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1))
    ret = '''
def %s%s:
    """%s"""
    pass
''' % (fn_name, fn_argspec, fn_docstring)
    ret = ret[1:]  # remove first /n
    ret = ret.replace('\t', " ")
    if indent:
        prefix = " " * indent
        ret = shift_right(ret, prefix)
    return ret

View File

@ -1,267 +0,0 @@
from collections import namedtuple
from string import ascii_letters, digits
from _pydevd_bundle import pydevd_xml
import pydevconsole
import builtins as __builtin__ # Py3
try:
import java.lang # @UnusedImport
from _pydev_bundle import _pydev_jy_imports_tipper
_pydev_imports_tipper = _pydev_jy_imports_tipper
except ImportError:
IS_JYTHON = False
from _pydev_bundle import _pydev_imports_tipper
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
#=======================================================================================================================
# _StartsWithFilter
#=======================================================================================================================
class _StartsWithFilter:
    '''
    Case-insensitive prefix predicate. A class is used (instead of a lambda
    closing over the prefix) because jython 2.1 lambdas cannot use an outer
    scope.
    '''

    def __init__(self, start_with):
        self.start_with = start_with.lower()

    def __call__(self, name):
        lowered = name.lower()
        return lowered.startswith(self.start_with)
#=======================================================================================================================
# Completer
#
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
#=======================================================================================================================
class Completer:
    """Name/attribute completer adapted from IPython.completer.

    Candidates come from the provided namespace(s) plus builtins; the actual
    enumeration is delegated to the module-level dir2 helper.
    """

    def __init__(self, namespace=None, global_namespace=None):
        """Create a new completer for the command line.

        Completer([namespace,global_namespace]) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given. This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

    def complete(self, text):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None. The completion should begin with 'text'.
        """
        if self.use_main_ns:
            # In pydev this option should never be used
            raise RuntimeError('Namespace must be provided!')
            # NOTE(review): unreachable after the raise above; __main__ is
            # also never imported in this module, so this line would raise
            # NameError if it ever ran.
            self.namespace = __main__.__dict__  # @UndefinedVariable

        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.
        """

        def get_item(obj, attr):
            return obj[attr]

        a = {}

        # Later updates win: globals override builtins, locals override both.
        for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]:  # @UndefinedVariable
            a.update(dict_with_comps)

        filter = _StartsWithFilter(text)  # intentionally shadows builtin filter

        return dir2(a, a.keys(), get_item, filter)

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions. (For class instances, class members are are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)  # @UndefinedVariable

        if not m:
            return []

        expr, attr = m.group(1, 3)
        try:
            # eval of user-typed text: acceptable here because the completer
            # runs inside the user's own debug session.
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        filter = _StartsWithFilter(attr)

        words = dir2(obj, filter=filter)

        return words
def generate_completions(frame, act_tok):
    '''
    :return list(tuple(method_name, docstring, parameters, completion_type))

    method_name: str
    docstring: str
    parameters: str -- i.e.: "(a, b)"
    completion_type is an int
    See: _pydev_bundle._pydev_imports_tipper for TYPE_ constants
    '''
    if frame is None:
        return []

    # Not using frame.f_globals directly because of
    # https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # (Names not resolved in generator expression in method.)
    # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
    # Locals are applied last so they take precedence over the globals.
    scope = dict(frame.f_globals)
    scope.update(frame.f_locals)

    if pydevconsole.IPYTHON:
        return pydevconsole.get_completions(act_tok, act_tok, scope, frame.f_locals)

    completer = Completer(scope, None)
    # list(tuple(name, descr, parameters, type))
    return completer.complete(act_tok)
def generate_completions_as_xml(frame, act_tok):
    """Generate completions for the frame/token and serialize them to XML."""
    return completions_to_xml(generate_completions(frame, act_tok))
def completions_to_xml(completions):
    """Serialize completion 4-tuples into pydevd's <xml><comp .../></xml> form.

    Each tuple field is quoted and made XML-safe before being embedded.
    """
    valid_xml = pydevd_xml.make_valid_xml_value
    quote = pydevd_xml.quote
    parts = ["<xml>"]
    for comp in completions:
        p0, p1, p2, p3 = (valid_xml(quote(comp[i], '/>_= \t')) for i in range(4))
        parts.append('<comp p0="' + p0 + '" p1="' + p1 + '" p2="' + p2 + '" p3="' + p3 + '"/>')
    parts.append("</xml>")
    return ''.join(parts)
# ASCII approximations of identifier character classes, used by the backward
# scanner in extract_token_and_qualifier; isidentifier() covers the rest.
identifier_start = ascii_letters + '_'
identifier_part = ascii_letters + '_' + digits

# Sets give O(1) membership tests in the per-character scanning loop.
identifier_start = set(identifier_start)
identifier_part = set(identifier_part)


def isidentifier(s):
    # Defer to str.isidentifier (also True for non-ASCII identifier chars).
    return s.isidentifier()


# (token, qualifier) pair returned by extract_token_and_qualifier.
TokenAndQualifier = namedtuple('TokenAndQualifier', 'token, qualifier')
def extract_token_and_qualifier(text, line=0, column=0):
    '''
    Extracts the token and qualifier from the text given the line/column
    (see test_extract_token_and_qualifier for examples).

    :param unicode text:
    :param int line: 0-based
    :param int column: 0-based
    '''
    # Note: not using the tokenize module because text should be unicode and
    # line/column refer to the unicode text (otherwise we'd have to know
    # those ranges after converted to bytes).
    line = max(line, 0)
    column = max(column, 0)

    if isinstance(text, bytes):
        text = text.decode('utf-8')

    all_lines = text.splitlines()
    try:
        line_text = all_lines[line]
    except IndexError:
        return TokenAndQualifier(u'', u'')

    column = min(column, len(line_text))
    line_text = line_text[:column]

    # Scan backwards from the cursor, collecting identifier chars and dots.
    collected = []
    for i in range(column - 1, -1, -1):
        c = line_text[i]
        if c in identifier_part or isidentifier(c) or c == u'.':
            collected.append(c)
        else:
            break
    dotted = u''.join(reversed(collected))

    if u'.' in dotted:
        parts = dotted.split(u'.')
        return TokenAndQualifier(u'.'.join(parts[:-1]), parts[-1])
    return TokenAndQualifier(u'', dotted)

View File

@ -1,14 +0,0 @@
# We must redefine it in Py3k if it's not already there
def execfile(file, glob=None, loc=None):
if glob is None:
import sys
glob = sys._getframe().f_back.f_globals
if loc is None:
loc = glob
import tokenize
with tokenize.open(file) as stream:
contents = stream.read()
# execute the script (note: it's important to compile first to have the filename set in debug mode)
exec(compile(contents + "\n", file, 'exec'), glob, loc)

View File

@ -1,41 +0,0 @@
import sys
def __getfilesystemencoding():
    '''
    Best-effort filesystem-encoding lookup, tolerating Jython.
    Note: there's a copy of this method in interpreterInfo.py
    '''
    try:
        encoding = sys.getfilesystemencoding()
        if encoding:
            return encoding
        raise RuntimeError('Unable to get encoding.')
    except:
        try:
            # Handle Jython
            from java.lang import System  # @UnresolvedImport
            env = System.getProperty("os.name").lower()
            if env.find('win') != -1:
                # mbcs does not work on Jython, so use a (hopefully) suitable replacement
                return 'ISO-8859-1'
            return 'utf-8'
        except:
            pass

        # Only available from 2.3 onwards.
        if sys.platform == 'win32':
            return 'mbcs'
        return 'utf-8'
def getfilesystemencoding():
    """Return a filesystem encoding verified to round-trip, or 'utf-8'."""
    try:
        encoding = __getfilesystemencoding()

        # Check that the encoding is actually usable before handing it out!
        if hasattr('', 'encode'):
            ''.encode(encoding)
        if hasattr('', 'decode'):
            # Python 2 only: str has no decode() on Python 3, so this is skipped.
            ''.decode(encoding)

        return encoding
    except:
        return 'utf-8'

View File

@ -1,130 +0,0 @@
#=======================================================================================================================
# getopt code copied since gnu_getopt is not available on jython 2.1
#=======================================================================================================================
class GetoptError(Exception):
    """Raised for unknown or malformed options (mirrors getopt.GetoptError)."""

    opt = ''
    msg = ''

    def __init__(self, msg, opt=''):
        self.msg = msg
        self.opt = opt
        super().__init__(msg, opt)

    def __str__(self):
        return self.msg
def gnu_getopt(args, shortopts, longopts=[]):
    """getopt(args, options[, long_options]) -> opts, args

    This function works like getopt(), except that GNU style scanning
    mode is used by default. This means that option and non-option
    arguments may be intermixed. The getopt() function stops
    processing options as soon as a non-option argument is
    encountered.

    If the first character of the option string is `+', or if the
    environment variable POSIXLY_CORRECT is set, then option
    processing stops as soon as a non-option argument is encountered.
    """
    opts = []
    prog_args = []

    # Accept a single long-option string as well as any iterable of them.
    if type('') == type(longopts):
        longopts = [longopts]
    else:
        longopts = list(longopts)

    # A leading '+' requests POSIX-style scanning (stop at first non-option).
    # NOTE(review): the docstring also mentions POSIXLY_CORRECT, but this
    # vendored copy does not consult the environment.
    all_options_first = False
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        all_options_first = True

    while args:
        first = args[0]
        if first == '--':
            prog_args += args[1:]
            break
        if first[:2] == '--':
            opts, args = do_longs(opts, first[2:], longopts, args[1:])
        elif first[:1] == '-':
            opts, args = do_shorts(opts, first[1:], shortopts, args[1:])
        elif all_options_first:
            prog_args += args
            break
        else:
            prog_args.append(first)
            args = args[1:]

    return opts, prog_args
def do_longs(opts, opt, longopts, args):
    """Consume one ``--long[=value]`` option; returns updated (opts, args).

    Raises GetoptError when a required argument is missing or a forbidden
    one was supplied.
    """
    try:
        eq = opt.index('=')
    except ValueError:
        optarg = None
    else:
        opt, optarg = opt[:eq], opt[eq + 1:]

    has_arg, opt = long_has_args(opt, longopts)
    if has_arg:
        if optarg is None:
            if not args:
                raise GetoptError('option --%s requires argument' % opt, opt)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError('option --%s must not have an argument' % opt, opt)

    opts.append(('--' + opt, optarg or ''))
    return opts, args
# Return:
# has_arg?
# full option name
def long_has_args(opt, longopts):
    """Resolve *opt* against longopts; return (takes_argument, full_name).

    A trailing '=' in a longopts entry means the option takes an argument.
    Raises GetoptError for unknown options or ambiguous prefixes.
    """
    possibilities = [o for o in longopts if o.startswith(opt)]
    if not possibilities:
        raise GetoptError('option --%s not recognized' % opt, opt)

    # Exact matches win over prefix matches.
    if opt in possibilities:
        return False, opt
    if opt + '=' in possibilities:
        return True, opt

    # No exact match, so it had better be a unique prefix.
    if len(possibilities) > 1:
        # XXX since possibilities contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError('option --%s not a unique prefix' % opt, opt)

    unique_match = possibilities[0]
    if unique_match.endswith('='):
        return True, unique_match[:-1]
    return False, unique_match
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options ("-abc" arrives as "abc").

    An option that takes an argument consumes the rest of the cluster, or
    the next element of *args* when the cluster is exhausted.
    """
    while optstring != '':
        opt, optstring = optstring[0], optstring[1:]
        if not short_has_arg(opt, shortopts):
            optarg = ''
        elif optstring != '':
            # The remainder of the cluster is the argument.
            optarg, optstring = optstring, ''
        else:
            if not args:
                raise GetoptError('option -%s requires argument' % opt,
                                  opt)
            optarg, args = args[0], args[1:]
        opts.append(('-' + opt, optarg))
    return opts, args
def short_has_arg(opt, shortopts):
    """True when short option *opt* takes an argument (':' follows it)."""
    for i, ch in enumerate(shortopts):
        if opt == ch != ':':
            return shortopts.startswith(':', i + 1)
    raise GetoptError('option -%s not recognized' % opt, opt)
#=======================================================================================================================
# End getopt code
#=======================================================================================================================

View File

@ -1,373 +0,0 @@
import inspect
import os.path
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from _pydevd_bundle.pydevd_utils import hasattr_checked, dir_checked
from inspect import getfullargspec
def getargspec(*args, **kwargs):
    """Legacy-shaped argspec adapter built on inspect.getfullargspec.

    Returns (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults),
    normalizing the last two to an empty list/dict instead of None.
    """
    spec = getfullargspec(*args, **kwargs)
    return (spec.args, spec.varargs, spec.varkw, spec.defaults,
            spec.kwonlyargs or [], spec.kwonlydefaults or {})
# completion types.
# String tags (not ints) classifying each completion tuple's last element.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name, log=None):
    """__import__ wrapper that retries with parent packages on failure.

    Returns the top-level module (like __import__). Raises ImportError with
    a sys.path dump once no parent package can be imported either.
    """
    try:
        return __import__(name)
    except:
        if '.' not in name:
            s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
            if log is not None:
                log.add_content(s)
                log.add_exception()
            raise ImportError(s)

        parent = name[0:name.rfind('.')]
        if log is not None:
            log.add_content('Unable to import', name, 'trying with', parent)
            log.add_exception()
        return _imp(parent, log)
IS_IPY = False
if sys.platform == 'cli':
    # IronPython: importing a .NET namespace requires registering its
    # assembly with clr first, so wrap _imp to do that before importing.
    IS_IPY = True
    _old_imp = _imp

    def _imp(name, log=None):
        # We must add a reference in clr for .Net
        import clr  # @UnresolvedImport

        initial_name = name
        # Try the full dotted name first, then progressively shorter prefixes.
        while '.' in name:
            try:
                clr.AddReference(name)
                break  # If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            try:
                clr.AddReference(name)
            except:
                pass  # That's OK (not dot net module).

        return _old_imp(initial_name, log)
def get_file(mod):
    """Best-effort path to *mod*'s source file.

    Prefers the source file reported by inspect, falling back to __file__.
    A .pyc/.pyo path is mapped to the sibling .py file when that file exists.
    Returns None when no file can be determined.
    """
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        try:
            f = getattr(mod, '__file__', None)
        except:
            f = None
    # BUG FIX: this was `f.lower(f[-4:])`, which raises TypeError
    # (str.lower() takes no arguments) whenever a compiled-module path
    # reached this point.
    if f and f[-4:].lower() in ['.pyc', '.pyo']:
        filename = f[:-4] + '.py'
        if os.path.exists(filename):
            f = filename
    return f
def Find(name, log=None):
    """Import the dotted *name* and locate it on disk.

    Returns (file, module, parent, found_as): the resolved file (if any),
    the final attribute/module resolved, the top-level module, and the
    dotted attribute path within it that led to the final object.
    """
    mod = _imp(name, log)
    parent = mod
    f = get_file(mod) if inspect.ismodule(mod) else None
    found_as = ''

    old_comp = None
    for comp in name.split('.')[1:]:
        try:
            mod = getattr(mod, comp)
        except AttributeError:
            # This happens e.g. with mx.DateTime.mxDateTime.mxDateTime.pyd:
            # after importing it, mx.DateTime.mxDateTime shadows access to
            # mxDateTime.pyd — tolerate only the repeated component.
            if old_comp != comp:
                raise
        if inspect.ismodule(mod):
            f = get_file(mod)
        else:
            if found_as:
                found_as += '.'
            found_as += comp
        old_comp = comp

    return f, mod, parent, found_as
def search_definition(data):
    '''@return file, line, col
    '''
    data = data.replace('\n', '').rstrip('.')
    f, mod, parent, found_as = Find(data)
    try:
        return do_find(f, mod), found_as
    except:
        # Searching in the resolved submodule failed; retry with the parent.
        return do_find(f, parent), found_as
def generate_tip(data, log=None):
    """Import the dotted name in *data* and build completion tips for it.

    Returns (file, tips) where tips come from generate_imports_tip_for_module.
    """
    data = data.replace('\n', '').rstrip('.')
    f, mod, parent, found_as = Find(data, log)
    # print_ >> open('temp.txt', 'w'), f
    tips = generate_imports_tip_for_module(mod)
    return f, tips
def check_char(c):
    """Map characters illegal in identifiers ('-' and '.') to '_'."""
    return '_' if c in ('-', '.') else c
_SENTINEL = object()
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []
    if dir_comps is None:
        dir_comps = dir_checked(obj_to_complete)
        if hasattr_checked(obj_to_complete, '__dict__'):
            dir_comps.append('__dict__')
        if hasattr_checked(obj_to_complete, '__class__'):
            dir_comps.append('__class__')
    get_complete_info = True
    if len(dir_comps) > 1000:
        # ok, we don't want to let our users wait forever...
        # no complete info for you...
        get_complete_info = False
    # Instances of these have huge/constant-heavy docs -- skip doc retrieval.
    dontGetDocsOn = (float, int, str, tuple, list, dict)
    # Subscripting these classes creates generic aliases (see the raised
    # message below), so class-level getattr is unreliable for them.
    dontGetattrOn = (dict, list, set, tuple)
    for d in dir_comps:
        if d is None:
            continue
        if not filter(d):
            continue
        args = ''
        try:
            try:
                if isinstance(obj_to_complete, dontGetattrOn):
                    raise Exception('Since python 3.9, e.g. "dict[str]" will return'
                                    " a dict that's only supposed to take strings. "
                                    'Interestingly, e.g. dict["val"] is also valid '
                                    'and presumably represents a dict that only takes '
                                    'keys that are "val". This breaks our check for '
                                    'class attributes.')
                # Prefer the class attribute (unbound descriptor) over the
                # instance attribute.
                obj = getattr(obj_to_complete.__class__, d)
            except:
                obj = getattr(obj_to_complete, d)
        except:  # just ignore and get it without additional info
            ret.append((d, '', args, TYPE_BUILTIN))
        else:
            if get_complete_info:
                try:
                    retType = TYPE_BUILTIN
                    # check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:
                        if isinstance(obj, class_):
                            getDoc = False
                            break
                    doc = ''
                    if getDoc:
                        # no need to get this info... too many constants are defined and
                        # makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ''
                        except:  # may happen on jython when checking java classes (so, just ignore it)
                            doc = ''
                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        try:
                            args, vargs, kwargs, defaults, kwonly_args, kwonly_defaults = getargspec(obj)
                            args = args[:]
                            # Keyword-only args are rendered as 'name=default'
                            # when a default exists (_SENTINEL marks "none").
                            for kwonly_arg in kwonly_args:
                                default = kwonly_defaults.get(kwonly_arg, _SENTINEL)
                                if default is not _SENTINEL:
                                    args.append('%s=%s' % (kwonly_arg, default))
                                else:
                                    args.append(str(kwonly_arg))
                            args = '(%s)' % (', '.join(args))
                        except TypeError:
                            # ok, let's see if we can get the arguments from the doc
                            args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None))
                        retType = TYPE_FUNCTION
                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS
                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT
                    else:
                        retType = TYPE_ATTR
                    # add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))
                except:  # just ignore and get it without additional info
                    ret.append((d, '', args, TYPE_BUILTIN))
            else:  # get_complete_info == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION
                elif inspect.isclass(obj):
                    retType = TYPE_CLASS
                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT
                else:
                    retType = TYPE_ATTR
                # ok, no complete info, let's try to do this as fast and clean as possible
                # so, no docs for this kind of information, only the signatures
                ret.append((d, '', str(args), retType))
    return ret
def signature_from_docstring(doc, obj_name):
    """Try to extract a '(...)' signature for `obj_name` from its docstring.

    Returns (args, doc): `args` is the signature string (defaults to '()')
    and `doc` is the (possibly rewritten) docstring. Never raises -- any
    parsing error falls through to the defaults.
    """
    args = '()'
    try:
        found = False
        if len(doc) > 0:
            if IS_IPY:
                # Handle case where we have the situation below
                # sort(self, object cmp, object key)
                # sort(self, object cmp, object key, bool reverse)
                # sort(self)
                # sort(self, object cmp)
                # Or: sort(self: list, cmp: object, key: object)
                # sort(self: list, cmp: object, key: object, reverse: bool)
                # sort(self: list)
                # sort(self: list, cmp: object)
                if obj_name:
                    name = obj_name + '('
                    # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                    lines = doc.splitlines()
                    if len(lines) == 1:
                        c = doc.count(name)
                        if c > 1:
                            doc = ('\n' + name).join(doc.split(name))
                    # Pick the longest overload line as the signature.
                    major = ''
                    for line in doc.splitlines():
                        if line.startswith(name) and line.endswith(')'):
                            if len(line) > len(major):
                                major = line
                    if major:
                        args = major[major.index('('):]
                        found = True
        if not found:
            # Fall back to parsing the first "sentence" of the docstring
            # (delimited by '->', '--' or the first line break).
            i = doc.find('->')
            if i < 0:
                i = doc.find('--')
                if i < 0:
                    i = doc.find('\n')
                    if i < 0:
                        i = doc.find('\r')
            if i > 0:
                s = doc[0:i]
                s = s.strip()
                # let's see if we have a docstring in the first line
                if s[-1] == ')':
                    start = s.find('(')
                    if start >= 0:
                        end = s.find('[')
                        if end <= 0:
                            end = s.find(')')
                            if end <= 0:
                                end = len(s)
                        args = s[start:end]
                        if not args[-1] == ')':
                            args = args + ')'
                        # now, get rid of unwanted chars
                        l = len(args) - 1
                        r = []
                        for i in range(len(args)):
                            if i == 0 or i == l:
                                r.append(args[i])
                            else:
                                r.append(check_char(args[i]))
                        args = ''.join(r)
        if IS_IPY:
            # IronPython: collapse 'self: list' to plain 'self' and drop
            # anything after the closing parenthesis.
            if args.startswith('(self:'):
                i = args.find(',')
                if i >= 0:
                    args = '(self' + args[i:]
                else:
                    args = '(self)'
                i = args.find(')')
                if i > 0:
                    args = args[:i + 1]
    except:
        pass
    return args, doc

View File

@ -1,492 +0,0 @@
import traceback
from io import StringIO
from java.lang import StringBuffer # @UnresolvedImport
from java.lang import String # @UnresolvedImport
import java.lang # @UnresolvedImport
import sys
from _pydev_bundle._pydev_tipper_common import do_find
from org.python.core import PyReflectedFunction # @UnresolvedImport
from org.python import core # @UnresolvedImport
from org.python.core import PyClass # @UnresolvedImport
# completion types.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
return _imp(sub)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
raise RuntimeError(s)
import java.util
_java_rt_file = getattr(java.util, '__file__', None)
def Find(name):
    """Resolve dotted `name` to (file, object, top-level module, attribute path).

    Jython flavor: maps some __builtin__ names to their org.python.core
    implementations, and translates compiled-file names (.pyc / $py.class)
    back to their source counterparts.
    """
    f = None
    if name.startswith('__builtin__'):
        # These builtins are implemented by Jython's own classes.
        if name == '__builtin__.str':
            name = 'org.python.core.PyString'
        elif name == '__builtin__.dict':
            name = 'org.python.core.PyDictionary'
    mod = _imp(name)
    parent = mod
    foundAs = ''
    try:
        f = getattr(mod, '__file__', None)
    except:
        f = None
    components = name.split('.')
    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            if old_comp != comp:
                raise
        if hasattr(mod, '__file__'):
            f = mod.__file__
        else:
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp
        old_comp = comp
    if f is None and name.startswith('java.lang'):
        # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5).
        f = _java_rt_file
    if f is not None:
        # Map compiled artifacts back to their source files.
        if f.endswith('.pyc'):
            f = f[:-1]
        elif f.endswith('$py.class'):
            f = f[:-len('$py.class')] + '.py'
    return f, mod, parent, foundAs
def format_param_class_name(paramClassName):
    """Convert a JVM/reflection type name into a friendlier display form.

    Strips "<type '...'>" wrappers and rewrites JVM array descriptors
    ('[C', '[B', '[I', '[L<name>;') into 'type[]' notation.
    """
    name = paramClassName
    prefix, suffix = "<type '", "'>"
    if name.startswith(prefix) and name.endswith(suffix):
        name = name[len(prefix):-2]
    if name.startswith('['):
        # JVM array descriptors.
        primitive_arrays = {'[C': 'char[]', '[B': 'byte[]', '[I': 'int[]'}
        if name in primitive_arrays:
            name = primitive_arrays[name]
        elif name.startswith('[L') and name.endswith(';'):
            name = name[2:-1] + '[]'
    return name
def generate_tip(data, log=None):
    """Return (file, completion tuples) for the dotted name in `data`."""
    name = data.replace('\n', '')
    if name.endswith('.'):
        name = name.rstrip('.')
    f, mod, _parent, _found_as = Find(name)
    return f, generate_imports_tip_for_module(mod)
#=======================================================================================================================
# Info
#=======================================================================================================================
class Info:
    """Information gathered for one function/method signature."""

    def __init__(self, name, **kwargs):
        self.name = name
        self.doc = kwargs.get('doc', None)
        self.args = kwargs.get('args', ())  # tuple of argument names (strings)
        self.varargs = kwargs.get('varargs', None)  # name of the * parameter, if any
        self.kwargs = kwargs.get('kwargs', None)  # name of the ** parameter, if any
        self.ret = kwargs.get('ret', None)  # return type, if known

    def basic_as_str(self):
        '''@returns this class information as a string (just basic format)
        '''
        return 'function:%s args=%s, varargs=%s, kwargs=%s, docs:%s' % (
            self.name, self.args, self.varargs, self.kwargs, self.doc)

    def get_as_doc(self):
        """Render the collected information in the '@tag' documentation format."""
        parts = [str(self.name)]
        if self.doc:
            parts.append('\n@doc %s\n' % str(self.doc))
        if self.args:
            parts.append('\n@params ')
            for arg in self.args:
                parts.append(str(format_param_class_name(arg)) + ' ')
        if self.varargs:
            parts.append('\n@varargs ' + str(self.varargs))
        if self.kwargs:
            parts.append('\n@kwargs ' + str(self.kwargs))
        if self.ret:
            parts.append('\n@return ' + str(format_param_class_name(str(self.ret))))
        return ''.join(parts)
def isclass(cls):
    # A "class" here is either a Jython-level PyClass or a java.lang.Class.
    return isinstance(cls, core.PyClass) or type(cls) == java.lang.Class
def ismethod(func):
    '''this function should return the information gathered on a function
    @param func: this is the function we want to get info on
    @return a tuple where:
        0 = indicates whether the parameter passed is a method or not
        1 = a list of classes 'Info', with the info gathered from the function
            this is a list because when we have methods from java with the same name and different signatures,
            we actually have many methods, each with its own set of arguments
    '''
    try:
        if isinstance(func, core.PyFunction):
            # ok, this is from python, created by jython
            # print_ '    PyFunction'
            def getargs(func_code):
                """Get information about the arguments accepted by a code object.
                Three things are returned: (args, varargs, varkw), where 'args' is
                a list of argument names (possibly containing nested lists), and
                'varargs' and 'varkw' are the names of the * and ** arguments or None."""
                nargs = func_code.co_argcount
                names = func_code.co_varnames
                args = list(names[:nargs])
                step = 0
                # Jython 2.7 exposes the CO_* flags through CodeFlag instead
                # of attributes on the code object itself.
                if not hasattr(func_code, 'CO_VARARGS'):
                    from org.python.core import CodeFlag  # @UnresolvedImport
                    co_varargs_flag = CodeFlag.CO_VARARGS.flag
                    co_varkeywords_flag = CodeFlag.CO_VARKEYWORDS.flag
                else:
                    co_varargs_flag = func_code.CO_VARARGS
                    co_varkeywords_flag = func_code.CO_VARKEYWORDS
                varargs = None
                if func_code.co_flags & co_varargs_flag:
                    varargs = func_code.co_varnames[nargs]
                    nargs = nargs + 1
                varkw = None
                if func_code.co_flags & co_varkeywords_flag:
                    varkw = func_code.co_varnames[nargs]
                return args, varargs, varkw
            args = getargs(func.func_code)
            return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]
        if isinstance(func, core.PyMethod):
            # this is something from java itself, and jython just wrapped it...
            # things to play in func:
            # ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
            # 'im_func', 'im_self', 'toString']
            # print_ '    PyMethod'
            # that's the PyReflectedFunction... keep going to get it
            func = func.im_func
        if isinstance(func, PyReflectedFunction):
            # this is something from java itself, and jython just wrapped it...
            # print_ '    PyReflectedFunction'
            # One Info per java overload (same name, different signatures).
            infos = []
            for i in range(len(func.argslist)):
                # things to play in func.argslist[i]:
                # 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
                # 'flags', 'isStatic', 'matches', 'precedence']
                # print_ '        ', func.argslist[i].data.__class__
                # func.argslist[i].data.__class__ == java.lang.reflect.Method
                if func.argslist[i]:
                    met = func.argslist[i].data
                    name = met.getName()
                    try:
                        ret = met.getReturnType()
                    except AttributeError:
                        ret = ''
                    parameterTypes = met.getParameterTypes()
                    args = []
                    for j in range(len(parameterTypes)):
                        paramTypesClass = parameterTypes[j]
                        try:
                            try:
                                paramClassName = paramTypesClass.getName()
                            except:
                                paramClassName = paramTypesClass.getName(paramTypesClass)
                        except AttributeError:
                            try:
                                paramClassName = repr(paramTypesClass)  # should be something like <type 'object'>
                                paramClassName = paramClassName.split('\'')[1]
                            except:
                                paramClassName = repr(paramTypesClass)  # just in case something else happens... it will at least be visible
                        # if the parameter equals [C, it means it it a char array, so, let's change it
                        a = format_param_class_name(paramClassName)
                        args.append(a)  # so we don't leave invalid code
                    info = Info(name, args=args, ret=ret)
                    # print_ info.basic_as_str()
                    infos.append(info)
            return 1, infos
    except Exception:
        # Never let introspection errors escape: return a pseudo-Info whose
        # doc carries the traceback so it shows up in the completion UI.
        s = StringIO()
        traceback.print_exc(file=s)
        return 1, [Info(str('ERROR'), doc=s.getvalue())]
    return 0, None
def ismodule(mod):
    # java modules... do we have other way to know that?
    # Heuristic: java packages have a __name__ but neither getClass nor
    # __class__; anything else must be an actual PyModule.
    if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
        and hasattr(mod, '__name__'):
        return 1
    return isinstance(mod, core.PyModule)
def dir_obj(obj):
    """dir() replacement that also walks Java superclasses and interfaces.

    For java.lang.Class instances, collects declared methods/fields from the
    whole class hierarchy (a plain dir() misses members inherited from
    interfaces, e.g. charAt on String); duplicates are filtered out via a
    Java HashMap used as a seen-set.
    """
    ret = []
    found = java.util.HashMap()
    original = obj
    if hasattr(obj, '__class__'):
        if obj.__class__ == java.lang.Class:
            # get info about superclasses
            classes = []
            classes.append(obj)
            try:
                c = obj.getSuperclass()
            except TypeError:
                # may happen on jython when getting the java.lang.Class class
                c = obj.getSuperclass(obj)
            while c != None:
                classes.append(c)
                c = c.getSuperclass()
            # get info about interfaces
            interfs = []
            for obj in classes:
                try:
                    interfs.extend(obj.getInterfaces())
                except TypeError:
                    interfs.extend(obj.getInterfaces(obj))
            classes.extend(interfs)
            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                try:
                    declaredMethods = obj.getDeclaredMethods()
                except TypeError:
                    declaredMethods = obj.getDeclaredMethods(obj)
                try:
                    declaredFields = obj.getDeclaredFields()
                except TypeError:
                    declaredFields = obj.getDeclaredFields(obj)
                for i in range(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret.append(name)
                    found.put(name, 1)
                for i in range(len(declaredFields)):
                    name = declaredFields[i].getName()
                    ret.append(name)
                    found.put(name, 1)
        elif isclass(obj.__class__):
            d = dir(obj.__class__)
            for name in d:
                ret.append(name)
                found.put(name, 1)
    # this simple dir does not always get all the info, that's why we have the part before
    # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
    # charAt don't appear)
    d = dir(original)
    for name in d:
        if found.get(name) != 1:
            ret.append(name)
    return ret
def format_arg(arg):
    """Format an argument type name for display.

    Keeps only the last dotted component, drops ';', spells arrays as
    'Array' and lowercases the first character.
    """
    text = str(arg)
    # Keep only the final component of a dotted name.
    text = text.rpartition('.')[2]
    text = text.replace(';', '').replace('[]', 'Array')
    if text:
        text = text[0].lower() + text[1:]
    return text
def search_definition(data):
    '''Locate the definition of the dotted name in `data`.

    @return ((file, line, col), foundAs)
    '''
    data = data.replace('\n', '')
    if data.endswith('.'):
        data = data.rstrip('.')
    f, mod, parent, foundAs = Find(data)
    try:
        return do_find(f, mod), foundAs
    except Exception:
        # Narrowed from a bare except; finding the exact object failed, so
        # fall back to locating its parent module.
        return do_find(f, parent), foundAs
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []
    if dir_comps is None:
        dir_comps = dir_obj(obj_to_complete)
    for d in dir_comps:
        if d is None:
            continue
        if not filter(d):
            continue
        args = ''
        doc = ''
        retType = TYPE_BUILTIN
        try:
            obj = getattr(obj_to_complete, d)
        except (AttributeError, java.lang.NoClassDefFoundError):
            # jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
            # we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
            # for that we need a custom classloader... we have references from it in the below places:
            #
            # http://mindprod.com/jgloss/classloader.html
            # http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
            # http://freshmeat.net/articles/view/1643/
            #
            # note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
            # before the run, everything goes fine.
            #
            # The code below ilustrates what I mean...
            #
            # import sys
            # sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
            #
            # import junit.framework
            # print_ dir(junit.framework) #shows the TestCase class here
            #
            # import junit.framework.TestCase
            #
            # raises the error:
            # Traceback (innermost last):
            #   File "<console>", line 1, in ?
            # ImportError: No module named TestCase
            #
            # whereas if we had added the jar to the classpath before, everything would be fine by now...
            ret.append((d, '', '', retType))
            # that's ok, private things cannot be gotten...
            continue
        else:
            # ismethod returns (is_method_flag, [Info, ...]); use the first
            # overload's signature for display.
            isMet = ismethod(obj)
            if isMet[0] and isMet[1]:
                info = isMet[1][0]
                try:
                    args, vargs, kwargs = info.args, info.varargs, info.kwargs
                    doc = info.get_as_doc()
                    r = ''
                    for a in (args):
                        if len(r) > 0:
                            r += ', '
                        r += format_arg(a)
                    args = '(%s)' % (r)
                except TypeError:
                    traceback.print_exc()
                    args = '()'
                retType = TYPE_FUNCTION
            elif isclass(obj):
                retType = TYPE_CLASS
            elif ismodule(obj):
                retType = TYPE_IMPORT
        # add token and doc to return - assure only strings.
        ret.append((d, doc, args, retType))
    return ret
if __name__ == "__main__":
    # Manual smoke test: requires the junit jar at this developer-specific path.
    sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar')
    sys.stdout.write('%s\n' % Find('junit.framework.TestCase'))

View File

@ -1,24 +0,0 @@
import traceback
import sys
from io import StringIO
class Log:
    """Accumulates log messages and formatted exceptions in memory."""

    def __init__(self):
        self._contents = []

    def add_content(self, *content):
        """Join the given fragments with spaces and store them as one entry."""
        self._contents.append(' '.join(content))

    def add_exception(self):
        """Store the currently-handled exception's formatted traceback."""
        buf = StringIO()
        exc_type, exc_value, exc_tb = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_tb, limit=None, file=buf)
        self._contents.append(buf.getvalue())

    def get_contents(self):
        """Return all stored entries joined by newlines."""
        return '\n'.join(self._contents)

    def clear_log(self):
        """Discard all stored entries."""
        del self._contents[:]

View File

@ -1,110 +0,0 @@
import sys
import os
def find_in_pythonpath(module_name):
    """Return every sys.path location where `module_name` could be imported from.

    Simplistic on purpose: for each sys.path entry, checks for a package
    directory and for a .py file (may miss .zip imports or elaborate import
    customization, which should be rare in practice).
    """
    matches = []
    # Convert the dotted name to a relative path: mod.name -> mod/name.
    relative = os.path.join(*module_name.split('.'))
    for entry in sys.path:
        candidate = os.path.join(entry, relative)
        if os.path.isdir(candidate):
            matches.append(candidate)
        candidate_py = candidate + '.py'
        if os.path.exists(candidate_py):
            matches.append(candidate_py)
    return matches
class DebuggerInitializationError(Exception):
    """Raised when the debugger cannot initialize (e.g. a stdlib module is shadowed)."""
    pass
class VerifyShadowedImport(object):
    """Context manager that detects a stdlib module shadowed by a user file.

    When the import inside the `with` block fails and the module name is
    found more than once on the PYTHONPATH, the original import error is
    replaced by a DebuggerInitializationError explaining the shadowing.
    """

    def __init__(self, import_name):
        self.import_name = import_name

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is None:
            return  # The import went fine.
        if exc_type == DebuggerInitializationError:
            return False  # It's already an error we generated.
        # The import failed: see whether the module name appears several
        # times on the PYTHONPATH (a user module shadowing the stdlib one).
        found_at = find_in_pythonpath(self.import_name)
        if len(found_at) <= 1:
            # It wasn't found anywhere or there was just 1 occurrence:
            # return to surface the original error.
            return False
        raise DebuggerInitializationError(self._generate_shadowed_import_message(found_at))

    def _generate_shadowed_import_message(self, found_at):
        msg = '''It was not possible to initialize the debugger due to a module name conflict.
i.e.: the module "%(import_name)s" could not be imported because it is shadowed by:
%(found_at)s
Please rename this file/folder so that the original module from the standard library can be imported.''' % {
            'import_name': self.import_name, 'found_at': found_at[0]}
        return msg

    def check(self, module, expected_attributes):
        """Raise DebuggerInitializationError if `module` lacks any expected attribute."""
        for expected_attribute in expected_attributes:
            try:
                getattr(module, expected_attribute)
            except:
                raise DebuggerInitializationError(
                    self._generate_shadowed_import_message([module.__file__]))
# Import each stdlib module the debugger depends on under VerifyShadowedImport,
# so that a user file shadowing one of them fails fast with a clear
# DebuggerInitializationError instead of an obscure error later on.
with VerifyShadowedImport('threading') as verify_shadowed:
    import threading; verify_shadowed.check(threading, ['Thread', 'settrace', 'setprofile', 'Lock', 'RLock', 'current_thread'])
with VerifyShadowedImport('time') as verify_shadowed:
    import time; verify_shadowed.check(time, ['sleep', 'time', 'mktime'])
with VerifyShadowedImport('socket') as verify_shadowed:
    import socket; verify_shadowed.check(socket, ['socket', 'gethostname', 'getaddrinfo'])
with VerifyShadowedImport('select') as verify_shadowed:
    import select; verify_shadowed.check(select, ['select'])
with VerifyShadowedImport('code') as verify_shadowed:
    import code as _code; verify_shadowed.check(_code, ['compile_command', 'InteractiveInterpreter'])
with VerifyShadowedImport('_thread') as verify_shadowed:
    import _thread as thread; verify_shadowed.check(thread, ['start_new_thread', 'start_new', 'allocate_lock'])
with VerifyShadowedImport('queue') as verify_shadowed:
    import queue as _queue; verify_shadowed.check(_queue, ['Queue', 'LifoQueue', 'Empty', 'Full', 'deque'])
with VerifyShadowedImport('xmlrpclib') as verify_shadowed:
    import xmlrpc.client as xmlrpclib; verify_shadowed.check(xmlrpclib, ['ServerProxy', 'Marshaller', 'Server'])
with VerifyShadowedImport('xmlrpc.server') as verify_shadowed:
    import xmlrpc.server as xmlrpcserver; verify_shadowed.check(xmlrpcserver, ['SimpleXMLRPCServer'])
with VerifyShadowedImport('http.server') as verify_shadowed:
    import http.server as BaseHTTPServer; verify_shadowed.check(BaseHTTPServer, ['BaseHTTPRequestHandler'])
# If set, this is a version of the threading.enumerate that doesn't have the patching to remove the pydevd threads.
# Note: as it can't be set during execution, don't import the name (import the module and access it through its name).
pydevd_saved_threading_enumerate = None

View File

@ -1,73 +0,0 @@
import sys
def patch_sys_module():
    """Replace sys.exc_info with a wrapper that, for ImportError, skips the
    frame added by the debugger's plugin_import call.

    The unpatched function is saved once as sys.system_exc_info.
    """

    def patched_exc_info(fun):
        def pydev_debugger_exc_info():
            exc_type, value, tb = fun()
            # we should not show frame added by plugin_import call
            if exc_type == ImportError and tb and hasattr(tb, "tb_next"):
                return exc_type, value, tb.tb_next
            return exc_type, value, tb
        return pydev_debugger_exc_info

    system_exc_info = sys.exc_info
    sys.exc_info = patched_exc_info(system_exc_info)
    if not hasattr(sys, "system_exc_info"):
        sys.system_exc_info = system_exc_info
def patched_reload(orig_reload):
    """Wrap a reload function so that reloading `sys` re-applies our patches."""

    def pydev_debugger_reload(module):
        orig_reload(module)
        if module.__name__ == "sys":
            # if sys module was reloaded we should patch it again
            patch_sys_module()
    return pydev_debugger_reload
def patch_reload():
    """Wrap builtins.reload / imp.reload / importlib.reload with
    pydev_debugger_reload, saving the originals on the sys module so
    cancel_patches_in_sys_module() can restore them.
    """
    import builtins  # Py3
    if hasattr(builtins, "reload"):
        sys.builtin_orig_reload = builtins.reload
        builtins.reload = patched_reload(sys.builtin_orig_reload)  # @UndefinedVariable
    try:
        import imp
        sys.imp_orig_reload = imp.reload
        imp.reload = patched_reload(sys.imp_orig_reload)  # @UndefinedVariable
    except:
        pass
    else:
        # NOTE(review): this `else` runs only when the `imp` patching above
        # succeeded, i.e. importlib.reload is NOT patched when `imp` is
        # unavailable (removed in Python 3.12) -- confirm this is intended.
        try:
            import importlib
            sys.importlib_orig_reload = importlib.reload  # @UndefinedVariable
            importlib.reload = patched_reload(sys.importlib_orig_reload)  # @UndefinedVariable
        except:
            pass
    del builtins
def cancel_patches_in_sys_module():
    """Restore sys.exc_info and any reload functions saved by patch_reload()."""
    sys.exc_info = sys.system_exc_info  # @UndefinedVariable
    import builtins  # Py3
    saved = getattr(sys, "builtin_orig_reload", None)
    if saved is not None:
        builtins.reload = saved
    saved = getattr(sys, "imp_orig_reload", None)
    if saved is not None:
        import imp
        imp.reload = saved
    saved = getattr(sys, "importlib_orig_reload", None)
    if saved is not None:
        import importlib
        importlib.reload = saved
    del builtins

View File

@ -1,52 +0,0 @@
import inspect
import re
def do_find(f, mod):
    """Return (file, line, col) locating the definition of `mod` in file `f`.

    `mod` may be a module, class, method, function, traceback, frame or code
    object; it is successively unwrapped down to a code object, whose first
    line is then scanned backwards for the `def`/lambda/decorator that
    introduces it.
    NOTE(review): the class branch returns a 0-based line index while the
    code branch starts from the 1-based co_firstlineno -- confirm callers
    expect this mix.
    """
    import linecache
    if inspect.ismodule(mod):
        return f, 0, 0
    lines = linecache.getlines(f)
    if inspect.isclass(mod):
        # Scan the file for the 'class <name>' line.
        name = mod.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]):
                return f, i, 0
        return f, 0, 0
    # Unwrap method -> function -> code; traceback/frame -> code.
    if inspect.ismethod(mod):
        mod = mod.im_func
    if inspect.isfunction(mod):
        try:
            mod = mod.func_code
        except AttributeError:
            mod = mod.__code__  # python 3k
    if inspect.istraceback(mod):
        mod = mod.tb_frame
    if inspect.isframe(mod):
        mod = mod.f_code
    if inspect.iscode(mod):
        if not hasattr(mod, 'co_filename'):
            return None, 0, 0
        if not hasattr(mod, 'co_firstlineno'):
            return mod.co_filename, 0, 0
        lnum = mod.co_firstlineno
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        while lnum > 0:
            # assumes lnum is a valid index into `lines` -- TODO confirm for
            # definitions on the very last line of the file.
            if pat.match(lines[lnum]):
                break
            lnum -= 1
        return f, lnum, 0
    raise RuntimeError('Do not know about: ' + f + ' ' + str(mod))

View File

@ -1,353 +0,0 @@
'''
Sample usage to track changes in a thread.
import threading
import time
watcher = fsnotify.Watcher()
watcher.accepted_file_extensions = {'.py', '.pyw'}
# Configure target values to compute throttling.
# Note: internal sleep times will be updated based on
# profiling the actual application runtime to match
# those values.
watcher.target_time_for_single_scan = 2.
watcher.target_time_for_notification = 4.
watcher.set_tracked_paths([target_dir])
def start_watching(): # Called from thread
for change_enum, change_path in watcher.iter_changes():
if change_enum == fsnotify.Change.added:
print('Added: ', change_path)
elif change_enum == fsnotify.Change.modified:
print('Modified: ', change_path)
elif change_enum == fsnotify.Change.deleted:
print('Deleted: ', change_path)
t = threading.Thread(target=start_watching)
t.daemon = True
t.start()
try:
...
finally:
watcher.dispose()
Note: changes are only reported for files (added/modified/deleted), not directories.
'''
import threading
import sys
from os.path import basename
from _pydev_bundle import pydev_log
from os import scandir
try:
from enum import IntEnum
except:
class IntEnum(object):
pass
import time
__author__ = 'Fabio Zadrozny'
__email__ = 'fabiofz@gmail.com'
__version__ = '0.1.5' # Version here and in setup.py
class Change(IntEnum):
    """Kind of filesystem change reported by Watcher.iter_changes()."""
    added = 1
    modified = 2
    deleted = 3
class _SingleVisitInfo(object):
def __init__(self):
self.count = 0
self.visited_dirs = set()
self.file_to_mtime = {}
self.last_sleep_time = time.time()
class _PathWatcher(object):
    '''
    Helper to watch a single path: takes an mtime/size snapshot of every
    accepted file on creation and reports added/modified files on each
    subsequent _check() (deletions are derived by the caller from what
    remains in old_file_to_mtime). Identity/hash are based on the root path.
    '''
    def __init__(self, root_path, accept_directory, accept_file, single_visit_info, max_recursion_level, sleep_time=.0):
        '''
        :type root_path: str
        :type accept_directory: Callback[str, bool]
        :type accept_file: Callback[str, bool]
        :type max_recursion_level: int
        :type sleep_time: float
        '''
        self.accept_directory = accept_directory
        self.accept_file = accept_file
        self._max_recursion_level = max_recursion_level
        self._root_path = root_path
        # Initial sleep value for throttling, it'll be auto-updated based on the
        # Watcher.target_time_for_single_scan.
        self.sleep_time = sleep_time
        self.sleep_at_elapsed = 1. / 30.
        # When created, do the initial snapshot right away!
        old_file_to_mtime = {}
        self._check(single_visit_info, lambda _change: None, old_file_to_mtime)
    def __eq__(self, o):
        # Watchers are identified solely by the path they watch.
        if isinstance(o, _PathWatcher):
            return self._root_path == o._root_path
        return False
    def __ne__(self, o):
        return not self == o
    def __hash__(self):
        return hash(self._root_path)
    def _check_dir(self, dir_path, single_visit_info, append_change, old_file_to_mtime, level):
        # This is the actual poll loop
        if dir_path in single_visit_info.visited_dirs or level > self._max_recursion_level:
            return
        single_visit_info.visited_dirs.add(dir_path)
        try:
            if isinstance(dir_path, bytes):
                try:
                    dir_path = dir_path.decode(sys.getfilesystemencoding())
                except UnicodeDecodeError:
                    try:
                        dir_path = dir_path.decode('utf-8')
                    except UnicodeDecodeError:
                        return  # Ignore if we can't deal with the path.
            new_files = single_visit_info.file_to_mtime
            for entry in scandir(dir_path):
                single_visit_info.count += 1
                # Throttle if needed inside the loop
                # to avoid consuming too much CPU.
                if single_visit_info.count % 300 == 0:
                    if self.sleep_time > 0:
                        t = time.time()
                        diff = t - single_visit_info.last_sleep_time
                        if diff > self.sleep_at_elapsed:
                            time.sleep(self.sleep_time)
                            single_visit_info.last_sleep_time = time.time()
                if entry.is_dir():
                    if self.accept_directory(entry.path):
                        self._check_dir(entry.path, single_visit_info, append_change, old_file_to_mtime, level + 1)
                elif self.accept_file(entry.path):
                    stat = entry.stat()
                    # The key includes the size so same-mtime rewrites are
                    # still detected as modifications.
                    mtime = (stat.st_mtime_ns, stat.st_size)
                    path = entry.path
                    new_files[path] = mtime
                    old_mtime = old_file_to_mtime.pop(path, None)
                    if not old_mtime:
                        append_change((Change.added, path))
                    elif old_mtime != mtime:
                        append_change((Change.modified, path))
        except OSError:
            pass  # Directory was removed in the meanwhile.
    def _check(self, single_visit_info, append_change, old_file_to_mtime):
        # Entry point for one scan of this watcher's root path.
        self._check_dir(self._root_path, single_visit_info, append_change, old_file_to_mtime, 0)
class Watcher(object):
# By default (if accept_directory is not specified), these will be the
# ignored directories.
ignored_dirs = {u'.git', u'__pycache__', u'.idea', u'node_modules', u'.metadata'}
# By default (if accept_file is not specified), these will be the
# accepted files.
accepted_file_extensions = ()
# Set to the target value for doing full scan of all files (adds a sleep inside the poll loop
# which processes files to reach the target time).
# Lower values will consume more CPU
# Set to 0.0 to have no sleeps (which will result in a higher cpu load).
target_time_for_single_scan = 2.0
# Set the target value from the start of one scan to the start of another scan (adds a
# sleep after a full poll is done to reach the target time).
# Lower values will consume more CPU.
# Set to 0.0 to have a new scan start right away without any sleeps.
target_time_for_notification = 4.0
# Set to True to print the time for a single poll through all the paths.
print_poll_time = False
# This is the maximum recursion level.
max_recursion_level = 10
    def __init__(self, accept_directory=None, accept_file=None):
        '''
        :param Callable[str, bool] accept_directory:
            Callable that returns whether a directory should be watched.
            Note: if passed it'll override the `ignored_dirs`
        :param Callable[str, bool] accept_file:
            Callable that returns whether a file should be watched.
            Note: if passed it'll override the `accepted_file_extensions`.
        '''
        # Must exist before the accept_* assignments below: their property
        # setters iterate self._path_watchers.
        self._path_watchers = set()
        self._disposed = threading.Event()
        if accept_directory is None:
            accept_directory = lambda dir_path: basename(dir_path) not in self.ignored_dirs
        if accept_file is None:
            accept_file = lambda path_name: \
                not self.accepted_file_extensions or path_name.endswith(self.accepted_file_extensions)
        # These assignments go through the property setters.
        self.accept_file = accept_file
        self.accept_directory = accept_directory
        self._single_visit_info = _SingleVisitInfo()
    @property
    def accept_directory(self):
        # Callable[str, bool]: whether a directory should be recursed into.
        return self._accept_directory
    @accept_directory.setter
    def accept_directory(self, accept_directory):
        # Propagate to existing path watchers so the change takes effect
        # without re-tracking paths.
        self._accept_directory = accept_directory
        for path_watcher in self._path_watchers:
            path_watcher.accept_directory = accept_directory
    @property
    def accept_file(self):
        # Callable[str, bool]: whether a file should be watched.
        return self._accept_file
    @accept_file.setter
    def accept_file(self, accept_file):
        # Propagate to existing path watchers so the change takes effect
        # without re-tracking paths.
        self._accept_file = accept_file
        for path_watcher in self._path_watchers:
            path_watcher.accept_file = accept_file
    def dispose(self):
        # Signal iter_changes() to stop; the event is checked each poll cycle.
        self._disposed.set()
    @property
    def path_watchers(self):
        # Immutable snapshot of the current _PathWatcher instances.
        return tuple(self._path_watchers)
    def set_tracked_paths(self, paths):
        """
        Note: always resets all path trackers to track the passed paths.

        `paths` may be a single path or a list/tuple/set of paths; each gets
        its own _PathWatcher, which snapshots its tree immediately.
        """
        if not isinstance(paths, (list, tuple, set)):
            paths = (paths,)
        # Sort by the path len so that the bigger paths come first (so,
        # if there's any nesting we want the nested paths to be visited
        # before the parent paths so that the max_recursion_level is correct).
        paths = sorted(set(paths), key=lambda path:-len(path))
        path_watchers = set()
        self._single_visit_info = _SingleVisitInfo()
        initial_time = time.time()
        for path in paths:
            sleep_time = 0.  # When collecting the first time, sleep_time should be 0!
            path_watcher = _PathWatcher(
                path,
                self.accept_directory,
                self.accept_file,
                self._single_visit_info,
                max_recursion_level=self.max_recursion_level,
                sleep_time=sleep_time,
            )
            path_watchers.add(path_watcher)
        actual_time = (time.time() - initial_time)
        pydev_log.debug('Tracking the following paths for changes: %s', paths)
        pydev_log.debug('Time to track: %.2fs', actual_time)
        pydev_log.debug('Folders found: %s', len(self._single_visit_info.visited_dirs))
        pydev_log.debug('Files found: %s', len(self._single_visit_info.file_to_mtime))
        self._path_watchers = path_watchers
    def iter_changes(self):
        '''
        Continuously provides changes (until dispose() is called).

        Changes provided are tuples with the Change enum and filesystem path.

        :rtype: Iterable[Tuple[Change, str]]
        '''
        while not self._disposed.is_set():
            initial_time = time.time()

            # Swap in a fresh visit info; the previous file->mtime mapping is
            # what we compare the new scan against.
            old_visit_info = self._single_visit_info
            old_file_to_mtime = old_visit_info.file_to_mtime
            changes = []
            append_change = changes.append

            self._single_visit_info = single_visit_info = _SingleVisitInfo()
            for path_watcher in self._path_watchers:
                path_watcher._check(single_visit_info, append_change, old_file_to_mtime)

            # Note that we pop entries while visiting, so, what remained is what's deleted.
            for entry in old_file_to_mtime:
                append_change((Change.deleted, entry))

            for change in changes:
                yield change

            actual_time = (time.time() - initial_time)
            if self.print_poll_time:
                print('--- Total poll time: %.3fs' % actual_time)

            if actual_time > 0:
                # Adapt each watcher's per-entry sleep so a full scan takes
                # roughly target_time_for_single_scan seconds.
                if self.target_time_for_single_scan <= 0.0:
                    for path_watcher in self._path_watchers:
                        path_watcher.sleep_time = 0.0
                else:
                    perc = self.target_time_for_single_scan / actual_time

                    # Prevent from changing the values too much (go slowly into the right
                    # direction).
                    # (to prevent from cases where the user puts the machine on sleep and
                    # values become too skewed).
                    if perc > 2.:
                        perc = 2.
                    elif perc < 0.5:
                        perc = 0.5

                    for path_watcher in self._path_watchers:
                        if path_watcher.sleep_time <= 0.0:
                            path_watcher.sleep_time = 0.001
                        new_sleep_time = path_watcher.sleep_time * perc

                        # Prevent from changing the values too much (go slowly into the right
                        # direction).
                        # (to prevent from cases where the user puts the machine on sleep and
                        # values become too skewed).
                        diff_sleep_time = new_sleep_time - path_watcher.sleep_time
                        path_watcher.sleep_time += (diff_sleep_time / (3.0 * len(self._path_watchers)))

                        # NOTE(review): this wait happens once per watcher inside
                        # the adjustment loop — confirm that's intended and not
                        # meant to run once per polling cycle.
                        if actual_time > 0:
                            self._disposed.wait(actual_time)

                        if path_watcher.sleep_time < 0.001:
                            path_watcher.sleep_time = 0.001

            # print('new sleep time: %s' % path_watcher.sleep_time)

            # Pace the overall notification rate.
            diff = self.target_time_for_notification - actual_time
            if diff > 0.:
                self._disposed.wait(diff)

View File

@ -1,639 +0,0 @@
import os
import sys
import traceback
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
from _pydev_bundle._pydev_calltip_util import get_description
from _pydevd_bundle import pydevd_vars
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import (IS_JYTHON, NEXT_VALUE_SEPARATOR, get_global_debugger,
silence_warnings_decorator)
from contextlib import contextmanager
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_utils import interrupt_main_thread
from io import StringIO
# =======================================================================================================================
# BaseStdIn
# =======================================================================================================================
class BaseStdIn:
    """Stand-in for sys.stdin while console code is being evaluated.

    Reading yields an empty line instead of blocking; writes are discarded.
    Unknown attributes are delegated to the wrapped original stdin.
    """

    def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
        # Mirror the stream attributes clients commonly inspect; they may be
        # missing on some interpreters, in which case they're simply skipped.
        try:
            self.encoding = sys.stdin.encoding
        except:
            pass
        self.original_stdin = original_stdin
        try:
            self.errors = sys.stdin.errors  # Who knew? sys streams have an errors attribute!
        except:
            pass

    def readline(self, *args, **kwargs):
        # Don't print any warning here: a user doing input('enter number')
        # would otherwise see it. Just behave as if an empty line was typed.
        return '\n'

    def write(self, *args, **kwargs):
        # Writing to stdin makes no sense, but the stream interface expects it.
        pass

    def flush(self, *args, **kwargs):
        # Not available for stdin, but expected by the stream interface.
        pass

    def read(self, *args, **kwargs):
        # In the interactive interpreter a read behaves like a readline.
        return self.readline()

    def close(self, *args, **kwargs):
        # Expected by the stream interface; nothing to release.
        pass

    def __iter__(self):
        # Explicit __iter__ so this object counts as Iterable in Python 3.
        return self.original_stdin.__iter__()

    def __getattr__(self, item):
        # Called only when normal lookup fails: delegate to the real stdin.
        delegate = self.original_stdin
        if hasattr(delegate, item):
            return getattr(delegate, item)
        raise AttributeError("%s has no attribute %s" % (delegate, item))
# =======================================================================================================================
# StdIn
# =======================================================================================================================
class StdIn(BaseStdIn):
    '''
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)

    readline() calls back into the client over xml-rpc to request input.
    '''

    def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
        # host/client_port identify the xml-rpc client which supplies input.
        BaseStdIn.__init__(self, original_stdin)
        self.interpreter = interpreter
        self.client_port = client_port
        self.host = host

    def readline(self, *args, **kwargs):
        # Ok, callback into the client to get the new input
        try:
            server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
            requested_input = server.RequestInput()
            if not requested_input:
                return '\n'  # Yes, a readline must return something (otherwise we can get an EOFError on the input() call).
            else:
                # readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character).
                requested_input += '\n'
            return requested_input
        except KeyboardInterrupt:
            raise  # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
        except:
            # Any communication failure degrades to an empty line.
            return '\n'

    def close(self, *args, **kwargs):
        pass  # expected in StdIn
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    '''
    Object to be added to stdin (to emulate it as non-blocking while the next line arrives)

    Unlike StdIn, reads go to the real stdin, but the debugger is notified
    while input is being requested (so the IDE can show an input prompt).
    '''

    def __init__(self, py_db, original_stdin):
        '''
        :param py_db:
            If None, get_global_debugger() is used.
        '''
        BaseStdIn.__init__(self, original_stdin)
        self._py_db = py_db
        # Nesting counter: messages are sent only for the outermost request.
        self._in_notification = 0

    def __send_input_requested_message(self, is_started):
        # Best-effort notification to the IDE; failures are only logged.
        try:
            py_db = self._py_db
            if py_db is None:
                py_db = get_global_debugger()
            if py_db is None:
                return
            cmd = py_db.cmd_factory.make_input_requested_message(is_started)
            py_db.writer.add_command(cmd)
        except Exception:
            pydev_log.exception()

    @contextmanager
    def notify_input_requested(self):
        # Notify start on first entry and stop when the last nested request
        # finishes (readline/read may be re-entered).
        self._in_notification += 1
        if self._in_notification == 1:
            self.__send_input_requested_message(True)
        try:
            yield
        finally:
            self._in_notification -= 1
            if self._in_notification == 0:
                self.__send_input_requested_message(False)

    def readline(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.readline(*args, **kwargs)

    def read(self, *args, **kwargs):
        with self.notify_input_requested():
            return self.original_stdin.read(*args, **kwargs)
class CodeFragment:
    """A piece of console source code, possibly accumulated across inputs."""

    def __init__(self, text, is_single_line=True):
        self.text = text
        self.is_single_line = is_single_line

    def append(self, code_fragment):
        # Concatenate with a newline separator; once any appended fragment is
        # multi-line, the accumulated fragment is multi-line too.
        self.text = '\n'.join((self.text, code_fragment.text))
        if not code_fragment.is_single_line:
            self.is_single_line = False
# =======================================================================================================================
# BaseInterpreterInterface
# =======================================================================================================================
class BaseInterpreterInterface:
    """Base class for the console interpreter interface (methods here are the
    ones exposed through the console's xml-rpc server).

    Subclasses provide ``self.interpreter`` and implement ``do_add_exec`` /
    ``get_namespace``; they may also set ``self.host``/``self.client_port``
    so ``get_server`` can reach the client.
    """

    def __init__(self, mainThread, connect_status_queue=None):
        self.mainThread = mainThread
        self.interruptable = False
        # Consumed elsewhere: receives CodeFragments to execute as well as
        # callables scheduled to run in the main thread.
        self.exec_queue = _queue.Queue(0)
        self.buffer = None  # Pending CodeFragment while more input is needed.
        self.banner_shown = False
        self.connect_status_queue = connect_status_queue
        self.mpl_modules_for_patching = {}
        self.init_mpl_modules_for_patching()

    def build_banner(self):
        # Code which, when executed in the console, prints the greeting.
        return 'print({0})\n'.format(repr(self.get_greeting_msg()))

    def get_greeting_msg(self):
        return 'PyDev console: starting.\n'

    def init_mpl_modules_for_patching(self):
        from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot
        # Maps module name -> activation callable, run when the module is
        # first imported (to hook its GUI event loop into the console).
        self.mpl_modules_for_patching = {
            "matplotlib": lambda: activate_matplotlib(self.enableGui),
            "matplotlib.pyplot": activate_pyplot,
            "pylab": activate_pylab
        }

    def need_more_for_code(self, source):
        """Return True if `source` is an incomplete statement needing more input."""
        # PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
        # Strangely even the IPython console is_complete said it was complete
        # even with a continuation char at the end.
        if source.endswith('\\'):
            return True
        if hasattr(self.interpreter, 'is_complete'):
            return not self.interpreter.is_complete(source)
        try:
            # At this point, it should always be single.
            # If we don't do this, things as:
            #
            # for i in range(10): print(i)
            #
            # (in a single line) don't work.
            # Note that it won't give an error and code will be None (so, it'll
            # use execMultipleLines in the next call in this case).
            symbol = 'single'
            code = self.interpreter.compile(source, '<input>', symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            return False
        if code is None:
            # Case 2
            return True
        # Case 3
        return False

    def need_more(self, code_fragment):
        # Accumulate the fragment into the buffer and check completeness.
        if self.buffer is None:
            self.buffer = code_fragment
        else:
            self.buffer.append(code_fragment)
        return self.need_more_for_code(self.buffer.text)

    def create_std_in(self, debugger=None, original_std_in=None):
        # Without a debugger, input is requested from the client via xml-rpc;
        # with one, the real stdin is used but the IDE is notified.
        if debugger is None:
            return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
        else:
            return DebugConsoleStdIn(py_db=debugger, original_stdin=original_std_in)

    def add_exec(self, code_fragment, debugger=None):
        """Execute a code fragment, temporarily swapping sys.stdin (and
        pydoc.help's input) so interactive input works during execution.

        Returns True if more input is needed to complete the statement.
        """
        # In case sys.excepthook called, use original excepthook #PyDev-877: Debug console freezes with Python 3.5+
        # (showtraceback does it on python 3.5 onwards)
        sys.excepthook = sys.__excepthook__
        try:
            original_in = sys.stdin
            try:
                help = None
                if 'pydoc' in sys.modules:
                    pydoc = sys.modules['pydoc']  # Don't import it if it still is not there.
                    if hasattr(pydoc, 'help'):
                        # You never know how will the API be changed, so, let's code defensively here
                        help = pydoc.help
                        if not hasattr(help, 'input'):
                            help = None
            except:
                # Just ignore any error here
                pass
            more = False
            try:
                sys.stdin = self.create_std_in(debugger, original_in)
                try:
                    if help is not None:
                        # This will enable the help() function to work.
                        try:
                            try:
                                help.input = sys.stdin
                            except AttributeError:
                                help._input = sys.stdin
                        except:
                            help = None
                            if not self._input_error_printed:
                                self._input_error_printed = True
                                sys.stderr.write('\nError when trying to update pydoc.help.input\n')
                                sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n')
                                traceback.print_exc()
                    try:
                        self.start_exec()
                        if hasattr(self, 'debugger'):
                            self.debugger.enable_tracing()
                        more = self.do_add_exec(code_fragment)
                        if hasattr(self, 'debugger'):
                            self.debugger.disable_tracing()
                        self.finish_exec(more)
                    finally:
                        # Restore pydoc.help's input stream no matter what.
                        if help is not None:
                            try:
                                try:
                                    help.input = original_in
                                except AttributeError:
                                    help._input = original_in
                            except:
                                pass
                finally:
                    sys.stdin = original_in
            except SystemExit:
                raise
            except:
                traceback.print_exc()
        finally:
            # NOTE(review): this assigns __excepthook__ from excepthook (the
            # reverse of the assignment at the top). Looks inverted but is the
            # upstream behavior -- confirm before changing.
            sys.__excepthook__ = sys.excepthook
        return more

    def do_add_exec(self, codeFragment):
        '''
        Subclasses should override.

        @return: more (True if more input is needed to complete the statement and False if the statement is complete).
        '''
        raise NotImplementedError()

    def get_namespace(self):
        '''
        Subclasses should override.

        @return: dict with namespace.
        '''
        raise NotImplementedError()

    def __resolve_reference__(self, text):
        """Resolve a (possibly dotted) name against the namespace/builtins.

        :type text: str
        :return: the resolved object or None if it could not be resolved.
        """
        obj = None
        if '.' not in text:
            # Plain name: namespace first, then __builtins__ as dict or module.
            try:
                obj = self.get_namespace()[text]
            except KeyError:
                pass
            if obj is None:
                try:
                    obj = self.get_namespace()['__builtins__'][text]
                except:
                    pass
            if obj is None:
                try:
                    obj = getattr(self.get_namespace()['__builtins__'], text, None)
                except:
                    pass
        else:
            # Dotted name: evaluate the parent expression, then getattr the leaf.
            try:
                last_dot = text.rindex('.')
                parent_context = text[0:last_dot]
                res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
                obj = getattr(res, text[last_dot + 1:])
            except:
                pass
        return obj

    def getDescription(self, text):
        # Returns a docstring-like description for the given name ('' on error).
        try:
            obj = self.__resolve_reference__(text)
            if obj is None:
                return ''
            return get_description(obj)
        except:
            return ''

    def do_exec_code(self, code, is_single_line):
        # Buffer the code; when complete, enqueue it for execution and reset.
        try:
            code_fragment = CodeFragment(code, is_single_line)
            more = self.need_more(code_fragment)
            if not more:
                code_fragment = self.buffer
                self.buffer = None
                self.exec_queue.put(code_fragment)
            return more
        except:
            traceback.print_exc()
            return False

    def execLine(self, line):
        return self.do_exec_code(line, True)

    def execMultipleLines(self, lines):
        if IS_JYTHON:
            # Jython: feed one line at a time.
            more = False
            for line in lines.split('\n'):
                more = self.do_exec_code(line, True)
            return more
        else:
            return self.do_exec_code(lines, False)

    def interrupt(self):
        self.buffer = None  # Also clear the buffer when it's interrupted.
        try:
            if self.interruptable:
                # Fix for #PyDev-500: Console interrupt can't interrupt on sleep
                interrupt_main_thread(self.mainThread)
            self.finish_exec(False)
            return True
        except:
            traceback.print_exc()
            return False

    def close(self):
        sys.exit(0)

    def start_exec(self):
        self.interruptable = True

    def get_server(self):
        # xml-rpc proxy to the client, or None if no host was configured.
        if getattr(self, 'host', None) is not None:
            return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
        else:
            return None

    server = property(get_server)

    def ShowConsole(self):
        server = self.get_server()
        if server is not None:
            server.ShowConsole()

    def finish_exec(self, more):
        self.interruptable = False
        server = self.get_server()
        if server is not None:
            return server.NotifyFinished(more)
        else:
            return True

    def getFrame(self):
        # XML representation of the current namespace (hidden vars excluded).
        xml = StringIO()
        hidden_ns = self.get_ipython_hidden_vars_dict()
        xml.write("<xml>")
        xml.write(pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns))
        xml.write("</xml>")
        return xml.getvalue()

    @silence_warnings_decorator
    def getVariable(self, attributes):
        # XML representation of the fields of a compound variable.
        xml = StringIO()
        xml.write("<xml>")
        val_dict = pydevd_vars.resolve_compound_var_object_fields(self.get_namespace(), attributes)
        if val_dict is None:
            val_dict = {}
        for k, val in val_dict.items():
            val = val_dict[k]
            evaluate_full_value = pydevd_xml.should_evaluate_full_value(val)
            xml.write(pydevd_vars.var_to_xml(val, k, evaluate_full_value=evaluate_full_value))
        xml.write("</xml>")
        return xml.getvalue()

    def getArray(self, attr, roffset, coffset, rows, cols, format):
        # Slice of a table-like structure (last tab-separated token is the name).
        name = attr.split("\t")[-1]
        array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
        return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)

    def evaluate(self, expression):
        # Evaluate an expression in the namespace and return its XML form.
        xml = StringIO()
        xml.write("<xml>")
        result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
        xml.write(pydevd_vars.var_to_xml(result, expression))
        xml.write("</xml>")
        return xml.getvalue()

    @silence_warnings_decorator
    def loadFullValue(self, seq, scope_attrs):
        """
        Evaluate full value for async Console variables in a separate thread and send results to IDE side
        :param seq: id of command
        :param scope_attrs: a sequence of variables with their attributes separated by NEXT_VALUE_SEPARATOR
        (i.e.: obj\tattr1\tattr2NEXT_VALUE_SEPARATORobj2\attr1\tattr2)
        :return:
        """
        frame_variables = self.get_namespace()
        var_objects = []
        vars = scope_attrs.split(NEXT_VALUE_SEPARATOR)
        for var_attrs in vars:
            if '\t' in var_attrs:
                name, attrs = var_attrs.split('\t', 1)
            else:
                name = var_attrs
                attrs = None
            if name in frame_variables:
                var_object = pydevd_vars.resolve_var_object(frame_variables[name], attrs)
                var_objects.append((var_object, name))
            else:
                # Name not directly in the namespace: evaluate it as an expression.
                var_object = pydevd_vars.eval_in_context(name, frame_variables, frame_variables)
                var_objects.append((var_object, name))
        from _pydevd_bundle.pydevd_comm import GetValueAsyncThreadConsole
        py_db = getattr(self, 'debugger', None)
        if py_db is None:
            py_db = get_global_debugger()
        if py_db is None:
            from pydevd import PyDB
            py_db = PyDB()
        t = GetValueAsyncThreadConsole(py_db, self.get_server(), seq, var_objects)
        t.start()

    def changeVariable(self, attr, value):

        def do_change_variable():
            Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace())

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_change_variable)

    def connectToDebugger(self, debuggerPort, debugger_options=None):
        '''
        Used to show console with variables connection.
        Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.
        '''
        if debugger_options is None:
            debugger_options = {}
        env_key = "PYDEVD_EXTRA_ENVS"
        if env_key in debugger_options:
            # Apply extra environment variables before connecting; PYTHONPATH
            # entries are also added to sys.path.
            for (env_name, value) in debugger_options[env_key].items():
                existing_value = os.environ.get(env_name, None)
                if existing_value:
                    os.environ[env_name] = "%s%c%s" % (existing_value, os.path.pathsep, value)
                else:
                    os.environ[env_name] = value
                if env_name == "PYTHONPATH":
                    sys.path.append(value)
            del debugger_options[env_key]

        def do_connect_to_debugger():
            try:
                # Try to import the packages needed to attach the debugger
                import pydevd
                from _pydev_bundle._pydev_saved_modules import threading
            except:
                # This happens on Jython embedded in host eclipse
                traceback.print_exc()
                sys.stderr.write('pydevd is not available, cannot connect\n')
            from _pydevd_bundle.pydevd_constants import set_thread_id
            from _pydev_bundle import pydev_localhost
            set_thread_id(threading.current_thread(), "console_main")
            VIRTUAL_FRAME_ID = "1"  # matches PyStackFrameConsole.java
            VIRTUAL_CONSOLE_ID = "console_main"  # matches PyThreadConsole.java
            # Fake frame exposing the console namespace as frame locals.
            f = FakeFrame()
            f.f_back = None
            f.f_globals = {}  # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
            f.f_locals = self.get_namespace()
            self.debugger = pydevd.PyDB()
            self.debugger.add_fake_frame(thread_id=VIRTUAL_CONSOLE_ID, frame_id=VIRTUAL_FRAME_ID, frame=f)
            try:
                pydevd.apply_debugger_options(debugger_options)
                self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
                self.debugger.prepare_to_run()
                self.debugger.disable_tracing()
            except:
                traceback.print_exc()
                sys.stderr.write('Failed to connect to target debugger.\n')
            # Register to process commands when idle
            self.debugrunning = False
            try:
                import pydevconsole
                pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
            except:
                traceback.print_exc()
                sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n')

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_connect_to_debugger)
        return ('connect complete',)

    def handshake(self):
        # Also signals the connection status queue, if one was provided.
        if self.connect_status_queue is not None:
            self.connect_status_queue.put(True)
        return "PyCharm"

    def get_connect_status_queue(self):
        return self.connect_status_queue

    def hello(self, input_str):
        # Don't care what the input string is
        return ("Hello eclipse",)

    def enableGui(self, guiname):
        ''' Enable the GUI specified in guiname (see inputhook for list).
            As with IPython, enabling multiple GUIs isn't an error, but
            only the last one's main loop runs and it may not work
        '''

        def do_enable_gui():
            from _pydev_bundle.pydev_versioncheck import versionok_for_gui
            if versionok_for_gui():
                try:
                    from pydev_ipython.inputhook import enable_gui
                    enable_gui(guiname)
                except:
                    sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
                    traceback.print_exc()
            elif guiname not in ['none', '', None]:
                # Only print a warning if the guiname was going to do something
                sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
            # Return value does not matter, so return back what was sent
            return guiname

        # Important: it has to be really enabled in the main thread, so, schedule
        # it to run in the main thread.
        self.exec_queue.put(do_enable_gui)

    def get_ipython_hidden_vars_dict(self):
        # Overridden by IPython-backed subclasses to hide internal variables.
        return None
# =======================================================================================================================
# FakeFrame
# =======================================================================================================================
class FakeFrame:
    '''
    Used to show console with variables connection.
    A class to be used as a mock of a frame.

    Note: the frame attributes (f_back, f_globals, f_locals) are assigned
    dynamically by the code that creates instances of this class.
    '''

View File

@ -1,40 +0,0 @@
import sys
import traceback
from types import ModuleType
from _pydevd_bundle.pydevd_constants import DebugInfoHolder
import builtins
class ImportHookManager(ModuleType):
    """Wrapper around the system ``__import__`` which runs one-shot activation
    callbacks the first time selected modules are imported."""

    def __init__(self, name, system_import):
        super().__init__(name)
        self._system_import = system_import
        self._modules_to_patch = {}

    def add_module_name(self, module_name, activate_function):
        # Register a callback to be run right after `module_name` is imported.
        self._modules_to_patch[module_name] = activate_function

    def do_import(self, name, *args, **kwargs):
        # Always delegate the actual import to the original importer.
        module = self._system_import(name, *args, **kwargs)
        activate_func = self._modules_to_patch.pop(name, None)
        try:
            if activate_func:
                activate_func()  # call activate function
        except:
            if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                traceback.print_exc()

        # Restore normal system importer to reduce performance impact
        # of calling this method every time an import statement is invoked
        if not self._modules_to_patch:
            builtins.__import__ = self._system_import

        return module
# Install the hook: from here on, imports go through import_hook_manager.do_import
# (which hands control back to the original builtins.__import__ once every
# registered module has been patched). The manager is also registered as a
# module in sys.modules under its own name.
import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__)
builtins.__import__ = import_hook_manager.do_import
sys.modules[import_hook_manager.__name__] = import_hook_manager

View File

@ -1,13 +0,0 @@
from _pydev_bundle._pydev_saved_modules import xmlrpclib
from _pydev_bundle._pydev_saved_modules import xmlrpcserver
SimpleXMLRPCServer = xmlrpcserver.SimpleXMLRPCServer
from _pydev_bundle._pydev_execfile import execfile
from _pydev_bundle._pydev_saved_modules import _queue
from _pydevd_bundle.pydevd_exec2 import Exec
from urllib.parse import quote, quote_plus, unquote_plus # @UnresolvedImport

View File

@ -1,97 +0,0 @@
import sys
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
import traceback
# Uncomment to force PyDev standard shell.
# raise ImportError()
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
#=======================================================================================================================
# InterpreterInterface
#=======================================================================================================================
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.

    IPython-backed console interface: delegates execution/completion to the
    frontend returned by get_pydev_frontend().
    '''

    def __init__(self, host, client_port, main_thread, show_banner=True, connect_status_queue=None):
        BaseInterpreterInterface.__init__(self, main_thread, connect_status_queue)
        self.client_port = client_port
        self.host = host
        self.interpreter = get_pydev_frontend(host, client_port)
        self._input_error_printed = False
        # Magic-command notification is retried a limited number of times.
        self.notification_succeeded = False
        self.notification_tries = 0
        self.notification_max_tries = 3
        self.show_banner = show_banner
        self.notify_about_magic()

    def get_greeting_msg(self):
        if self.show_banner:
            self.interpreter.show_banner()
        return self.interpreter.get_greeting_msg()

    def do_add_exec(self, code_fragment):
        self.notify_about_magic()
        # '??' suffix means the user asked for IPython's detailed help; frame
        # its output with markers the IDE recognizes.
        if code_fragment.text.rstrip().endswith('??'):
            print('IPython-->')
        try:
            res = bool(self.interpreter.add_exec(code_fragment.text))
        finally:
            if code_fragment.text.rstrip().endswith('??'):
                print('<--IPython')
        return res

    def get_namespace(self):
        return self.interpreter.get_namespace()

    def getCompletions(self, text, act_tok):
        return self.interpreter.getCompletions(text, act_tok)

    def close(self):
        sys.exit(0)

    def notify_about_magic(self):
        # Best-effort notification of available %magic commands to the client;
        # gives up after notification_max_tries attempts.
        if not self.notification_succeeded:
            self.notification_tries += 1
            if self.notification_tries > self.notification_max_tries:
                return
            completions = self.getCompletions("%", "%")
            magic_commands = [x[0] for x in completions]
            server = self.get_server()
            if server is not None:
                try:
                    server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
                    self.notification_succeeded = True
                except:
                    self.notification_succeeded = False

    def get_ipython_hidden_vars_dict(self):
        # Returns the IPython-internal variables which should be hidden from
        # the variables view (or None if they cannot be determined).
        try:
            if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'):
                user_ns_hidden = self.interpreter.ipython.user_ns_hidden
                if isinstance(user_ns_hidden, dict):
                    # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
                    user_hidden_dict = user_ns_hidden.copy()
                else:
                    # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
                    user_hidden_dict = dict([(key, val) for key, val in self.interpreter.ipython.user_ns.items()
                                             if key in user_ns_hidden])
                # while `_`, `__` and `___` were not initialized, they are not presented in `user_ns_hidden`
                user_hidden_dict.setdefault('_', '')
                user_hidden_dict.setdefault('__', '')
                user_hidden_dict.setdefault('___', '')
                return user_hidden_dict
        except:
            # Getting IPython variables shouldn't break loading frame variables
            traceback.print_exc()

View File

@ -1,516 +0,0 @@
# TODO that would make IPython integration better
# - show output other times then when enter was pressed
# - support proper exit to allow IPython to cleanup (e.g. temp files created with %edit)
# - support Ctrl-D (Ctrl-Z on Windows)
# - use IPython (numbered) prompts in PyDev
# - better integration of IPython and PyDev completions
# - some of the semantics on handling the code completion are not correct:
# eg: Start a line with % and then type c should give %cd as a completion by it doesn't
# however type %c and request completions and %cd is given as an option
# eg: Completing a magic when user typed it without the leading % causes the % to be inserted
# to the left of what should be the first colon.
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
for IPython 0.11 to 1.0+.
"""
from __future__ import print_function
import os
import sys
import codeop
import traceback
from IPython.core.error import UsageError
from IPython.core.completer import IPCompleter
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
from IPython.core.usage import default_banner_parts
from IPython.utils.strdispatch import StrDispatch
import IPython.core.release as IPythonRelease
from IPython.terminal.interactiveshell import TerminalInteractiveShell
try:
from traitlets import CBool, Unicode
except ImportError:
from IPython.utils.traitlets import CBool, Unicode
from IPython.core import release
from _pydev_bundle.pydev_imports import xmlrpclib
# PyDev banner: reuse IPython's default banner parts verbatim.
default_pydev_banner_parts = default_banner_parts
default_pydev_banner = ''.join(default_pydev_banner_parts)
def show_in_pager(self, strng, *args, **kwargs):
    """Pager hook for PyDev: just print the text.

    The console has scroll bars to handle "paging", so no real pager is
    needed (same behaviour as when TERM==dump, see page.py).
    """
    # For compatibility with the mime-bundle form, extract the plain-text
    # representation when a dict is received.
    text = strng.get('text/plain', strng) if isinstance(strng, dict) else strng
    print(text)
def create_editor_hook(pydev_host, pydev_client_port):
    """Build an IPython editor hook that opens files in the PyDev IDE
    (via xml-rpc to the given host/port) instead of spawning an editor."""

    def call_editor(filename, line=0, wait=True):
        """ Open an editor in PyDev """
        if line is None:
            line = 0
        # Make sure to send an absolute path because unlike most editor hooks
        # we don't launch a process. This is more like what happens in the zmqshell
        filename = os.path.abspath(filename)
        # import sys
        # sys.__stderr__.write('Calling editor at: %s:%s\n' % (pydev_host, pydev_client_port))
        # Tell PyDev to open the editor
        server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port))
        server.IPythonEditor(filename, str(line))
        # Block until the user confirms editing is done (no process to wait on).
        if wait:
            input("Press Enter when done editing:")

    return call_editor
class PyDevIPCompleter(IPCompleter):
    """IPCompleter variant that drops IPython's python-name matcher,
    since PyDev supplies python matches itself."""

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
            in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)
        # Use PyDev for python matches, see getCompletions below
        if self.python_matches in self.matchers:
            # `self.python_matches` matches attributes or global python names
            self.matchers.remove(self.python_matches)
class PyDevIPCompleter6(IPCompleter):
    """IPCompleter variant for IPython 6+, where `matchers` became a property;
    overrides it to exclude IPython's python-name matcher (PyDev supplies
    python matches itself)."""

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
            in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)

    @property
    def matchers(self):
        """All active matcher routines for completion"""
        # To remove python_matches we now have to override it as it's now a property in the superclass.
        return [
            self.file_matches,
            self.magic_matches,
            self.python_func_kw_matches,
            self.dict_key_matches,
        ]

    @matchers.setter
    def matchers(self, value):
        # To stop the init in IPCompleter raising an AttributeError we now have to specify a setter as it's now a property in the superclass.
        return
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
banner1 = Unicode(default_pydev_banner, config=True,
help="""The part of the banner to be printed before the profile"""
)
# TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
# for now, just disable term_title
term_title = CBool(False)
# Note in version 0.11 there is no guard in the IPython code about displaying a
# warning, so with 0.11 you get:
# WARNING: Readline services not available or not loaded.
# WARNING: The auto-indent feature requires the readline library
# Disable readline, readline type code is all handled by PyDev (on Java side)
readline_use = CBool(False)
# autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
# and attempting to enable it will print a warning in the absence of readline.
autoindent = CBool(False)
# Force console to not give warning about color scheme choice and default to NoColor.
# TODO It would be nice to enable colors in PyDev but:
# - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
# effect isn't as nice anyway at the command line
# - If done, the color scheme should default to LightBG, but actually be dependent on
# any settings the user has (such as if a dark theme is in use, then Linux is probably
# a better theme).
colors_force = CBool(True)
colors = Unicode("NoColor")
# Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
# the `simple_prompt` flag is needed
simple_prompt = CBool(True)
# In the PyDev Console, GUI control is done via hookable XML-RPC server
@staticmethod
def enable_gui(gui=None, app=None):
"""Switch amongst GUI input hooks by name.
"""
# Deferred import
from pydev_ipython.inputhook import enable_gui as real_enable_gui
try:
return real_enable_gui(gui, app)
except ValueError as e:
raise UsageError("%s" % e)
#-------------------------------------------------------------------------
# Things related to hooks
#-------------------------------------------------------------------------
def init_history(self):
# Disable history so that we don't have an additional thread for that
# (and we don't use the history anyways).
self.config.HistoryManager.enabled = False
super(PyDevTerminalInteractiveShell, self).init_history()
def init_hooks(self):
super(PyDevTerminalInteractiveShell, self).init_hooks()
self.set_hook('show_in_pager', show_in_pager)
#-------------------------------------------------------------------------
# Things related to exceptions
#-------------------------------------------------------------------------
def showtraceback(self, exc_tuple=None, *args, **kwargs):
# IPython does a lot of clever stuff with Exceptions. However mostly
# it is related to IPython running in a terminal instead of an IDE.
# (e.g. it prints out snippets of code around the stack trace)
# PyDev does a lot of clever stuff too, so leave exception handling
# with default print_exc that PyDev can parse and do its clever stuff
# with (e.g. it puts links back to the original source code)
try:
if exc_tuple is None:
etype, value, tb = sys.exc_info()
else:
etype, value, tb = exc_tuple
except ValueError:
return
if tb is not None:
traceback.print_exception(etype, value, tb)
#-------------------------------------------------------------------------
# Things related to text completion
#-------------------------------------------------------------------------

# The way to construct an IPCompleter changed in most versions,
# so we have a custom, per version implementation of the construction

def _new_completer_100(self):
    """Build a PyDevIPCompleter the way IPython 1.x expects (with alias_table)."""
    return PyDevIPCompleter(
        shell=self,
        namespace=self.user_ns,
        global_namespace=self.user_global_ns,
        alias_table=self.alias_manager.alias_table,
        use_readline=self.has_readline,
        parent=self,
    )
def _new_completer_234(self):
    # correct for IPython versions 2.x, 3.x, 4.x (no alias_table argument)
    return PyDevIPCompleter(
        shell=self,
        namespace=self.user_ns,
        global_namespace=self.user_global_ns,
        use_readline=self.has_readline,
        parent=self,
    )
def _new_completer_500(self):
    # IPython 5.x dropped readline support entirely.
    return PyDevIPCompleter(
        shell=self,
        namespace=self.user_ns,
        global_namespace=self.user_global_ns,
        use_readline=False,
        parent=self,
    )
def _new_completer_600(self):
    # IPython 6.x uses the PyDevIPCompleter6 variant.
    return PyDevIPCompleter6(
        shell=self,
        namespace=self.user_ns,
        global_namespace=self.user_global_ns,
        use_readline=False,
        parent=self,
    )
def add_completer_hooks(self):
    """Register the IPython 'complete_command' dispatchers on self.Completer."""
    from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer
    try:
        from IPython.core.completerlib import reset_completer
    except ImportError:
        # reset_completer was added for rel-0.13
        reset_completer = None

    self.configurables.append(self.Completer)

    # Add custom completers to the basic ones built into IPCompleter
    sdisp = self.strdispatchers.get('complete_command', StrDispatch())
    self.strdispatchers['complete_command'] = sdisp
    self.Completer.custom_completers = sdisp

    hook_table = (
        ('import', module_completer),
        ('from', module_completer),
        ('%run', magic_run_completer),
        ('%cd', cd_completer),
    )
    for key, completer_fn in hook_table:
        self.set_hook('complete_command', completer_fn, str_key=key)
    if reset_completer:
        self.set_hook('complete_command', reset_completer, str_key='%reset')
def init_completer(self):
    """Initialize the completion machinery.

    This creates a completer that provides the completions that are
    IPython specific. We use this to supplement PyDev's core code
    completions.
    """
    # PyDev uses its own completer and custom hooks so that it uses
    # most completions from PyDev's core completer which provides
    # extra information.
    # See getCompletions for where the two sets of results are merged
    major = IPythonRelease._version_major
    if major >= 6:
        self.Completer = self._new_completer_600()
    elif major >= 5:
        self.Completer = self._new_completer_500()
    elif major >= 2:
        self.Completer = self._new_completer_234()
    elif major >= 1:
        self.Completer = self._new_completer_100()

    if hasattr(self.Completer, 'use_jedi'):
        # Jedi completions would bypass the PyDev merging logic.
        self.Completer.use_jedi = False

    self.add_completer_hooks()

    if major <= 3:
        # Only configure readline if we truly are using readline. IPython can
        # do tab-completion over the network, in GUIs, etc, where readline
        # itself may be absent
        if self.has_readline:
            self.set_readline_completer()
#-------------------------------------------------------------------------
# Things related to aliases
#-------------------------------------------------------------------------

def init_alias(self):
    # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
    # ones we don't. So don't use super and instead go right to InteractiveShell
    InteractiveShell.init_alias(self)
#-------------------------------------------------------------------------
# Things related to exiting
#-------------------------------------------------------------------------

def ask_exit(self):
    """ Ask the shell to exit. Can be overiden and used as a callback. """
    # TODO PyDev's console does not have support from the Python side to exit
    # the console. If user forces the exit (with sys.exit()) then the console
    # simply reports errors. e.g.:
    # >>> import sys
    # >>> sys.exit()
    # Failed to create input stream: Connection refused
    # >>>
    # Console already exited with value: 0 while waiting for an answer.
    # Error stream:
    # Output stream:
    # >>>
    #
    # Alternatively if you use the non-IPython shell this is what happens
    # >>> exit()
    # <type 'exceptions.SystemExit'>:None
    # >>>
    # <type 'exceptions.SystemExit'>:None
    # >>>
    #
    super(PyDevTerminalInteractiveShell, self).ask_exit()
    # The super call only flags the exit; tell the user what actually works.
    print('To exit the PyDev Console, terminate the console within IDE.')
#-------------------------------------------------------------------------
# Things related to magics
#-------------------------------------------------------------------------

def init_magics(self):
    super(PyDevTerminalInteractiveShell, self).init_magics()
    # TODO Any additional magics for PyDev?

# Register as a virtual subclass so isinstance checks against the
# InteractiveShell ABC accept this shell.
InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
#=======================================================================================================================
# _PyDevFrontEnd
#=======================================================================================================================
class _PyDevFrontEnd:
    """Facade over the embedded IPython shell used by the PyDev/PyCharm console."""

    version = release.__version__

    def __init__(self):
        # Create and initialize our IPython instance.
        if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None:
            self.ipython = PyDevTerminalInteractiveShell._instance
        else:
            self.ipython = PyDevTerminalInteractiveShell.instance()

        # Counter of executed cells and buffer of pending (incomplete) lines.
        self._curr_exec_line = 0
        self._curr_exec_lines = []

    def show_banner(self):
        self.ipython.show_banner()

    def update(self, globals, locals):
        """Rebind the shell namespaces to the caller-provided dicts."""
        ns = self.ipython.user_ns

        # Preserve values already in the IPython namespace unless the caller
        # supplies its own binding for the same key.
        for key, value in list(ns.items()):
            if key not in locals:
                locals[key] = value

        self.ipython.user_global_ns.clear()
        self.ipython.user_global_ns.update(globals)

        self.ipython.user_ns = locals

        if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'):
            self.ipython.history_manager.save_thread.pydev_do_not_trace = True  # don't trace ipython history saving thread

    def complete(self, string):
        # Returns IPython's (matched_text, matches) pair, or None if the
        # completer raised (errors are deliberately swallowed).
        try:
            if string:
                return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
            else:
                return self.ipython.complete(string, string, 0)
        except:
            # Silence completer exceptions
            pass

    def is_complete(self, string):
        # Based on IPython 0.10.1
        if string in ('', '\n'):
            # Prefiltering, eg through ipython0, may return an empty
            # string although some operations have been accomplished. We
            # thus want to consider an empty string as a complete
            # statement.
            return True
        else:
            try:
                # Add line returns here, to make sure that the statement is
                # complete (except if '\' was used).
                # This should probably be done in a different place (like
                # maybe 'prefilter_input' method? For now, this works.
                clean_string = string.rstrip('\n')
                if not clean_string.endswith('\\'):
                    clean_string += '\n\n'

                is_complete = codeop.compile_command(
                    clean_string,
                    "<string>",
                    "exec"
                )
            except Exception:
                # XXX: Hack: return True so that the
                # code gets executed and the error captured.
                is_complete = True
            return is_complete

    def getCompletions(self, text, act_tok):
        # Get completions from IPython and from PyDev and merge the results
        # IPython only gives context free list of completions, while PyDev
        # gives detailed information about completions.
        try:
            TYPE_IPYTHON = '11'
            TYPE_IPYTHON_MAGIC = '12'
            _line, ipython_completions = self.complete(text)

            from _pydev_bundle._pydev_completer import Completer
            completer = Completer(self.get_namespace(), None)
            ret = completer.complete(act_tok)
            append = ret.append
            ip = self.ipython
            # Names PyDev already knows about (first tuple element is the name).
            pydev_completions = set([f[0] for f in ret])
            for ipython_completion in ipython_completions:

                # PyCharm was not expecting completions with '%'...
                # Could be fixed in the backend, but it's probably better
                # fixing it at PyCharm.
                # if ipython_completion.startswith('%'):
                #     ipython_completion = ipython_completion[1:]

                if ipython_completion not in pydev_completions:
                    pydev_completions.add(ipython_completion)
                    inf = ip.object_inspect(ipython_completion)
                    if inf['type_name'] == 'Magic function':
                        pydev_type = TYPE_IPYTHON_MAGIC
                    else:
                        pydev_type = TYPE_IPYTHON
                    pydev_doc = inf['docstring']
                    if pydev_doc is None:
                        pydev_doc = ''
                    append((ipython_completion, pydev_doc, '', pydev_type))
            return ret
        except:
            import traceback;traceback.print_exc()
            return []

    def get_namespace(self):
        return self.ipython.user_ns

    def clear_buffer(self):
        # Drop any pending (incomplete) lines.
        del self._curr_exec_lines[:]

    def add_exec(self, line):
        """Feed one console line; return True when more input is needed."""
        if self._curr_exec_lines:
            # Continuing a multi-line block: accumulate until it compiles.
            self._curr_exec_lines.append(line)

            buf = '\n'.join(self._curr_exec_lines)

            if self.is_complete(buf):
                self._curr_exec_line += 1
                self.ipython.run_cell(buf)
                del self._curr_exec_lines[:]
                return False  # execute complete (no more)

            return True  # needs more
        else:
            if not self.is_complete(line):
                # Did not execute
                self._curr_exec_lines.append(line)
                return True  # needs more
            else:
                self._curr_exec_line += 1
                self.ipython.run_cell(line, store_history=True)
                # hist = self.ipython.history_manager.output_hist_reprs
                # rep = hist.get(self._curr_exec_line, None)
                # if rep is not None:
                #     print(rep)
                return False  # execute complete (no more)

    def is_automagic(self):
        return self.ipython.automagic

    def get_greeting_msg(self):
        return 'PyDev console: using IPython %s\n' % self.version
class _PyDevFrontEndContainer:
    # Singleton holder for the _PyDevFrontEnd instance.
    _instance = None
    # (host, port) pair used the last time the editor hook was wired.
    _last_host_port = None
def get_pydev_frontend(pydev_host, pydev_client_port):
    """Return the singleton _PyDevFrontEnd, rewiring the editor hook if the
    host/port pair changed since the previous call."""
    if _PyDevFrontEndContainer._instance is None:
        _PyDevFrontEndContainer._instance = _PyDevFrontEnd()

    if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port):
        _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port

        # Back channel to PyDev to open editors (in the future other
        # info may go back this way. This is the same channel that is
        # used to get stdin, see StdIn in pydev_console_utils)
        _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port)

        # Note: setting the callback directly because setting it with set_hook would actually create a chain instead
        # of ovewriting at each new call).
        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))

    return _PyDevFrontEndContainer._instance

View File

@ -1,23 +0,0 @@
from _pydev_bundle._pydev_saved_modules import threading

# Hack for https://www.brainwy.com/tracker/PyDev/363 (i.e.: calling is_alive() can throw AssertionError under some
# circumstances).
# It is required to debug threads started by start_new_thread in Python 3.4

# Probe a throwaway Thread once to discover which private "stopped" flag this
# interpreter exposes, and define is_thread_alive accordingly.
_temp = threading.Thread()
if hasattr(_temp, '_is_stopped'):  # Python 3.x has this

    def is_thread_alive(t):
        # Read the private flag directly to avoid is_alive()'s asserts.
        return not t._is_stopped

elif hasattr(_temp, '_Thread__stopped'):  # Python 2.x has this

    def is_thread_alive(t):
        return not t._Thread__stopped

else:

    # Jython wraps a native java thread and thus only obeys the public API.
    def is_thread_alive(t):
        return t.is_alive()

del _temp

View File

@ -1,67 +0,0 @@
from _pydev_bundle._pydev_saved_modules import socket
import sys

# True when running under Jython (java-based platform string).
IS_JYTHON = sys.platform.find('java') != -1

# Cached result of get_localhost(); resolved at most once per process.
_cache = None
def get_localhost():
    '''
    Should return 127.0.0.1 in ipv4 and ::1 in ipv6

    localhost is not used because on windows vista/windows 7, there can be issues where the resolving doesn't work
    properly and takes a lot of time (had this issue on the pyunit server).

    Using the IP directly solves the problem.
    '''
    # TODO: Needs better investigation!
    global _cache
    if _cache is None:
        try:
            for addr_info in socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP):
                config = addr_info[4]
                if config[0] == '127.0.0.1':
                    _cache = '127.0.0.1'
                    return _cache
        except:
            # Ok, some versions of Python don't have getaddrinfo or SOL_TCP... Just consider it 127.0.0.1 in this case.
            _cache = '127.0.0.1'
        else:
            # Reached only when "localhost" resolved but never to 127.0.0.1
            # (e.g. IPv6-only setups): fall back to the name itself.
            _cache = 'localhost'

    return _cache
def get_socket_names(n_sockets, close=False):
    """Bind *n_sockets* sockets on free local ports and return their names.

    Returns a list of (host, port) address tuples. When *close* is true the
    sockets are closed before returning (caller only wants the addresses);
    otherwise the bound socket objects are intentionally kept open.
    """
    socket_names = []
    sockets = []
    for _ in range(n_sockets):
        if IS_JYTHON:
            # Although the option which would be pure java *should* work for Jython, the socket being returned is still 0
            # (i.e.: it doesn't give the local port bound, only the original port, which was 0).
            from java.net import ServerSocket
            sock = ServerSocket(0)
            socket_name = get_localhost(), sock.getLocalPort()
        else:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # Port 0 asks the OS for any free port.
            sock.bind((get_localhost(), 0))
            socket_name = sock.getsockname()

        sockets.append(sock)
        socket_names.append(socket_name)

    if close:
        for s in sockets:
            s.close()
    return socket_names
def get_socket_name(close=False):
    # Convenience wrapper for the single-socket case.
    return get_socket_names(1, close)[0]


if __name__ == '__main__':
    print(get_socket_name())

View File

@ -1,276 +0,0 @@
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, SHOW_COMPILE_CYTHON_COMMAND_LINE, NULL, LOG_TIME, \
    ForkSafeLock
from contextlib import contextmanager
import traceback
import os
import sys


class _LoggingGlobals(object):
    # Process-wide logging state, guarded by _initialize_lock.

    # Messages already emitted by the *_once helpers (message -> True).
    _warn_once_map = {}
    # Filename backing _debug_stream, or None when writing to stderr/NULL.
    _debug_stream_filename = None
    # Target stream; NULL silently discards writes until initialization runs.
    _debug_stream = NULL
    _debug_stream_initialized = False
    # ForkSafeLock so the lock stays usable in forked children.
    _initialize_lock = ForkSafeLock()
def initialize_debug_stream(reinitialize=False):
    '''
    :param bool reinitialize:
        Reinitialize is used to update the debug stream after a fork (thus, if it wasn't
        initialized, we don't need to do anything, just wait for the first regular log call
        to initialize).
    '''
    # Cheap unlocked fast-path check (double-checked locking below).
    if reinitialize:
        if not _LoggingGlobals._debug_stream_initialized:
            return
    else:
        if _LoggingGlobals._debug_stream_initialized:
            return

    with _LoggingGlobals._initialize_lock:
        # Initialization is done lazilly, so, it's possible that multiple threads try to initialize
        # logging.

        # Check initial conditions again after obtaining the lock.
        if reinitialize:
            if not _LoggingGlobals._debug_stream_initialized:
                return
        else:
            if _LoggingGlobals._debug_stream_initialized:
                return

        _LoggingGlobals._debug_stream_initialized = True

        # Note: we cannot initialize with sys.stderr because when forking we may end up logging things in 'os' calls.
        _LoggingGlobals._debug_stream = NULL
        _LoggingGlobals._debug_stream_filename = None

        if not DebugInfoHolder.PYDEVD_DEBUG_FILE:
            _LoggingGlobals._debug_stream = sys.stderr
        else:
            # Add pid to the filename.
            try:
                target_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
                debug_file = _compute_filename_with_pid(target_file)
                _LoggingGlobals._debug_stream = open(debug_file, 'w')
                _LoggingGlobals._debug_stream_filename = debug_file
            except Exception:
                _LoggingGlobals._debug_stream = sys.stderr
                # Don't fail when trying to setup logging, just show the exception.
                traceback.print_exc()
def _compute_filename_with_pid(target_file, pid=None):
# Note: used in tests.
dirname = os.path.dirname(target_file)
basename = os.path.basename(target_file)
try:
os.makedirs(dirname)
except Exception:
pass # Ignore error if it already exists.
name, ext = os.path.splitext(basename)
if pid is None:
pid = os.getpid()
return os.path.join(dirname, '%s.%s%s' % (name, pid, ext))
def log_to(log_file:str, log_level:int=3) -> None:
    """Redirect pydevd logging to *log_file* at verbosity *log_level*."""
    with _LoggingGlobals._initialize_lock:
        # Can be set directly.
        DebugInfoHolder.DEBUG_TRACE_LEVEL = log_level

        if DebugInfoHolder.PYDEVD_DEBUG_FILE != log_file:
            # Note that we don't need to reset it unless it actually changed
            # (would be the case where it's set as an env var in a new process
            # and a subprocess initializes logging to the same value).
            _LoggingGlobals._debug_stream = NULL
            _LoggingGlobals._debug_stream_filename = None

            DebugInfoHolder.PYDEVD_DEBUG_FILE = log_file

            # Force lazy re-initialization on the next log call.
            _LoggingGlobals._debug_stream_initialized = False
def list_log_files(pydevd_debug_file):
    """Return paths of existing log files that share the name/extension of
    *pydevd_debug_file* (covers the pid-suffixed variants produced by
    _compute_filename_with_pid)."""
    dirname, basename = os.path.split(pydevd_debug_file)
    if not os.path.isdir(dirname):
        return []
    stem, ext = os.path.splitext(basename)
    return [
        os.path.join(dirname, entry)
        for entry in os.listdir(dirname)
        if entry.startswith(stem) and entry.endswith(ext)
    ]
@contextmanager
def log_context(trace_level, stream):
    '''
    To be used to temporarily change the logging settings.
    '''
    with _LoggingGlobals._initialize_lock:
        # Snapshot everything that is about to be overridden.
        original_trace_level = DebugInfoHolder.DEBUG_TRACE_LEVEL
        original_debug_stream = _LoggingGlobals._debug_stream
        original_pydevd_debug_file = DebugInfoHolder.PYDEVD_DEBUG_FILE
        original_debug_stream_filename = _LoggingGlobals._debug_stream_filename
        original_initialized = _LoggingGlobals._debug_stream_initialized

        DebugInfoHolder.DEBUG_TRACE_LEVEL = trace_level
        _LoggingGlobals._debug_stream = stream
        _LoggingGlobals._debug_stream_initialized = True
    try:
        yield
    finally:
        # Restore under the lock (the body of the with-statement runs unlocked).
        with _LoggingGlobals._initialize_lock:
            DebugInfoHolder.DEBUG_TRACE_LEVEL = original_trace_level
            _LoggingGlobals._debug_stream = original_debug_stream
            DebugInfoHolder.PYDEVD_DEBUG_FILE = original_pydevd_debug_file
            _LoggingGlobals._debug_stream_filename = original_debug_stream_filename
            _LoggingGlobals._debug_stream_initialized = original_initialized
import time

# Timestamp of the previous log call; used for time deltas when LOG_TIME.
_last_log_time = time.time()

# Set to True to show pid in each logged message (usually the file has it, but sometimes it's handy).
_LOG_PID = False
def _pydevd_log(level, msg, *args):
    '''
    Levels are:

    0 most serious warnings/errors (always printed)
    1 warnings/significant events
    2 informational trace
    3 verbose mode
    '''
    if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL:
        # yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something)
        try:
            try:
                if args:
                    msg = msg % args
            except:
                # Formatting failed: show the raw message plus args instead.
                msg = '%s - %s' % (msg, args)

            if LOG_TIME:
                global _last_log_time
                new_log_time = time.time()
                time_diff = new_log_time - _last_log_time
                _last_log_time = new_log_time
                msg = '%.2fs - %s\n' % (time_diff, msg,)
            else:
                msg = '%s\n' % (msg,)
            if _LOG_PID:
                msg = '<%s> - %s\n' % (os.getpid(), msg,)

            try:
                try:
                    initialize_debug_stream()  # Do it as late as possible
                    _LoggingGlobals._debug_stream.write(msg)
                except TypeError:
                    if isinstance(msg, bytes):
                        # Depending on the StringIO flavor, it may only accept unicode.
                        msg = msg.decode('utf-8', 'replace')
                        _LoggingGlobals._debug_stream.write(msg)
            except UnicodeEncodeError:
                # When writing to the stream it's possible that the string can't be represented
                # in the encoding expected (in this case, convert it to the stream encoding
                # or ascii if we can't find one suitable using a suitable replace).
                encoding = getattr(_LoggingGlobals._debug_stream, 'encoding', 'ascii')
                msg = msg.encode(encoding, 'backslashreplace')
                msg = msg.decode(encoding)
                _LoggingGlobals._debug_stream.write(msg)

            _LoggingGlobals._debug_stream.flush()
        except:
            # Never let logging take the process down.
            pass
        return True
def _pydevd_log_exception(msg='', *args):
    # Log *msg* (if any) at level 0, then append the active traceback.
    if msg or args:
        _pydevd_log(0, msg, *args)
    try:
        initialize_debug_stream()  # Do it as late as possible
        traceback.print_exc(file=_LoggingGlobals._debug_stream)
        _LoggingGlobals._debug_stream.flush()
    except:
        # Caller (exception()) is responsible for swallowing this.
        raise
def verbose(msg, *args):
    # Level 3: verbose tracing.
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
        _pydevd_log(3, msg, *args)


def debug(msg, *args):
    # Level 2: informational trace.
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
        _pydevd_log(2, msg, *args)


def info(msg, *args):
    # Level 1: warnings / significant events.
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1:
        _pydevd_log(1, msg, *args)


# Warnings share the level-1 channel.
warn = info


def critical(msg, *args):
    # Level 0: always printed.
    _pydevd_log(0, msg, *args)


def exception(msg='', *args):
    try:
        _pydevd_log_exception(msg, *args)
    except:
        pass  # Should never fail (even at interpreter shutdown).


error = exception
def error_once(msg, *args):
    # Log at critical level, but only the first time this exact message occurs.
    try:
        if args:
            message = msg % args
        else:
            message = str(msg)
    except:
        # Formatting failed: fall back to raw message plus args.
        message = '%s - %s' % (msg, args)

    if message not in _LoggingGlobals._warn_once_map:
        _LoggingGlobals._warn_once_map[message] = True
        critical(message)


def exception_once(msg, *args):
    # Like error_once, but also appends the active traceback.
    try:
        if args:
            message = msg % args
        else:
            message = str(msg)
    except:
        message = '%s - %s' % (msg, args)

    if message not in _LoggingGlobals._warn_once_map:
        _LoggingGlobals._warn_once_map[message] = True
        exception(message)


def debug_once(msg, *args):
    # NOTE(review): gated on level >= 3 (verbose), not 2, despite the name.
    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 3:
        error_once(msg, *args)
def show_compile_cython_command_line():
    # One-time hint telling the user how to build the cython speedups.
    if SHOW_COMPILE_CYTHON_COMMAND_LINE:
        dirname = os.path.dirname(os.path.dirname(__file__))
        error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build.",
                   sys.executable, os.path.join(dirname, 'setup_pydevd_cython.py'))

View File

@ -1,216 +0,0 @@
from __future__ import nested_scopes
from _pydev_bundle._pydev_saved_modules import threading
import os
from _pydev_bundle import pydev_log
def set_trace_in_qt():
    # Enable debugger tracing on the Qt-spawned thread we're running on.
    from _pydevd_bundle.pydevd_comm import get_global_debugger
    py_db = get_global_debugger()
    if py_db is not None:
        threading.current_thread()  # Create the dummy thread for qt.
        py_db.enable_tracing()
# Guard so the Qt patching below is applied at most once per process.
_patched_qt = False


def patch_qt(qt_support_mode):
    '''
    This method patches qt (PySide2, PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.
    '''
    if not qt_support_mode:
        return

    if qt_support_mode is True or qt_support_mode == 'True':
        # do not break backward compatibility
        qt_support_mode = 'auto'

    if qt_support_mode == 'auto':
        qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto')

    # Avoid patching more than once
    global _patched_qt
    if _patched_qt:
        return

    pydev_log.debug('Qt support mode: %s', qt_support_mode)

    _patched_qt = True

    if qt_support_mode == 'auto':
        # Probe the bindings in preference order; first importable one wins.
        patch_qt_on_import = None
        try:
            import PySide2  # @UnresolvedImport @UnusedImport
            qt_support_mode = 'pyside2'
        except:
            try:
                # NOTE(review): module is spelled 'Pyside' (not 'PySide') — this
                # probe can never succeed for real PySide; confirm upstream.
                import Pyside  # @UnresolvedImport @UnusedImport
                qt_support_mode = 'pyside'
            except:
                try:
                    import PyQt5  # @UnresolvedImport @UnusedImport
                    qt_support_mode = 'pyqt5'
                except:
                    try:
                        import PyQt4  # @UnresolvedImport @UnusedImport
                        qt_support_mode = 'pyqt4'
                    except:
                        return

    if qt_support_mode == 'pyside2':
        try:
            import PySide2.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide2.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyside':
        try:
            import PySide.QtCore  # @UnresolvedImport
            _internal_patch_qt(PySide.QtCore, qt_support_mode)
        except:
            return

    elif qt_support_mode == 'pyqt5':
        try:
            import PyQt5.QtCore  # @UnresolvedImport
            _internal_patch_qt(PyQt5.QtCore)
        except:
            return

    elif qt_support_mode == 'pyqt4':
        # Ok, we have an issue here:
        # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
        # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
        # Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
        # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
        # as they only implements v2).
        patch_qt_on_import = 'PyQt4'

        def get_qt_core_module():
            import PyQt4.QtCore  # @UnresolvedImport
            return PyQt4.QtCore

        _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module)

    else:
        raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,))
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
    # I don't like this approach very much as we have to patch __import__, but I like even less
    # asking the user to configure something in the client side...
    # So, our approach is to patch PyQt4 right before the user tries to import it (at which
    # point he should've set the sip api version properly already anyways).

    pydev_log.debug('Setting up Qt post-import monkeypatch.')

    dotted = patch_qt_on_import + '.'
    original_import = __import__

    from _pydev_bundle._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module

    patch_sys_module()
    patch_reload()

    def patched_import(name, *args, **kwargs):
        if patch_qt_on_import == name or name.startswith(dotted):
            # First import of the target Qt package: restore the original
            # __import__, undo sys patches, then patch Qt's core module
            # before letting the real import proceed.
            builtins.__import__ = original_import
            cancel_patches_in_sys_module()
            _internal_patch_qt(get_qt_core_module())  # Patch it only when the user would import the qt module
        return original_import(name, *args, **kwargs)

    import builtins  # Py3

    builtins.__import__ = patched_import
def _internal_patch_qt(QtCore, qt_support_mode='auto'):
    """Replace QtCore.QThread / QtCore.QRunnable with wrappers that enable
    debugger tracing (via set_trace_in_qt) on the threads they spawn."""
    pydev_log.debug('Patching Qt: %s', QtCore)

    _original_thread_init = QtCore.QThread.__init__
    _original_runnable_init = QtCore.QRunnable.__init__
    _original_QThread = QtCore.QThread

    class FuncWrapper:
        # Wraps a user callback so tracing is enabled before it runs.

        def __init__(self, original):
            self._original = original

        def __call__(self, *args, **kwargs):
            set_trace_in_qt()
            return self._original(*args, **kwargs)

    class StartedSignalWrapper(QtCore.QObject):  # Wrapper for the QThread.started signal

        try:
            _signal = QtCore.Signal()  # @UndefinedVariable
        except:
            # PyQt exposes pyqtSignal instead of Signal.
            _signal = QtCore.pyqtSignal()  # @UndefinedVariable

        def __init__(self, thread, original_started):
            QtCore.QObject.__init__(self)
            self.thread = thread
            self.original_started = original_started
            if qt_support_mode in ('pyside', 'pyside2'):
                self._signal = original_started
            else:
                self._signal.connect(self._on_call)
                self.original_started.connect(self._signal)

        def connect(self, func, *args, **kwargs):
            if qt_support_mode in ('pyside', 'pyside2'):
                return self._signal.connect(FuncWrapper(func), *args, **kwargs)
            else:
                return self._signal.connect(func, *args, **kwargs)

        def disconnect(self, *args, **kwargs):
            return self._signal.disconnect(*args, **kwargs)

        def emit(self, *args, **kwargs):
            return self._signal.emit(*args, **kwargs)

        def _on_call(self, *args, **kwargs):
            set_trace_in_qt()

    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread

        def __init__(self, *args, **kwargs):
            _original_thread_init(self, *args, **kwargs)

            # In PyQt5 the program hangs when we try to call original run method of QThread class.
            # So we need to distinguish instances of QThread class and instances of QThread inheritors.
            if self.__class__.run == _original_QThread.run:
                self.run = self._exec_run
            else:
                self._original_run = self.run
                self.run = self._new_run
            self._original_started = self.started
            self.started = StartedSignalWrapper(self, self.started)

        def _exec_run(self):
            set_trace_in_qt()
            self.exec_()
            return None

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable

        def __init__(self, *args, **kwargs):
            _original_runnable_init(self, *args, **kwargs)

            self._original_run = self.run
            self.run = self._new_run

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    QtCore.QThread = ThreadWrapper
    QtCore.QRunnable = RunnableWrapper

View File

@ -1,35 +0,0 @@
def overrides(method):
    '''
    Decorator factory documenting that the decorated function overrides
    *method* from a base class. Meant to be used as

    class B:
        @overrides(A.m1)
        def m1(self):
            pass

    Raises AssertionError at definition time if the names differ, and copies
    the overridden method's docstring onto the function when it has none.
    '''

    def wrapper(func):
        if func.__name__ != method.__name__:
            raise AssertionError(
                "Wrong @override: %r expected, but overwriting %r."
                % (func.__name__, method.__name__))

        if func.__doc__ is None:
            func.__doc__ = method.__doc__

        return func

    return wrapper
def implements(method):
    """Decorator factory documenting that the decorated function implements
    *method* from an interface; AssertionError if the names do not match,
    and the interface docstring is copied when the function has none."""

    def wrapper(func):
        if func.__name__ != method.__name__:
            raise AssertionError(
                "Wrong @implements: %r expected, but implementing %r."
                % (func.__name__, method.__name__))

        if func.__doc__ is None:
            func.__doc__ = method.__doc__

        return func

    return wrapper

View File

@ -1,180 +0,0 @@
"""
The UserModuleDeleter and runfile methods are copied from
Spyder and carry their own license agreement.
http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py
Spyder License Agreement (MIT License)
--------------------------------------
Copyright (c) 2009-2012 Pierre Raybaut
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import sys
import os
from _pydev_bundle._pydev_execfile import execfile
# The following classes and functions are mainly intended to be used from
# an interactive Python session
class UserModuleDeleter:
    """
    User Module Deleter (UMD) aims at deleting user modules
    to force Python to deeply reload them during import

    pathlist [list]: ignore list in terms of module path
    namelist [list]: ignore list in terms of module name
    """

    def __init__(self, namelist=None, pathlist=None):
        # None defaults avoid the shared-mutable-default-argument pitfall.
        if namelist is None:
            namelist = []
        self.namelist = namelist
        if pathlist is None:
            pathlist = []
        self.pathlist = pathlist
        try:
            # ignore all files in org.python.pydev/pysrc
            import pydev_pysrc, inspect
            self.pathlist.append(os.path.dirname(pydev_pysrc.__file__))
        except:
            pass
        # Modules present at construction time are considered system modules
        # and are never deleted by run().
        self.previous_modules = list(sys.modules.keys())

    def is_module_ignored(self, modname, modpath):
        """Return a truthy value when *modname*/*modpath* must not be deleted
        (installed under sys.prefix / an ignored path, or name-listed)."""
        for path in [sys.prefix] + self.pathlist:
            if modpath.startswith(path):
                return True
        else:
            # Truthy when any dotted component of the name is in namelist.
            return set(modname.split('.')) & set(self.namelist)

    def run(self, verbose=False):
        """
        Del user modules to force Python to deeply reload them

        Do not del modules which are considered as system modules, i.e.
        modules installed in subdirectories of Python interpreter's binary
        Do not del C modules
        """
        log = []
        # Iterate over a copy: we delete from sys.modules while walking.
        modules_copy = dict(sys.modules)
        for modname, module in modules_copy.items():
            # (Removed leftover debug code that printed modules named 'aaaaa'.)
            if modname not in self.previous_modules:
                modpath = getattr(module, '__file__', None)
                if modpath is None:
                    # *module* is a C module that is statically linked into the
                    # interpreter. There is no way to know its path, so we
                    # choose to ignore it.
                    continue
                if not self.is_module_ignored(modname, modpath):
                    log.append(modname)
                    del sys.modules[modname]
        if verbose and log:
            print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted",
                                                     ": " + ", ".join(log)))
# Singleton UserModuleDeleter; created lazily by runfile() when UMD is enabled.
__umd__ = None

# Optional callback returning the globals of the active interpreter/shell.
_get_globals_callback = None


def _set_globals_function(get_globals):
    # Register the callback consulted by _get_globals().
    global _get_globals_callback
    _get_globals_callback = get_globals
def _get_globals():
    """Return current Python interpreter globals namespace"""
    if _get_globals_callback is not None:
        return _get_globals_callback()
    else:
        try:
            from __main__ import __dict__ as namespace
        except ImportError:
            try:
                # The import fails on IronPython
                import __main__
                namespace = __main__.__dict__
            except:
                # NOTE(review): bare 'namespace' here is a no-op expression;
                # if both imports fail, this raises NameError instead of
                # providing a fallback namespace — confirm intent upstream.
                namespace
        shell = namespace.get('__ipythonshell__')
        if shell is not None and hasattr(shell, 'user_ns'):
            # IPython 0.12+ kernel
            return shell.user_ns
        else:
            # Python interpreter
            return namespace
        # Unreachable: both branches above return.
        return namespace
def runfile(filename, args=None, wdir=None, namespace=None):
    """
    Run filename
    args: command line arguments (string)
    wdir: working directory
    """
    try:
        # Py2 bytes path: decode to text (hasattr guard keeps Py3 str as-is).
        if hasattr(filename, 'decode'):
            filename = filename.decode('utf-8')
    except (UnicodeError, TypeError):
        pass
    global __umd__
    if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true":
        if __umd__ is None:
            namelist = os.environ.get("PYDEV_UMD_NAMELIST", None)
            if namelist is not None:
                namelist = namelist.split(',')
            __umd__ = UserModuleDeleter(namelist=namelist)
        else:
            verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true"
            __umd__.run(verbose=verbose)
    if args is not None and not isinstance(args, (bytes, str)):
        raise TypeError("expected a character buffer object")
    if namespace is None:
        namespace = _get_globals()
    # Remember the caller's __file__ so it can be restored afterwards.
    if '__file__' in namespace:
        old_file = namespace['__file__']
    else:
        old_file = None
    namespace['__file__'] = filename
    sys.argv = [filename]
    if args is not None:
        for arg in args.split():
            sys.argv.append(arg)
    if wdir is not None:
        try:
            if hasattr(wdir, 'decode'):
                wdir = wdir.decode('utf-8')
        except (UnicodeError, TypeError):
            pass
        os.chdir(wdir)
    execfile(filename, namespace)
    # Reset argv and restore the previous __file__ binding.
    sys.argv = ['']
    if old_file is None:
        del namespace['__file__']
    else:
        namespace['__file__'] = old_file

View File

@ -1,16 +0,0 @@
import sys
def versionok_for_gui():
    ''' Return True if running Python is suitable for GUI Event Integration and deeper IPython integration '''
    # Unsupported: anything below 2.6, the 3.0/3.1 series, Jython, IronPython.
    hexv = sys.hexversion
    if hexv < 0x02060000:
        # We require Python 2.6+ ...
        return False
    if 0x03000000 <= hexv < 0x03020000:
        # ... or Python 3.2+
        return False
    plat = sys.platform
    if plat.startswith("java") or plat.startswith('cli'):
        # Not supported under Jython nor IronPython
        return False
    return True

View File

@ -1,857 +0,0 @@
from __future__ import nested_scopes
import fnmatch
import os.path
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
import re
import time
#=======================================================================================================================
# Configuration
#=======================================================================================================================
class Configuration:
    """Plain value object holding every option accepted by the pydev test runner."""

    def __init__(
        self,
        files_or_dirs='',
        verbosity=2,
        include_tests=None,
        tests=None,
        port=None,
        files_to_tests=None,
        jobs=1,
        split_jobs='tests',
        coverage_output_dir=None,
        coverage_include=None,
        coverage_output_file=None,
        exclude_files=None,
        exclude_tests=None,
        include_files=None,
        django=False,
    ):
        # The filter options, when given, must be sequences of patterns.
        for filters in (include_tests, exclude_files, exclude_tests):
            if filters:
                assert isinstance(filters, (list, tuple))

        self.files_or_dirs = files_or_dirs
        self.verbosity = verbosity
        self.include_tests = include_tests
        self.tests = tests
        self.port = port
        self.files_to_tests = files_to_tests
        self.jobs = jobs
        self.split_jobs = split_jobs
        self.django = django
        self.exclude_files = exclude_files
        self.include_files = include_files
        self.exclude_tests = exclude_tests
        self.coverage_output_dir = coverage_output_dir
        self.coverage_include = coverage_include
        self.coverage_output_file = coverage_output_file

    def __str__(self):
        """Multi-line human-readable dump (printed when verbosity is high)."""
        fields = (
            self.files_or_dirs,
            self.verbosity,
            self.tests,
            self.port,
            self.files_to_tests,
            self.jobs,
            self.split_jobs,
            self.include_files,
            self.include_tests,
            self.exclude_files,
            self.exclude_tests,
            self.coverage_output_dir,
            self.coverage_include,
            self.coverage_output_file,
            self.django,
        )
        return '''Configuration
- files_or_dirs: %s
- verbosity: %s
- tests: %s
- port: %s
- files_to_tests: %s
- jobs: %s
- split_jobs: %s
- include_files: %s
- include_tests: %s
- exclude_files: %s
- exclude_tests: %s
- coverage_output_dir: %s
- coverage_include_dir: %s
- coverage_output_file: %s
- django: %s
''' % fields
#=======================================================================================================================
# parse_cmdline
#=======================================================================================================================
def parse_cmdline(argv=None):
    """
    Parses command line and returns a Configuration with the test directories,
    verbosity, test filter and test suites.

    usage:
        runfiles.py  -v|--verbosity <level>  -t|--tests <Test.test1,Test2>  dirs|files

    Multiprocessing options:
    jobs=number (with the number of jobs to be used to run the tests)
    split_jobs='module'|'tests'
        if == module, a given job will always receive all the tests from a module
        if == tests, the tests will be split independently of their originating module (default)

    --exclude_files  = comma-separated list of patterns with files to exclude (fnmatch style)
    --include_files = comma-separated list of patterns with files to include (fnmatch style)
    --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style)

    Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored!
    """
    if argv is None:
        argv = sys.argv

    # Defaults.
    verbosity = 2
    include_tests = None
    tests = None
    port = None
    jobs = 1
    split_jobs = 'tests'
    files_to_tests = {}
    coverage_output_dir = None
    coverage_include = None
    exclude_files = None
    exclude_tests = None
    include_files = None
    django = False

    from _pydev_bundle._pydev_getopt import gnu_getopt
    optlist, dirs = gnu_getopt(
        argv[1:], "",
        [
            "verbosity=",
            "tests=",
            "port=",
            "config_file=",
            "jobs=",
            "split_jobs=",
            "include_tests=",
            "include_files=",
            "exclude_files=",
            "exclude_tests=",
            "coverage_output_dir=",
            "coverage_include=",
            "django="
        ]
    )

    for opt, value in optlist:
        if opt in ("-v", "--verbosity"):
            verbosity = value

        elif opt in ("-p", "--port"):
            port = int(value)

        elif opt in ("-j", "--jobs"):
            jobs = int(value)

        elif opt in ("-s", "--split_jobs"):
            split_jobs = value
            if split_jobs not in ('module', 'tests'):
                raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,))

        elif opt in ("-d", "--coverage_output_dir",):
            coverage_output_dir = value.strip()

        elif opt in ("-i", "--coverage_include",):
            coverage_include = value.strip()

        elif opt in ("-I", "--include_tests"):
            include_tests = value.split(',')

        elif opt in ("-E", "--exclude_files"):
            exclude_files = value.split(',')

        elif opt in ("-F", "--include_files"):
            include_files = value.split(',')

        elif opt in ("-e", "--exclude_tests"):
            exclude_tests = value.split(',')

        elif opt in ("-t", "--tests"):
            tests = value.split(',')

        elif opt in ("--django",):
            django = value.strip() in ['true', 'True', '1']

        elif opt in ("-c", "--config_file"):
            # Config file maps 'filename|TestClass.test_name' lines to the tests
            # that should run for each file.
            config_file = value.strip()
            if os.path.exists(config_file):
                # Context manager guarantees the handle is closed even if read() fails
                # (the previous try/finally was equivalent but more fragile).
                with open(config_file, 'r') as f:
                    config_file_contents = f.read()

                if config_file_contents:
                    config_file_contents = config_file_contents.strip()

                if config_file_contents:
                    for line in config_file_contents.splitlines():
                        file_and_test = line.split('|')
                        if len(file_and_test) == 2:
                            test_file, test = file_and_test
                            files_to_tests.setdefault(test_file, []).append(test)

            else:
                sys.stderr.write('Could not find config file: %s\n' % (config_file,))

    if not isinstance(dirs, list):
        dirs = [dirs]

    ret_dirs = []
    for d in dirs:
        if '|' in d:
            # paths may come from the ide separated by |
            ret_dirs.extend(d.split('|'))
        else:
            ret_dirs.append(d)

    verbosity = int(verbosity)

    if tests:
        # Explicit test list overrides the pattern-based filters.
        if verbosity > 4:
            sys.stdout.write('--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n')
        exclude_files = exclude_tests = include_files = None

    config = Configuration(
        ret_dirs,
        verbosity,
        include_tests,
        tests,
        port,
        files_to_tests,
        jobs,
        split_jobs,
        coverage_output_dir,
        coverage_include,
        exclude_files=exclude_files,
        exclude_tests=exclude_tests,
        include_files=include_files,
        django=django,
    )

    if verbosity > 5:
        sys.stdout.write(str(config) + '\n')

    return config
#=======================================================================================================================
# PydevTestRunner
#=======================================================================================================================
class PydevTestRunner(object):
    """ finds and runs a file or directory of files as a unit test """

    __py_extensions = ["*.py", "*.pyw"]
    __exclude_files = ["__init__.*"]

    # Just to check that only this attributes will be written to this file
    __slots__ = [
        'verbosity',  # Always used
        'files_to_tests',  # If this one is given, the ones below are not used
        'files_or_dirs',  # Files or directories received in the command line
        'include_tests',  # The filter used to collect the tests
        'tests',  # Strings with the tests to be run
        'jobs',  # Integer with the number of jobs that should be used to run the test cases
        'split_jobs',  # String with 'tests' or 'module' (how should the jobs be split)
        'configuration',
        'coverage',
    ]

    def __init__(self, configuration):
        """Copy the relevant fields from `configuration` and adjust sys.path."""
        self.verbosity = configuration.verbosity
        self.jobs = configuration.jobs
        self.split_jobs = configuration.split_jobs
        files_to_tests = configuration.files_to_tests
        if files_to_tests:
            # Explicit file->tests mapping wins over the files/dirs + filters mode.
            self.files_to_tests = files_to_tests
            self.files_or_dirs = list(files_to_tests.keys())
            self.tests = None
        else:
            self.files_to_tests = {}
            self.files_or_dirs = configuration.files_or_dirs
            self.tests = configuration.tests
        self.configuration = configuration
        self.__adjust_path()

    def __adjust_path(self):
        """ add the current file or directory to the python path """
        path_to_append = None
        for n in range(len(self.files_or_dirs)):
            dir_name = self.__unixify(self.files_or_dirs[n])
            if os.path.isdir(dir_name):
                if not dir_name.endswith("/"):
                    self.files_or_dirs[n] = dir_name + "/"
                path_to_append = os.path.normpath(dir_name)
            elif os.path.isfile(dir_name):
                path_to_append = os.path.dirname(dir_name)
            else:
                if not os.path.exists(dir_name):
                    block_line = '*' * 120
                    sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, dir_name, block_line))
                    return
                msg = ("unknown type. \n%s\nshould be file or a directory.\n" % (dir_name))
                raise RuntimeError(msg)
        if path_to_append is not None:
            # Add it as the last one (so, first things are resolved against the default dirs and
            # if none resolves, then we try a relative import).
            sys.path.append(path_to_append)

    def __is_valid_py_file(self, fname):
        """ tests that a particular file contains the proper file extension
        and is not in the list of files to exclude """
        is_valid_fname = 0
        for invalid_fname in self.__class__.__exclude_files:
            is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname))
        if_valid_ext = 0
        for ext in self.__class__.__py_extensions:
            if_valid_ext += int(fnmatch.fnmatch(fname, ext))
        return is_valid_fname > 0 and if_valid_ext > 0

    def __unixify(self, s):
        """ stupid windows. converts the backslash to forwardslash for consistency """
        return os.path.normpath(s).replace(os.sep, "/")

    def __importify(self, s, dir=False):
        """ turns directory separators into dots and removes the ".py*" extension
        so the string can be used as import statement """
        if not dir:
            dirname, fname = os.path.split(s)

            if fname.count('.') > 1:
                # if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it...
                return

            imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]]

            if len(imp_stmt_pieces[0]) == 0:
                imp_stmt_pieces = imp_stmt_pieces[1:]

            return ".".join(imp_stmt_pieces)

        else:  # handle dir
            return s.replace("\\", "/").replace("/", ".")

    def __add_files(self, pyfiles, root, files):
        """ if files match, appends them to pyfiles. used by os.path.walk fcn """
        for fname in files:
            if self.__is_valid_py_file(fname):
                name_without_base_dir = self.__unixify(os.path.join(root, fname))
                pyfiles.append(name_without_base_dir)

    def find_import_files(self):
        """ return a list of files to import """
        if self.files_to_tests:
            pyfiles = self.files_to_tests.keys()
        else:
            pyfiles = []

            for base_dir in self.files_or_dirs:
                if os.path.isdir(base_dir):
                    for root, dirs, files in os.walk(base_dir):
                        # Note: handling directories that should be excluded from the search because
                        # they don't have __init__.py
                        exclude = {}
                        for d in dirs:
                            for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']:
                                if os.path.exists(os.path.join(root, d, init).replace('\\', '/')):
                                    break
                            else:
                                # No __init__.* variant found: prune this directory.
                                exclude[d] = 1

                        if exclude:
                            new = []
                            for d in dirs:
                                if d not in exclude:
                                    new.append(d)
                            # In-place assignment so os.walk skips the pruned dirs.
                            dirs[:] = new

                        self.__add_files(pyfiles, root, files)

                elif os.path.isfile(base_dir):
                    pyfiles.append(base_dir)

        if self.configuration.exclude_files or self.configuration.include_files:
            # Apply include_files first (whitelist), then exclude_files (blacklist).
            ret = []
            for f in pyfiles:
                add = True
                basename = os.path.basename(f)
                if self.configuration.include_files:
                    add = False
                    for pat in self.configuration.include_files:
                        if fnmatch.fnmatchcase(basename, pat):
                            add = True
                            break

                if not add:
                    if self.verbosity > 3:
                        sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files))

                elif self.configuration.exclude_files:
                    for pat in self.configuration.exclude_files:
                        if fnmatch.fnmatchcase(basename, pat):
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat))

                            elif self.verbosity > 2:
                                sys.stdout.write('Skipped file: %s\n' % (f,))

                            add = False
                            break

                if add:
                    if self.verbosity > 3:
                        sys.stdout.write('Adding file: %s for test discovery.\n' % (f,))
                    ret.append(f)

            pyfiles = ret

        return pyfiles

    def __get_module_from_str(self, modname, print_exception, pyfile):
        """ Import the module in the given import path.
        * Returns the "final" module, so importing "coilib40.subject.visu"
        returns the "visu" module, not the "coilib40" as returned by __import__ """
        try:
            mod = __import__(modname)
            for part in modname.split('.')[1:]:
                mod = getattr(mod, part)
            return mod
        except:
            if print_exception:
                # Capture the import traceback and report it to the IDE via xml-rpc.
                from _pydev_runfiles import pydev_runfiles_xml_rpc
                from _pydevd_bundle import pydevd_io
                buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
                buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
                try:
                    import traceback;traceback.print_exc()
                    sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile))
                finally:
                    pydevd_io.end_redirect('stderr')
                    pydevd_io.end_redirect('stdout')

                pydev_runfiles_xml_rpc.notifyTest(
                    'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0)

            return None

    def remove_duplicates_keeping_order(self, seq):
        """Return `seq` without duplicates, preserving first-seen order."""
        seen = set()
        seen_add = seen.add
        return [x for x in seq if not (x in seen or seen_add(x))]

    def find_modules_from_files(self, pyfiles):
        """ returns a list of modules given a list of files """
        # let's make sure that the paths we want are in the pythonpath...
        imports = [(s, self.__importify(s)) for s in pyfiles]

        sys_path = [os.path.normpath(path) for path in sys.path]
        sys_path = self.remove_duplicates_keeping_order(sys_path)

        system_paths = []
        for s in sys_path:
            system_paths.append(self.__importify(s, True))

        ret = []
        for pyfile, imp in imports:
            if imp is None:
                continue  # can happen if a file is not a valid module
            choices = []
            for s in system_paths:
                if imp.startswith(s):
                    # Strip the sys.path prefix (plus the joining dot) to get
                    # the dotted module name relative to that entry.
                    add = imp[len(s) + 1:]
                    if add:
                        choices.append(add)
                    # sys.stdout.write(' ' + add + ' ')

            if not choices:
                sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp)
            else:
                for i, import_str in enumerate(choices):
                    # Only print the exception for the last candidate that fails.
                    print_exception = i == len(choices) - 1
                    mod = self.__get_module_from_str(import_str, print_exception, pyfile)
                    if mod is not None:
                        ret.append((pyfile, mod, import_str))
                        break

        return ret

    #===================================================================================================================
    # GetTestCaseNames
    #===================================================================================================================
    class GetTestCaseNames:
        """Yes, we need a class for that (cannot use outer context on jython 2.1)"""

        def __init__(self, accepted_classes, accepted_methods):
            # accepted_classes: class names for which all test* methods run.
            # accepted_methods: 'ClassName.method_name' entries for exact matches.
            self.accepted_classes = accepted_classes
            self.accepted_methods = accepted_methods

        def __call__(self, testCaseClass):
            """Return a sorted sequence of method names found within testCaseClass"""
            testFnNames = []
            className = testCaseClass.__name__

            if className in self.accepted_classes:
                for attrname in dir(testCaseClass):
                    # If a class is chosen, we select all the 'test' methods'
                    if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'):
                        testFnNames.append(attrname)

            else:
                for attrname in dir(testCaseClass):
                    # If we have the class+method name, we must do a full check and have an exact match.
                    if className + '.' + attrname in self.accepted_methods:
                        if hasattr(getattr(testCaseClass, attrname), '__call__'):
                            testFnNames.append(attrname)

            # sorted() is not available in jython 2.1
            testFnNames.sort()
            return testFnNames

    def _decorate_test_suite(self, suite, pyfile, module_name):
        """Tag each test with its originating file/module; return True when
        the suite (recursively) contains at least one TestCase."""
        import unittest
        if isinstance(suite, unittest.TestSuite):
            add = False
            suite.__pydev_pyfile__ = pyfile
            suite.__pydev_module_name__ = module_name

            for t in suite._tests:
                t.__pydev_pyfile__ = pyfile
                t.__pydev_module_name__ = module_name
                if self._decorate_test_suite(t, pyfile, module_name):
                    add = True

            return add

        elif isinstance(suite, unittest.TestCase):
            return True

        else:
            return False

    def find_tests_from_modules(self, file_and_modules_and_module_name):
        """ returns the unittests given a list of modules """
        # Use our own suite!
        from _pydev_runfiles import pydev_runfiles_unittest
        import unittest
        unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite
        loader = unittest.TestLoader()

        ret = []
        if self.files_to_tests:
            # Per-file test selection: each file carries its own accepted methods.
            for pyfile, m, module_name in file_and_modules_and_module_name:
                accepted_classes = {}
                accepted_methods = {}
                tests = self.files_to_tests[pyfile]
                for t in tests:
                    accepted_methods[t] = t

                loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

                suite = loader.loadTestsFromModule(m)
                if self._decorate_test_suite(suite, pyfile, module_name):
                    ret.append(suite)
            return ret

        if self.tests:
            # Global test selection: 'Class' selects the whole class,
            # 'Class.method' selects a single method.
            accepted_classes = {}
            accepted_methods = {}

            for t in self.tests:
                splitted = t.split('.')
                if len(splitted) == 1:
                    accepted_classes[t] = t

                elif len(splitted) == 2:
                    accepted_methods[t] = t

            loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

        for pyfile, m, module_name in file_and_modules_and_module_name:
            suite = loader.loadTestsFromModule(m)
            if self._decorate_test_suite(suite, pyfile, module_name):
                ret.append(suite)

        return ret

    def filter_tests(self, test_objs, internal_call=False):
        """ based on a filter name, only return those tests that have
        the test case names that match """
        import unittest
        if not internal_call:
            if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests:
                # No need to filter if we have nothing to filter!
                return test_objs

            if self.verbosity > 1:
                if self.configuration.include_tests:
                    sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,))

                if self.tests:
                    sys.stdout.write('Tests to run: %s\n' % (self.tests,))

                if self.configuration.exclude_tests:
                    sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,))

        test_suite = []
        for test_obj in test_objs:

            if isinstance(test_obj, unittest.TestSuite):
                # Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests).
                if test_obj._tests:
                    test_obj._tests = self.filter_tests(test_obj._tests, True)
                    if test_obj._tests:  # Only add the suite if we still have tests there.
                        test_suite.append(test_obj)

            elif isinstance(test_obj, unittest.TestCase):
                try:
                    testMethodName = test_obj._TestCase__testMethodName
                except AttributeError:
                    # changed in python 2.5
                    testMethodName = test_obj._testMethodName

                add = True
                if self.configuration.exclude_tests:
                    for pat in self.configuration.exclude_tests:
                        if fnmatch.fnmatchcase(testMethodName, pat):
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (testMethodName, pat))

                            elif self.verbosity > 2:
                                sys.stdout.write('Skipped test: %s\n' % (testMethodName,))

                            add = False
                            break

                if add:
                    if self.__match_tests(self.tests, test_obj, testMethodName):
                        include = True
                        if self.configuration.include_tests:
                            include = False
                            for pat in self.configuration.include_tests:
                                if fnmatch.fnmatchcase(testMethodName, pat):
                                    include = True
                                    break
                        if include:
                            test_suite.append(test_obj)
                        else:
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % (
                                    testMethodName, self.configuration.include_tests,))
        return test_suite

    def iter_tests(self, test_objs):
        # Note: not using yield because of Jython 2.1.
        import unittest
        tests = []
        for test_obj in test_objs:
            if isinstance(test_obj, unittest.TestSuite):
                tests.extend(self.iter_tests(test_obj._tests))

            elif isinstance(test_obj, unittest.TestCase):
                tests.append(test_obj)
        return tests

    def list_test_names(self, test_objs):
        """Return the flat list of test method names contained in `test_objs`."""
        names = []
        for tc in self.iter_tests(test_objs):
            try:
                testMethodName = tc._TestCase__testMethodName
            except AttributeError:
                # changed in python 2.5
                testMethodName = tc._testMethodName
            names.append(testMethodName)
        return names

    def __match_tests(self, tests, test_case, test_method_name):
        """Return 1 if the test matches the 'Class' / 'Class.method' filters."""
        if not tests:
            return 1

        for t in tests:
            class_and_method = t.split('.')
            if len(class_and_method) == 1:
                # only class name
                if class_and_method[0] == test_case.__class__.__name__:
                    return 1

            elif len(class_and_method) == 2:
                if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name:
                    return 1

        return 0

    def __match(self, filter_list, name):
        """ returns whether a test name matches the test filter """
        if filter_list is None:
            return 1
        for f in filter_list:
            if re.match(f, name):
                return 1
        return 0

    def run_tests(self, handle_coverage=True):
        """ runs all tests """
        sys.stdout.write("Finding files... ")
        files = self.find_import_files()
        if self.verbosity > 3:
            sys.stdout.write('%s ... done.\n' % (self.files_or_dirs))
        else:
            sys.stdout.write('done.\n')
        sys.stdout.write("Importing test modules ... ")

        if handle_coverage:
            coverage_files, coverage = start_coverage_support(self.configuration)

        file_and_modules_and_module_name = self.find_modules_from_files(files)
        sys.stdout.write("done.\n")

        all_tests = self.find_tests_from_modules(file_and_modules_and_module_name)
        all_tests = self.filter_tests(all_tests)

        from _pydev_runfiles import pydev_runfiles_unittest
        test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests)
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases())

        start_time = time.time()

        def run_tests():
            executed_in_parallel = False
            if self.jobs > 1:
                from _pydev_runfiles import pydev_runfiles_parallel

                # What may happen is that the number of jobs needed is lower than the number of jobs requested
                # (e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will
                # return False and won't run any tests.
                executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel(
                    all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include)

            if not executed_in_parallel:
                # If in coverage, we don't need to pass anything here (coverage is already enabled for this execution).
                runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity)
                sys.stdout.write('\n')
                runner.run(test_suite)

        if self.configuration.django:
            get_django_test_suite_runner()(run_tests).run_tests([])
        else:
            run_tests()

        if handle_coverage:
            coverage.stop()
            coverage.save()

        total_time = 'Finished in: %.2f secs.' % (time.time() - start_time,)
        pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time)
# Cached runner class; created lazily on first call of get_django_test_suite_runner().
DJANGO_TEST_SUITE_RUNNER = None


def get_django_test_suite_runner():
    """Return (and cache) a django test-suite runner class whose run_suite()
    delegates to the callable passed to its constructor, working across the
    django >= 1.8 (DiscoverRunner) and django < 1.8 (DjangoTestSuiteRunner) APIs."""
    global DJANGO_TEST_SUITE_RUNNER
    if DJANGO_TEST_SUITE_RUNNER:
        return DJANGO_TEST_SUITE_RUNNER
    try:
        # django >= 1.8
        import django
        from django.test.runner import DiscoverRunner

        class MyDjangoTestSuiteRunner(DiscoverRunner):

            def __init__(self, on_run_suite):
                django.setup()
                DiscoverRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                # Suite building is handled by pydev itself.
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    except:
        # django < 1.8
        try:
            from django.test.simple import DjangoTestSuiteRunner
        except:

            # No usable django import: stub that fails loudly only when actually used.
            class DjangoTestSuiteRunner:

                def __init__(self):
                    pass

                def run_tests(self, *args, **kwargs):
                    raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.")

        class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner):

            def __init__(self, on_run_suite):
                DjangoTestSuiteRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner
    return DJANGO_TEST_SUITE_RUNNER
#=======================================================================================================================
# main
#=======================================================================================================================
def main(configuration):
    """Entry point: build a PydevTestRunner for *configuration* and run all tests."""
    runner = PydevTestRunner(configuration)
    runner.run_tests()

View File

@ -1,76 +0,0 @@
import os.path
import sys
from _pydevd_bundle.pydevd_constants import Null
#=======================================================================================================================
# get_coverage_files
#=======================================================================================================================
def get_coverage_files(coverage_output_dir, number_of_files):
    """Reserve *number_of_files* names of the form '.coverage.<i>' inside
    *coverage_output_dir* that do not exist yet, and return them as a list.

    Note: nothing is created on disk; concurrent callers could race.
    """
    result = []
    suffix = 0
    while len(result) < number_of_files:
        candidate = os.path.join(coverage_output_dir, '.coverage.%s' % suffix)
        suffix += 1
        # Skip names already taken by a previous run.
        if not os.path.exists(candidate):
            result.append(candidate)
    return result
#=======================================================================================================================
# start_coverage_support
#=======================================================================================================================
def start_coverage_support(configuration):
    """Convenience wrapper: start coverage support using the coverage-related
    fields of *configuration* (see start_coverage_support_from_params)."""
    return start_coverage_support_from_params(
        coverage_output_dir=configuration.coverage_output_dir,
        coverage_output_file=configuration.coverage_output_file,
        jobs=configuration.jobs,
        coverage_include=configuration.coverage_include,
    )
#=======================================================================================================================
# start_coverage_support_from_params
#=======================================================================================================================
def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include):
    """
    Start coverage measuring for this process, if configured.

    Returns (coverage_files, coverage_instance):
    - coverage_files: data-file names reserved for child processes (parallel runs);
      empty when coverage is disabled or configured with an explicit output file.
    - coverage_instance: a started coverage object, or a Null no-op object when
      coverage is disabled or could not be set up.

    Side effect: sets the COVERAGE_FILE environment variable so the coverage
    module writes to the reserved data file.
    """
    coverage_files = []
    coverage_instance = Null()
    if coverage_output_dir or coverage_output_file:
        try:
            import coverage #@UnresolvedImport
        except:
            sys.stderr.write('Error: coverage module could not be imported\n')
            sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n')
            sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,))

            import traceback;traceback.print_exc()
        else:
            if coverage_output_dir:
                if not os.path.exists(coverage_output_dir):
                    sys.stderr.write('Error: directory for coverage output (%s) does not exist.\n' % (coverage_output_dir,))

                elif not os.path.isdir(coverage_output_dir):
                    sys.stderr.write('Error: expected (%s) to be a directory.\n' % (coverage_output_dir,))

                else:
                    # Reserve one data file per job plus one for this process.
                    n = jobs
                    if n <= 0:
                        n += 1
                    n += 1 #Add 1 more for the current process (which will do the initial import).
                    coverage_files = get_coverage_files(coverage_output_dir, n)
                    os.environ['COVERAGE_FILE'] = coverage_files.pop(0)

                    coverage_instance = coverage.coverage(source=[coverage_include])
                    coverage_instance.start()

            elif coverage_output_file:
                #Client of parallel run.
                os.environ['COVERAGE_FILE'] = coverage_output_file
                coverage_instance = coverage.coverage(source=[coverage_include])
                coverage_instance.start()

    return coverage_files, coverage_instance

View File

@ -1,207 +0,0 @@
from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport
from nose.plugins.base import Plugin # @UnresolvedImport
import sys
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
from contextlib import contextmanager
from io import StringIO
import traceback
#=======================================================================================================================
# PydevPlugin
#=======================================================================================================================
class PydevPlugin(Plugin):
    """Nose plugin that reports test progress/results to the PyDev IDE via
    xml-rpc and manages coverage support for the run."""

    def __init__(self, configuration):
        self.configuration = configuration
        Plugin.__init__(self)

    def begin(self):
        # Called before any test is run (it's always called, with multiprocess or not)
        self.start_time = time.time()
        self.coverage_files, self.coverage = start_coverage_support(self.configuration)

    def finalize(self, result):
        # Called after all tests are run (it's always called, with multiprocess or not)
        self.coverage.stop()
        self.coverage.save()
        pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,))

    #===================================================================================================================
    # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate
    # so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this
    # process).
    #===================================================================================================================
    class Sentinel(object):
        # Unique marker meaning 'attribute was not present' in _without_user_address.
        pass

    @contextmanager
    def _without_user_address(self, test):
        # #PyDev-1095: Conflict between address in test and test.address() in PydevPlugin().report_cond()
        # Temporarily removes a user-defined 'address' attribute (instance or class
        # level) so that nose's test.address() is not shadowed; restores it afterwards.
        user_test_instance = test.test
        user_address = self.Sentinel
        user_class_address = self.Sentinel
        try:
            if 'address' in user_test_instance.__dict__:
                user_address = user_test_instance.__dict__.pop('address')
        except:
            # Just ignore anything here.
            pass
        try:
            user_class_address = user_test_instance.__class__.address
            del user_test_instance.__class__.address
        except:
            # Just ignore anything here.
            pass
        try:
            yield
        finally:
            if user_address is not self.Sentinel:
                user_test_instance.__dict__['address'] = user_address

            if user_class_address is not self.Sentinel:
                user_test_instance.__class__.address = user_class_address

    def _get_test_address(self, test):
        """Return (file, test_id) for `test`, covering both regular nose tests
        and the tuples/contexts seen in multiprocess runs."""
        try:
            if hasattr(test, 'address'):
                with self._without_user_address(test):
                    address = test.address()

                # test.address() is something as:
                # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1')
                #
                # and we must pass: location, test
                #    E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1']
                address = address[0], address[2]
            else:
                # multiprocess
                try:
                    address = test[0], test[1]
                except TypeError:
                    # It may be an error at setup, in which case it's not really a test, but a Context object.
                    f = test.context.__file__
                    if f.endswith('.pyc'):
                        f = f[:-1]
                    elif f.endswith('$py.class'):
                        f = f[:-len('$py.class')] + '.py'
                    address = f, '?'
        except:
            sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
            traceback.print_exc()
            sys.stderr.write("\n\n\n")
            address = '?', '?'
        return address

    def report_cond(self, cond, test, captured_output, error=''):
        '''
        @param cond: fail, error, ok
        '''
        address = self._get_test_address(test)

        error_contents = self.get_io_from_error(error)
        try:
            time_str = '%.2f' % (time.time() - test._pydev_start_time)
        except:
            time_str = '?'

        pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str)

    def startTest(self, test):
        # Stamp the start time so report_cond can compute the test duration.
        test._pydev_start_time = time.time()
        file, test = self._get_test_address(test)
        pydev_runfiles_xml_rpc.notifyStartTest(file, test)

    def get_io_from_error(self, err):
        """Render an error (exc_info tuple or plain value) as a string."""
        if type(err) == type(()):
            if len(err) != 3:
                if len(err) == 2:
                    return err[1]  # multiprocess
            s = StringIO()
            etype, value, tb = err
            if isinstance(value, str):
                return value
            traceback.print_exception(etype, value, tb, file=s)
            return s.getvalue()
        return err

    def get_captured_output(self, test):
        """Return nose's captured stdout for `test`, if any."""
        if hasattr(test, 'capturedOutput') and test.capturedOutput:
            return test.capturedOutput
        return ''

    def addError(self, test, err):
        self.report_cond(
            'error',
            test,
            self.get_captured_output(test),
            err,
        )

    def addFailure(self, test, err):
        self.report_cond(
            'fail',
            test,
            self.get_captured_output(test),
            err,
        )

    def addSuccess(self, test):
        self.report_cond(
            'ok',
            test,
            self.get_captured_output(test),
            '',
        )
# Module-level singleton; populated by start_pydev_nose_plugin_singleton().
PYDEV_NOSE_PLUGIN_SINGLETON = None


def start_pydev_nose_plugin_singleton(configuration):
    """Create the PydevPlugin for *configuration*, store it as the module
    singleton and return it."""
    global PYDEV_NOSE_PLUGIN_SINGLETON
    plugin = PydevPlugin(configuration)
    PYDEV_NOSE_PLUGIN_SINGLETON = plugin
    return plugin
# Keep the original implementation so new_consolidate can delegate to it.
original = MultiProcessTestRunner.consolidate


#=======================================================================================================================
# new_consolidate
#=======================================================================================================================
def new_consolidate(self, result, batch_result):
    '''
    Used so that it can work with the multiprocess plugin.
    Monkeypatched because nose seems a bit unsupported at this time (ideally
    the plugin would have this support by default).
    '''
    ret = original(self, result, batch_result)

    # Frame hack: read the caller's local 'addr' (nose does not pass it in).
    parent_frame = sys._getframe().f_back
    # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4
    # so, convert it to what report_cond expects
    addr = parent_frame.f_locals['addr']
    i = addr.rindex(':')
    addr = [addr[:i], addr[i + 1:]]

    output, testsRun, failures, errors, errorClasses = batch_result
    if failures or errors:
        for failure in failures:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure)

        for error in errors:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error)
    else:
        PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output)
    return ret


# Install the patched consolidate so per-batch results reach the IDE.
MultiProcessTestRunner.consolidate = new_consolidate

View File

@ -1,267 +0,0 @@
import unittest
from _pydev_bundle._pydev_saved_modules import thread
import queue as Queue
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
import os
import threading
import sys
#=======================================================================================================================
# flatten_test_suite
#=======================================================================================================================
def flatten_test_suite(test_suite, ret):
    """Recursively collect every TestCase contained in *test_suite* into the list *ret*."""
    if isinstance(test_suite, unittest.TestCase):
        ret.append(test_suite)
    elif isinstance(test_suite, unittest.TestSuite):
        for child in test_suite._tests:
            flatten_test_suite(child, ret)
#=======================================================================================================================
# execute_tests_in_parallel
#=======================================================================================================================
def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include):
    '''
    @param tests: list(PydevTestSuite)
        A list with the suites to be run

    @param split: str
        Either 'module' or the number of tests that should be run in each batch

    @param coverage_files: list(file)
        A list with the files that should be used for giving coverage information (if empty, coverage information
        should not be gathered).

    @param coverage_include: str
        The pattern that should be included in the coverage.

    @return: bool
        Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1
        should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be
        run.
        It may also return False if in debug mode (in which case, multi-processes are not accepted)
    '''
    # Parallel runs are disabled while a debugger is attached.
    try:
        from _pydevd_bundle.pydevd_comm import get_global_debugger
        if get_global_debugger() is not None:
            return False
    except:
        pass  # Ignore any error here.

    # This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When
    # split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests
    # from a given module.
    tests_queue = []

    queue_elements = []
    if split == 'module':
        # Group tests by (file, module) so each batch holds one module's tests.
        module_to_tests = {}
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                key = (test.__pydev_pyfile__, test.__pydev_module_name__)
                module_to_tests.setdefault(key, []).append(test)

        for key, tests in module_to_tests.items():
            queue_elements.append(tests)

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    elif split == 'tests':
        # One batch per individual test.
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                queue_elements.append([test])

        if len(queue_elements) < jobs:
            # Don't create jobs we will never use.
            jobs = len(queue_elements)

    else:
        raise AssertionError('Do not know how to handle: %s' % (split,))

    # Serialize each batch as 'filename|Class.testName' strings for the clients.
    for test_cases in queue_elements:
        test_queue_elements = []
        for test_case in test_cases:
            try:
                test_name = test_case.__class__.__name__ + "." + test_case._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                test_name = test_case.__class__.__name__ + "." + test_case._TestCase__testMethodName

            test_queue_elements.append(test_case.__pydev_pyfile__ + '|' + test_name)

        tests_queue.append(test_queue_elements)

    if jobs < 2:
        # Not worth spawning processes for a single job.
        return False

    sys.stdout.write('Running tests in parallel with: %s jobs.\n' % (jobs,))

    queue = Queue.Queue()
    for item in tests_queue:
        queue.put(item, block=False)

    # One CommunicationThread (xml-rpc provider) + one ClientThread (spawned
    # process) per job.
    providers = []
    clients = []
    for i in range(jobs):
        test_cases_provider = CommunicationThread(queue)
        providers.append(test_cases_provider)

        test_cases_provider.start()
        port = test_cases_provider.port

        if coverage_files:
            clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include))
        else:
            clients.append(ClientThread(i, port, verbosity))

    for client in clients:
        client.start()

    # Poll until every client process has exited.
    client_alive = True
    while client_alive:
        client_alive = False
        for client in clients:
            # Wait for all the clients to exit.
            if not client.finished:
                client_alive = True
                time.sleep(.2)
                break

    for provider in providers:
        provider.shutdown()

    return True
#=======================================================================================================================
# CommunicationThread
#=======================================================================================================================
class CommunicationThread(threading.Thread):
    # XML-RPC server thread: hands out batches of tests to the spawned client
    # processes and relays their notifications to pydev_runfiles_xml_rpc.

    def __init__(self, tests_queue):
        threading.Thread.__init__(self)
        self.daemon = True
        self.queue = tests_queue
        self.finished = False
        from _pydev_bundle.pydev_imports import SimpleXMLRPCServer
        from _pydev_bundle import pydev_localhost

        # Create server (port 0 lets the OS pick a free port; the chosen port
        # is published through self.port for the ClientThread).
        server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False)
        server.register_function(self.GetTestsToRun)
        server.register_function(self.notifyStartTest)
        server.register_function(self.notifyTest)
        server.register_function(self.notifyCommands)
        self.port = server.socket.getsockname()[1]
        self.server = server

    def GetTestsToRun(self, job_id):
        '''
        @param job_id:

        @return: list(str)
            Each entry is a string in the format: filename|Test.testName
        '''
        try:
            ret = self.queue.get(block=False)
            return ret
        except:  # Any exception getting from the queue (empty or not) means we finished our work on providing the tests.
            self.finished = True
            return []

    def notifyCommands(self, job_id, commands):
        # Batch notification: each command is (method_name, args, kwargs).
        for command in commands:
            getattr(self, command[0])(job_id, *command[1], **command[2])

        return True

    def notifyStartTest(self, job_id, *args, **kwargs):
        pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs)
        return True

    def notifyTest(self, job_id, *args, **kwargs):
        pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs)
        return True

    def shutdown(self):
        # Older server implementations may lack shutdown(); fall back to a
        # flag polled by the handle_request() loop in run().
        if hasattr(self.server, 'shutdown'):
            self.server.shutdown()
        else:
            self._shutdown = True

    def run(self):
        if hasattr(self.server, 'shutdown'):
            self.server.serve_forever()
        else:
            self._shutdown = False
            while not self._shutdown:
                self.server.handle_request()
#=======================================================================================================================
# Client
#=======================================================================================================================
class ClientThread(threading.Thread):
    """Launches one external client process that runs a share of the tests.

    The client script (pydev_runfiles_parallel_client) connects back to the
    CommunicationThread xml-rpc server listening on *port* to fetch the tests
    it should run.
    """

    def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.port = port
        self.job_id = job_id
        self.verbosity = verbosity
        self.finished = False
        self.coverage_output_file = coverage_output_file
        self.coverage_include = coverage_include

    def _reader_thread(self, pipe, target):
        # Forward the child's pipe to our own stream, byte by byte.
        while True:
            target.write(pipe.read(1))

    def run(self):
        try:
            from _pydev_runfiles import pydev_runfiles_parallel_client
            # TODO: Support Jython:
            #
            # For jython, instead of using sys.executable, we should use:
            # r'D:\bin\jdk_1_5_09\bin\java.exe',
            # '-classpath',
            # 'D:/bin/jython-2.2.1/jython.jar',
            # 'org.python.util.jython',

            args = [
                sys.executable,
                pydev_runfiles_parallel_client.__file__,
                str(self.job_id),
                str(self.port),
                str(self.verbosity),
            ]

            if self.coverage_output_file and self.coverage_include:
                args.append(self.coverage_output_file)
                args.append(self.coverage_include)

            import subprocess

            if False:
                # Debug-only path: pipe the child's output through reader threads.
                proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

                thread.start_new_thread(self._reader_thread, (proc.stdout, sys.stdout))
                # Fix: start_new_thread takes positional (function, args); the
                # previous keyword form (target=..., args=...) raised TypeError.
                thread.start_new_thread(self._reader_thread, (proc.stderr, sys.stderr))
            else:
                proc = subprocess.Popen(args, env=os.environ, shell=False)
                proc.wait()

        finally:
            # Always flag completion so execute_tests_in_parallel's poll loop exits.
            self.finished = True

View File

@ -1,214 +0,0 @@
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
Queue = _queue.Queue
import traceback
import sys
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params
import threading
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    """Value object holding one queued notification: method name plus its call arguments."""

    def __init__(self, method, args, kwargs):
        self.method = method
        self.args = args
        self.kwargs = kwargs

    def to_tuple(self):
        """Return the (method, args, kwargs) triple used for batch transmission."""
        return (self.method, self.args, self.kwargs)
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    # Sentinel put on the notifications queue to make ServerComm.run() exit.
    pass
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    """Thread that drains the notifications queue and forwards commands in
    batches to the main process through the (lock-protected) xml-rpc *server*."""

    def __init__(self, job_id, server):
        self.notifications_queue = Queue()
        threading.Thread.__init__(self)
        # Wait for all the notifications to be passed before exiting!
        # (self.daemon assignment instead of the deprecated setDaemon()).
        self.daemon = False
        assert job_id is not None
        # Fix: an `assert port is not None` used to live here, but `port` is
        # not a parameter of this constructor, so it always raised NameError.
        self.job_id = job_id

        self.finished = False
        self.server = server

    def run(self):
        # Block for the first command, then drain the queue without blocking
        # so several notifications are sent in a single notifyCommands batch.
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  #No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  #That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    #Batch notification (under the server lock shared with run_client).
                    self.server.lock.acquire()
                    try:
                        self.server.notifyCommands(self.job_id, commands)
                    finally:
                        self.server.lock.release()
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    """Facade exposing the server notification API: forwarded calls become
    ParallelNotification entries on the queue (sent later by ServerComm)."""

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def notifyTestsCollected(self, *args, **kwargs):
        pass  #This notification won't be passed

    def notifyTestRunFinished(self, *args, **kwargs):
        pass  #This notification won't be passed

    def notifyStartTest(self, *args, **kwargs):
        notification = ParallelNotification('notifyStartTest', args, kwargs)
        self.notifications_queue.put_nowait(notification)

    def notifyTest(self, *args, **kwargs):
        notification = ParallelNotification('notifyTest', args, kwargs)
        self.notifications_queue.put_nowait(notification)
#=======================================================================================================================
# run_client
#=======================================================================================================================
def run_client(job_id, port, verbosity, coverage_output_file, coverage_include):
    """Worker-process entry: repeatedly fetch test batches from the xml-rpc
    server on *port* and run them, reporting results through a ServerComm
    thread. Coverage is started lazily on the first batch received."""
    job_id = int(job_id)

    from _pydev_bundle import pydev_localhost
    server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port))
    # Shared lock: ServerComm also calls into this proxy from its own thread.
    server.lock = threading.Lock()

    server_comm = ServerComm(job_id, server)
    server_comm.start()

    try:
        server_facade = ServerFacade(server_comm.notifications_queue)
        from _pydev_runfiles import pydev_runfiles
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.set_server(server_facade)

        #Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated
        #before receiving any test -- which would mean a different process got all the tests to run).
        coverage = None

        try:
            tests_to_run = [1]
            while tests_to_run:
                #Investigate: is it dangerous to use the same xmlrpclib server from different threads?
                #It seems it should be, as it creates a new connection for each request...
                server.lock.acquire()
                try:
                    tests_to_run = server.GetTestsToRun(job_id)
                finally:
                    server.lock.release()

                if not tests_to_run:
                    break

                if coverage is None:
                    _coverage_files, coverage = start_coverage_support_from_params(
                        None, coverage_output_file, 1, coverage_include)

                # Entries arrive as 'filename|Class.testName'; group by file.
                files_to_tests = {}
                for test in tests_to_run:
                    filename_and_test = test.split('|')
                    if len(filename_and_test) == 2:
                        files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1])

                configuration = pydev_runfiles.Configuration(
                    '',
                    verbosity,
                    None,
                    None,
                    None,
                    files_to_tests,
                    1,  #Always single job here
                    None,

                    #The coverage is handled in this loop.
                    coverage_output_file=None,
                    coverage_include=None,
                )
                test_runner = pydev_runfiles.PydevTestRunner(configuration)
                sys.stdout.flush()
                test_runner.run_tests(handle_coverage=False)
        finally:
            if coverage is not None:
                coverage.stop()
                coverage.save()

    except:
        traceback.print_exc()

    # Always tell ServerComm to flush remaining notifications and exit.
    server_comm.notifications_queue.put_nowait(KillServer())
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Expected argv: job_id port verbosity [coverage_output_file coverage_include]
    if len(sys.argv) - 1 == 3:
        job_id, port, verbosity = sys.argv[1:]
        coverage_output_file, coverage_include = None, None

    elif len(sys.argv) - 1 == 5:
        job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:]

    else:
        # Fix: the original concatenated str + list ('...' + sys.argv[1:]),
        # which raised a TypeError instead of this AssertionError message.
        raise AssertionError('Could not find out how to handle the parameters: %s' % (sys.argv[1:],))

    job_id = int(job_id)
    port = int(port)
    verbosity = int(verbosity)
    run_client(job_id, port, verbosity, coverage_output_file, coverage_include)

View File

@ -1,306 +0,0 @@
from _pydev_runfiles import pydev_runfiles_xml_rpc
import pickle
import zlib
import base64
import os
from pydevd_file_utils import canonical_normalized_path
import pytest
import sys
import time
from pathlib import Path
#=========================================================================
# Load filters with tests we should skip
#=========================================================================
# Filter with the tests that should be accepted; loaded lazily (once) from
# the PYDEV_PYTEST_SKIP environment variable.
py_test_accept_filter = None


def _load_filters():
    """Populate py_test_accept_filter from PYDEV_PYTEST_SKIP (no-op after the first call)."""
    global py_test_accept_filter
    if py_test_accept_filter is not None:
        return

    serialized = os.environ.get('PYDEV_PYTEST_SKIP')
    if not serialized:
        py_test_accept_filter = {}
        return

    py_test_accept_filter = pickle.loads(
        zlib.decompress(base64.b64decode(serialized)))

    # Newer versions of pytest resolve symlinks, so, we
    # may need to filter with a resolved path too.
    resolved_entries = {}
    for filename, value in py_test_accept_filter.items():
        resolved_entries[canonical_normalized_path(str(Path(filename).resolve()))] = value

    py_test_accept_filter.update(resolved_entries)
def is_in_xdist_node():
    """Return True when running inside an xdist worker (PYDEV_MAIN_PID differs from our pid)."""
    main_pid = os.environ.get('PYDEV_MAIN_PID')
    return bool(main_pid) and main_pid != str(os.getpid())
# Whether this process has already tried to connect (only attempted once).
connected = False


def connect_to_server_for_communication_to_xml_rpc_on_xdist():
    """On xdist worker nodes, connect to the main process' xml-rpc server (once per process)."""
    global connected
    if connected:
        return
    connected = True
    if is_in_xdist_node():
        # The main process publishes its server port through this env var.
        port = os.environ.get('PYDEV_PYTEST_SERVER')
        if not port:
            sys.stderr.write(
                'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n')
        else:
            pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
# Python 2/3 flags (kept for backward compatibility with older callers).
PY2 = sys.version_info[0] <= 2
PY3 = not PY2


class State:
    # Process-wide state: run start time plus the stdout/stderr redirection
    # buffers installed by start_redirect() (None while not redirecting).
    start_time = time.time()
    buf_err = None
    buf_out = None
def start_redirect():
    """Install stdout/stderr capture into State's buffers (idempotent).

    Originals are kept so output still reaches the real streams.
    """
    if State.buf_out is not None:
        return
    from _pydevd_bundle import pydevd_io
    State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
    State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
def get_curr_output():
    """Return (stdout_text, stderr_text) captured so far; '' for a stream not being redirected."""
    out = State.buf_out.getvalue() if State.buf_out is not None else ''
    err = State.buf_err.getvalue() if State.buf_err is not None else ''
    return out, err
def pytest_unconfigure():
    # pytest hook: called when the test session is being torn down.
    if is_in_xdist_node():
        return
    # Only report that it finished when on the main node (we don't want to report
    # the finish on each separate node).
    pydev_runfiles_xml_rpc.notifyTestRunFinished(
        'Finished in: %.2f secs.' % (time.time() - State.start_time,))
def pytest_collection_modifyitems(session, config, items):
    """pytest hook: filter collected *items* down to the accepted tests.

    The accept filter maps normalized file path -> list of accepted test
    names ('testName', 'Class.testName' or bare class name). When no filter
    is configured, all items are kept.
    """
    # A note: in xdist, this is not called on the main process, only in the
    # secondary nodes, so, we'll actually make the filter and report it multiple
    # times.
    connect_to_server_for_communication_to_xml_rpc_on_xdist()

    _load_filters()
    if not py_test_accept_filter:
        pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
        return  # Keep on going (nothing to filter)

    new_items = []
    for item in items:
        f = canonical_normalized_path(str(item.parent.fspath))
        name = item.name

        if f not in py_test_accept_filter:
            # print('Skip file: %s' % (f,))
            continue  # Skip the file

        # Strip a '[param]' suffix so parametrized tests can match their base name.
        i = name.find('[')
        name_without_parametrize = None
        if i > 0:
            name_without_parametrize = name[:i]

        accept_tests = py_test_accept_filter[f]

        if item.cls is not None:
            class_name = item.cls.__name__
        else:
            class_name = None
        for test in accept_tests:
            if test == name:
                # Direct match of the test (just go on with the default
                # loading)
                new_items.append(item)
                break

            if name_without_parametrize is not None and test == name_without_parametrize:
                # This happens when parameterizing pytest tests on older versions
                # of pytest where the test name doesn't include the fixture name
                # in it.
                new_items.append(item)
                break

            if class_name is not None:
                if test == class_name + '.' + name:
                    new_items.append(item)
                    break

                if name_without_parametrize is not None and test == class_name + '.' + name_without_parametrize:
                    new_items.append(item)
                    break

            if class_name == test:
                new_items.append(item)
                break
        else:
            # for/else: no accepted name matched this item -- drop it.
            pass
            # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))

    # Modify the original list
    items[:] = new_items
    pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
# pytest > 5.4 ships its own TerminalWriter (based on py.io.TerminalWriter)
# and assumes the specific method TerminalWriter._write_source exists, so try
# to load the pytest version first and fall back to the py.io one.
# (Fix: this explanation used to be a no-op string literal inside the try
# block, which read like a docstring but was just evaluated and discarded.)
try:
    from _pytest._io import TerminalWriter
except ImportError:
    from py.io import TerminalWriter
def _get_error_contents_from_report(report):
    # Render report.longrepr (the failure/error representation) to plain text
    # with a markup-free TerminalWriter; returns '' when there is nothing.
    if report.longrepr is not None:
        try:
            # Some TerminalWriter variants accept stringio=True and expose .stringio...
            tw = TerminalWriter(stringio=True)
            stringio = tw.stringio
        except TypeError:
            # ...others take an explicit file object instead.
            import io
            stringio = io.StringIO()
            tw = TerminalWriter(file=stringio)
        tw.hasmarkup = False
        report.toterminal(tw)

        exc = stringio.getvalue()
        s = exc.strip()
        if s:
            return s

    return ''
def pytest_collectreport(report):
    """pytest hook: report collection-time errors as a synthetic failing test."""
    error_contents = _get_error_contents_from_report(report)
    if not error_contents:
        return
    report_test('fail', '<collect errors>', '<collect errors>', '', error_contents, 0.0)
def append_strings(s1, s2):
    """Concatenate two str/bytes values; on a type mismatch, decode bytes to str first."""
    if s1.__class__ != s2.__class__:
        # Prefer str
        if isinstance(s1, bytes):
            s1 = s1.decode('utf-8', 'replace')
        if isinstance(s2, bytes):
            s2 = s2.decode('utf-8', 'replace')
    return s1 + s2
def pytest_runtest_logreport(report):
    """pytest hook: translate a TestReport into a single pydev report_test call.

    Maps pytest outcomes (passed/skipped/failed per setup/call/teardown phase)
    to the pydev statuses 'ok', 'skip', 'fail' and 'error', and merges captured
    output/error text from the report sections.
    """
    if is_in_xdist_node():
        # When running with xdist, we don't want the report to be called from the node, only
        # from the main process.
        return

    report_duration = report.duration
    report_when = report.when
    report_outcome = report.outcome

    if hasattr(report, 'wasxfail'):
        if report_outcome != 'skipped':
            report_outcome = 'passed'

    if report_outcome == 'passed':
        # passed on setup/teardown: no need to report if in setup or teardown
        # (only on the actual test if it passed).
        if report_when in ('setup', 'teardown'):
            return

        status = 'ok'

    elif report_outcome == 'skipped':
        status = 'skip'

    else:
        # It has only passed, skipped and failed (no error), so, let's consider
        # error if not on call.
        if report_when in ('setup', 'teardown'):
            status = 'error'

        else:
            # any error in the call (not in setup or teardown) is considered a
            # regular failure.
            status = 'fail'

    # This will work if pytest is not capturing it, if it is, nothing will
    # come from here...
    captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '')
    for type_section, value in report.sections:
        if value:
            if type_section in ('err', 'stderr', 'Captured stderr call'):
                error_contents = append_strings(error_contents, value)
            else:
                # Fix: append to the output accumulator -- this used to append
                # to error_contents, discarding any previously captured output
                # and duplicating the stderr text into the output.
                captured_output = append_strings(captured_output, value)

    filename = getattr(report, 'pydev_fspath_strpath', '<unable to get>')
    test = report.location[2]

    if report_outcome != 'skipped':
        # On skipped, we'll have a traceback for the skip, which is not what we
        # want.
        exc = _get_error_contents_from_report(report)
        if exc:
            if error_contents:
                error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n')
            error_contents = append_strings(error_contents, exc)

    report_test(status, filename, test, captured_output, error_contents, report_duration)
def report_test(status, filename, test, captured_output, error_contents, duration):
    '''
    @param filename: 'D:\\src\\mod1\\hello.py'
    @param test: 'TestCase.testMet1'
    @param status: fail, error, ok
    '''
    formatted_time = '%.2f' % (duration,)
    pydev_runfiles_xml_rpc.notifyTest(
        status, captured_output, error_contents, filename, test, formatted_time)
# This plugin requires the pytest hookimpl API; refuse to run on older pytest.
if not hasattr(pytest, 'hookimpl'):
    raise AssertionError('Please upgrade pytest (the current version of pytest: %s is unsupported)' % (pytest.__version__,))


@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # Attach the source path and our captured output to the report so that
    # pytest_runtest_logreport can use them (it may run in another process
    # when using xdist).
    outcome = yield
    report = outcome.get_result()
    report.pydev_fspath_strpath = item.fspath.strpath
    report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
@pytest.mark.tryfirst
def pytest_runtest_setup(item):
    '''
    Note: with xdist will be on a secondary process.
    '''
    # We have our own redirection: if xdist does its redirection, we'll have
    # nothing in our contents (which is OK), but if it does, we'll get nothing
    # from pytest but will get our own here.
    start_redirect()
    filename = item.fspath.strpath
    test = item.location[2]

    # Tell the IDE this test is starting.
    pydev_runfiles_xml_rpc.notifyStartTest(filename, test)

View File

@ -1,150 +0,0 @@
import unittest as python_unittest
from _pydev_runfiles import pydev_runfiles_xml_rpc
import time
from _pydevd_bundle import pydevd_io
import traceback
from _pydevd_bundle.pydevd_constants import * # @UnusedWildImport
from io import StringIO
#=======================================================================================================================
# PydevTextTestRunner
#=======================================================================================================================
class PydevTextTestRunner(python_unittest.TextTestRunner):
    # TextTestRunner that produces PydevTestResult instances, so results are
    # reported back to the IDE over xml-rpc.

    def _makeResult(self):
        return PydevTestResult(self.stream, self.descriptions, self.verbosity)


# The concrete result class used by the stock TextTestRunner (obtained
# dynamically because it differs across Python versions).
_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__
#=======================================================================================================================
# PydevTestResult
#=======================================================================================================================
class PydevTestResult(_PythonTextTestResult):
    # Test result that redirects stdout/stderr while each test runs and
    # reports every result (ok/fail/error/skip plus captured output) through
    # pydev_runfiles_xml_rpc.

    def addSubTest(self, test, subtest, err):
        """Called at the end of a subtest.
        'err' is None if the subtest ended successfully, otherwise it's a
        tuple of values as returned by sys.exc_info().
        """
        _PythonTextTestResult.addSubTest(self, test, subtest, err)
        if err is not None:
            subdesc = subtest._subDescription()
            error = (test, self._exc_info_to_string(err, test))
            self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc))

    def startTest(self, test):
        _PythonTextTestResult.startTest(self, test)
        # Capture everything the test writes to stdout/stderr.
        self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both')
        self.start_time = time.time()
        self._current_errors_stack = []
        self._current_failures_stack = []

        try:
            test_name = test.__class__.__name__ + "." + test._testMethodName
        except AttributeError:
            # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
            test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName

        pydev_runfiles_xml_rpc.notifyStartTest(
            test.__pydev_pyfile__, test_name)

    def get_test_name(self, test):
        # Best-effort "Class.method" name; falls back to parsing
        # test.description for _ErrorHolder (class/module level errors).
        try:
            try:
                test_name = test.__class__.__name__ + "." + test._testMethodName
            except AttributeError:
                # Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                try:
                    test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName

                # Support for class/module exceptions (test is instance of _ErrorHolder)
                except:
                    test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>'
        except:
            traceback.print_exc()
            return '<unable to get test name>'
        return test_name

    def stopTest(self, test):
        end_time = time.time()
        # Stop capturing and collect whatever the test printed.
        pydevd_io.end_redirect(std='both')
        _PythonTextTestResult.stopTest(self, test)

        captured_output = self.buf.getvalue()
        del self.buf
        error_contents = ''
        test_name = self.get_test_name(test)
        diff_time = '%.2f' % (end_time - self.start_time)

        skipped = False
        outcome = getattr(test, '_outcome', None)
        if outcome is not None:
            skipped = bool(getattr(outcome, 'skipped', None))

        if skipped:
            pydev_runfiles_xml_rpc.notifyTest(
                'skip', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        elif not self._current_errors_stack and not self._current_failures_stack:
            pydev_runfiles_xml_rpc.notifyTest(
                'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
        else:
            self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name)

    def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''):
        # Join all error texts with the unittest separator, then send one
        # notifyTest per logical result ('error' wins when both are present).
        error_contents = []
        for test, s in errors + failures:
            if type(s) == type((1,)):  # If it's a tuple (for jython 2.1)
                sio = StringIO()
                traceback.print_exception(s[0], s[1], s[2], file=sio)
                s = sio.getvalue()
            error_contents.append(s)

        sep = '\n' + self.separator1
        error_contents = sep.join(error_contents)

        if errors and not failures:
            try:
                pydev_runfiles_xml_rpc.notifyTest(
                    'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
            except:
                # Could not use the test object (e.g. no __pydev_pyfile__):
                # try to extract the file name from the traceback text instead.
                file_start = error_contents.find('File "')
                file_end = error_contents.find('", ', file_start)
                if file_start != -1 and file_end != -1:
                    file = error_contents[file_start + 6:file_end]
                else:
                    file = '<unable to get file>'
                pydev_runfiles_xml_rpc.notifyTest(
                    'error', captured_output, error_contents, file, test_name, diff_time)

        elif failures and not errors:
            pydev_runfiles_xml_rpc.notifyTest(
                'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

        else:  # Ok, we got both, errors and failures. Let's mark it as an error in the end.
            pydev_runfiles_xml_rpc.notifyTest(
                'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)

    def addError(self, test, err):
        _PythonTextTestResult.addError(self, test, err)
        # Support for class/module exceptions (test is instance of _ErrorHolder)
        if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder':
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test))
        else:
            self._current_errors_stack.append(self.errors[-1])

    def addFailure(self, test, err):
        _PythonTextTestResult.addFailure(self, test, err)
        if not hasattr(self, '_current_failures_stack'):
            # Not in start...end, so, report error now (i.e.: django pre/post-setup)
            self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test))
        else:
            self._current_failures_stack.append(self.failures[-1])
class PydevTestSuite(python_unittest.TestSuite):
    """Marker subclass: a plain TestSuite under a pydev-specific name."""
    pass

View File

@ -1,257 +0,0 @@
import sys
import threading
import traceback
import warnings
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
from _pydevd_bundle.pydevd_constants import Null
Queue = _queue.Queue
# This may happen in IronPython (in Python it shouldn't happen as there are
# 'fast' replacements that are used in xmlrpclib.py)
warnings.filterwarnings(
    'ignore', 'The xmllib module is obsolete.*', DeprecationWarning)

# Encoding used for byte strings (mostly file names) sent over xml-rpc.
file_system_encoding = getfilesystemencoding()
#=======================================================================================================================
# _ServerHolder
#=======================================================================================================================
class _ServerHolder:
    '''
    Helper so that we don't have to use a global here.
    '''
    # The current notification server: None before initialize_server(), then
    # a ServerFacade (or a Null object when no port was given).
    SERVER = None
#=======================================================================================================================
# set_server
#=======================================================================================================================
def set_server(server):
    """Install *server* as the process-wide notification target."""
    _ServerHolder.SERVER = server
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    """One queued notification: a method name plus its positional arguments."""

    def __init__(self, method, args):
        self.method = method
        self.args = args

    def to_tuple(self):
        """Return the (method, args) pair used for batch transmission."""
        return (self.method, self.args)
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    # Sentinel put on the notifications queue to make ServerComm.run() exit.
    pass
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    """Facade with the server notification API: every call becomes a
    ParallelNotification placed on the queue (sent later by ServerComm)."""

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def notifyTestsCollected(self, *args):
        self.notifications_queue.put_nowait(ParallelNotification('notifyTestsCollected', args))

    def notifyConnected(self, *args):
        self.notifications_queue.put_nowait(ParallelNotification('notifyConnected', args))

    def notifyTestRunFinished(self, *args):
        self.notifications_queue.put_nowait(ParallelNotification('notifyTestRunFinished', args))

    def notifyStartTest(self, *args):
        self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args))

    def notifyTest(self, *args):
        # Encode each argument before queueing (values may be byte strings,
        # e.g. file names).
        encoded = tuple(_encode_if_needed(arg) for arg in args)
        self.notifications_queue.put_nowait(ParallelNotification('notifyTest', encoded))
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    """Background thread that drains the notifications queue and forwards
    batches of commands to the IDE side through xml-rpc."""

    def __init__(self, notifications_queue, port, daemon=False):
        threading.Thread.__init__(self)
        # If False, wait for all the notifications to be passed before exiting!
        # (Fix: plain attribute assignment instead of the deprecated setDaemon().)
        self.daemon = daemon
        self.finished = False
        self.notifications_queue = notifications_queue
        from _pydev_bundle import pydev_localhost

        # It is necessary to specify an encoding, that matches
        # the encoding of all bytes-strings passed into an
        # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the
        # packet encoding.  Unicode strings are automatically converted,
        # where necessary."
        # Byte strings most likely come from file names.
        encoding = file_system_encoding
        if encoding == "mbcs":
            # Windows symbolic name for the system encoding CP_ACP.
            # We need to convert it into a encoding that is recognized by Java.
            # Unfortunately this is not always possible. You could use
            # GetCPInfoEx and get a name similar to "windows-1251". Then
            # you need a table to translate on a best effort basis. Much to complicated.
            # ISO-8859-1 is good enough.
            encoding = "ISO-8859-1"

        self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port),
                                       encoding=encoding)

    def run(self):
        # Block for the first command, then drain the queue without blocking
        # so several notifications go out in one notifyCommands() batch.
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  # No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  # That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    self.server.notifyCommands(commands)
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
#=======================================================================================================================
# initialize_server
#=======================================================================================================================
def initialize_server(port, daemon=False):
    """Set up the global server facade/comm pair exactly once.

    When `port` is None a Null stand-in is installed so the notification
    interface keeps working without any actual connection.
    """
    if _ServerHolder.SERVER is None:
        if port is None:
            # Create a null server, so that we keep the interface even without any connection.
            _ServerHolder.SERVER = Null()
            _ServerHolder.SERVER_COMM = Null()
        else:
            pending = Queue()
            _ServerHolder.SERVER = ServerFacade(pending)
            _ServerHolder.SERVER_COMM = ServerComm(pending, port, daemon)
            _ServerHolder.SERVER_COMM.start()

    try:
        _ServerHolder.SERVER.notifyConnected()
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
def notifyTestsCollected(tests_count):
    """Notify the server how many tests were collected (must not be None)."""
    assert tests_count is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestsCollected(tests_count)
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyStartTest
#=======================================================================================================================
def notifyStartTest(file, test):
    """Notify that a test is about to be run.

    :param file: the tests file (c:/temp/test.py)
    :param test: the test ran (i.e.: TestCase.test1); None is mapped to ''
        (could happen if we have an import error importing the module).
    """
    assert file is not None
    test = '' if test is None else test
    server = _ServerHolder.SERVER
    try:
        server.notifyStartTest(file, test)
    except:
        traceback.print_exc()
def _encode_if_needed(obj):
# In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
if isinstance(obj, str): # Unicode in py3
return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))
elif isinstance(obj, bytes):
try:
return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
except:
return xmlrpclib.Binary(obj) # bytes already
return obj
#=======================================================================================================================
# notifyTest
#=======================================================================================================================
def notifyTest(cond, captured_output, error_contents, file, test, time):
    """Notify the result of a single test.

    :param cond: ok, fail, error
    :param captured_output: output captured from stdout
    :param error_contents: output captured from stderr
    :param file: the tests file (c:/temp/test.py)
    :param test: the test ran (i.e.: TestCase.test1)
    :param time: float with the number of seconds elapsed
    """
    assert cond is not None
    assert captured_output is not None
    assert error_contents is not None
    assert file is not None
    if test is None:
        test = ''  # Could happen if we have an import error importing module.
    assert time is not None
    try:
        _ServerHolder.SERVER.notifyTest(
            cond,
            _encode_if_needed(captured_output),
            _encode_if_needed(error_contents),
            file,
            test,
            time,
        )
    except:
        traceback.print_exc()
#=======================================================================================================================
# notifyTestRunFinished
#=======================================================================================================================
def notifyTestRunFinished(total_time):
    """Notify that the whole test run finished, with its total elapsed time."""
    assert total_time is not None
    server = _ServerHolder.SERVER
    try:
        server.notifyTestRunFinished(total_time)
    except:
        traceback.print_exc()
#=======================================================================================================================
# force_server_kill
#=======================================================================================================================
def force_server_kill():
    # Ask ServerComm to stop by queueing the KillServer sentinel
    # (handled at the top of ServerComm.run's batching loop).
    _ServerHolder.SERVER_COMM.notifications_queue.put_nowait(KillServer())

View File

@ -1,592 +0,0 @@
'''
Run this module to regenerate the `pydevd_schema.py` file.
Note that it'll generate it based on the current debugProtocol.json. Erase it and rerun
to download the latest version.
'''
def is_variable_to_translate(cls_name, var_name):
    """Return True when the given property holds a DAP id which must be
    translated between client-facing ids and internal ids."""
    if var_name in ('variablesReference', 'frameId', 'threadId'):
        return True
    # 'id' means frameId on StackFrame and threadId on Thread; everywhere
    # else it is not translated.
    return (cls_name, var_name) in (('StackFrame', 'id'), ('Thread', 'id'))
def _get_noqa_for_var(prop_name):
return ' # noqa (assign to builtin)' if prop_name in ('type', 'format', 'id', 'hex', 'breakpoint', 'filter') else ''
class _OrderedSet(object):
# Not a good ordered set (just something to be small without adding any deps)
def __init__(self, initial_contents=None):
self._contents = []
self._contents_as_set = set()
if initial_contents is not None:
for x in initial_contents:
self.add(x)
def add(self, x):
if x not in self._contents_as_set:
self._contents_as_set.add(x)
self._contents.append(x)
def discard(self, x):
if x in self._contents_as_set:
self._contents_as_set.remove(x)
self._contents.remove(x)
def copy(self):
return _OrderedSet(self._contents)
def update(self, contents):
for x in contents:
self.add(x)
def __iter__(self):
return iter(self._contents)
def __contains__(self, item):
return item in self._contents_as_set
def __len__(self):
return len(self._contents)
def set_repr(self):
if len(self) == 0:
return 'set()'
lst = [repr(x) for x in self]
return 'set([' + ', '.join(lst) + '])'
class Ref(object):
    """A resolved '$ref' to another schema definition.

    str(ref) yields the referenced class name; `ref_data` holds the
    referenced class-to-generate dict.
    """

    def __init__(self, ref, ref_data):
        self.ref = ref
        self.ref_data = ref_data

    def __str__(self):
        return self.ref
def load_schema_data():
    """Load debugProtocol.json, downloading it from the DAP repository on
    first use (erase the file and rerun to refresh it)."""
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), 'debugProtocol.json')
    if not os.path.exists(json_file):
        import requests
        req = requests.get('https://raw.githubusercontent.com/microsoft/debug-adapter-protocol/gh-pages/debugAdapterProtocol.json')
        assert req.status_code == 200
        with open(json_file, 'wb') as stream:
            stream.write(req.content)

    with open(json_file, 'rb') as json_contents:
        return json.loads(json_contents.read())
def load_custom_schema_data():
    """Load the pydevd-specific additions from debugProtocolCustom.json."""
    import os.path
    import json

    json_file = os.path.join(os.path.dirname(__file__), 'debugProtocolCustom.json')
    with open(json_file, 'rb') as json_contents:
        return json.loads(json_contents.read())
def create_classes_to_generate_structure(json_schema_data):
    """Convert the json-schema 'definitions' into class-to-generate dicts.

    For each definition this gathers the properties, required names, base
    definitions (from 'allOf' '$ref' entries) and enum data needed by the
    later generation steps.
    """
    class_to_generatees = {}
    for name, definition in json_schema_data['definitions'].items():
        description = definition.get('description')
        is_enum = definition.get('type') == 'string' and 'enum' in definition
        enum_values = definition['enum'] if is_enum else None

        properties = dict(definition.get('properties', {}))
        required = _OrderedSet(definition.get('required', _OrderedSet()))

        base_definitions = []
        for part in definition.get('allOf') or ():
            ref = part.get('$ref')
            if ref is not None:
                # '$ref' entries become base classes of the generated class.
                assert ref.startswith('#/definitions/')
                base_definitions.append(ref[len('#/definitions/'):])
            else:
                # Inline part: merge its description/properties/required.
                if not description:
                    description = part.get('description')
                properties.update(part.get('properties', {}))
                required.update(_OrderedSet(part.get('required', _OrderedSet())))

        if isinstance(description, (list, tuple)):
            description = '\n'.join(description)

        if name == 'ModulesRequest':  # Hack to accept modules request without arguments (ptvsd: 2050).
            required.discard('arguments')

        class_to_generatees[name] = dict(
            name=name,
            properties=properties,
            base_definitions=base_definitions,
            description=description,
            required=required,
            is_enum=is_enum,
            enum_values=enum_values,
        )
    return class_to_generatees
def collect_bases(curr_class, classes_to_generate, memo=None):
    """Collect the names of all (transitive) base definitions of curr_class.

    :param curr_class: class-to-generate dict whose bases are wanted.
    :param classes_to_generate: mapping of name -> class-to-generate dict.
    :param memo: names already visited (shared across the recursion).
    :return: list of base names, closest-first, without duplicates.
    """
    ret = []
    if memo is None:
        memo = {}
    for base_definition in curr_class['base_definitions']:
        if base_definition not in memo:
            # Fix: record the visit -- previously `memo` was checked but never
            # filled, so a base shared by two parents was collected once per
            # path and a cyclic schema would recurse forever.
            memo[base_definition] = True
            ret.append(base_definition)
            ret.extend(collect_bases(classes_to_generate[base_definition], classes_to_generate, memo))
    return ret
def fill_properties_and_required_from_base(classes_to_generate):
    """Resolve inheritance: merge each class' base-definition properties and
    required names into the class itself (bases first, so the subclass'
    own entries win and the property order starts at the root base)."""
    for class_to_generate in classes_to_generate.values():
        merged_properties = {}
        merged_required = _OrderedSet()
        for base_definition in reversed(collect_bases(class_to_generate, classes_to_generate)):
            base = classes_to_generate[base_definition]
            merged_properties.update(base.get('properties', {}))
            merged_required.update(base.get('required', _OrderedSet()))

        merged_properties.update(class_to_generate['properties'])
        merged_required.update(class_to_generate['required'])
        class_to_generate['properties'] = merged_properties
        class_to_generate['required'] = merged_required

    return class_to_generate
def update_class_to_generate_description(class_to_generate):
    """Re-wrap the description at 100 columns and indent it for use inside
    the generated class docstring."""
    import textwrap

    wrapped_lines = []
    for raw_line in class_to_generate['description'].splitlines():
        wrapped_lines.extend(textwrap.wrap(raw_line.strip(), 100))
        wrapped_lines.append('')  # Keep paragraph separation.

    # Drop trailing blank lines.
    while wrapped_lines and wrapped_lines[-1] == '':
        wrapped_lines.pop()

    class_to_generate['description'] = '    ' + ('\n    '.join(wrapped_lines))
def update_class_to_generate_type(classes_to_generate, class_to_generate):
    """Resolve each property's '$ref' into a Ref object stored under 'type'
    (properties that already carry a plain type are left untouched)."""
    for _prop_name, prop_val in class_to_generate.get('properties').items():
        if prop_val.get('type', ''):
            continue  # Plain type already present.
        ref_name = prop_val.pop('$ref', '')
        if ref_name:
            assert ref_name.startswith('#/definitions/')
            ref_name = ref_name[len('#/definitions/'):]
            prop_val['type'] = Ref(ref_name, classes_to_generate[ref_name])
def update_class_to_generate_register_dec(classes_to_generate, class_to_generate):
    """Compute the registration decorators for the generated class.

    Sets 'register_dec' (always '@register') and, for request/response/event
    classes with a single-valued command/event enum, 'register_request' to
    the matching @register_request/@register_response/@register_event line.
    """
    # Defaults: plain @register, no request/response/event registration.
    class_to_generate['register_request'] = ''
    class_to_generate['register_dec'] = '@register'

    properties = class_to_generate.get('properties')
    enum_type = properties.get('type', {}).get('enum')
    if not (enum_type and len(enum_type) == 1):
        return
    msg_type = next(iter(enum_type))
    if msg_type not in ("request", "response", "event"):
        return

    if msg_type == 'request':
        command = properties.get('command')
    elif msg_type == 'event':
        command = properties.get('event')
    elif msg_type == 'response':
        # The actual command is typed in the matching request.
        response_name = class_to_generate['name']
        request_name = response_name[:-len('Response')] + 'Request'
        if request_name in classes_to_generate:
            command = classes_to_generate[request_name]['properties'].get('command')
        elif response_name == 'ErrorResponse':
            command = {'enum': ['error']}
        else:
            raise AssertionError('Unhandled: %s' % (response_name,))
    else:
        raise AssertionError('Unexpected condition.')

    if command:
        enum = command.get('enum')
        if enum and len(enum) == 1:
            class_to_generate['register_request'] = '@register_%s(%r)\n' % (msg_type, enum[0])
def extract_prop_name_and_prop(class_to_generate):
    """Return (name, prop) pairs sorted so that required properties come
    first; 'seq' is placed after the other required ones so that it can get
    a default of -1. The sort is stable, so the original order is kept
    within each group."""
    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))

    def sort_key(item):
        prop_name = item[0]
        if prop_name not in required:
            return 1
        return 0.5 if prop_name == 'seq' else 0

    return sorted(class_to_generate.get('properties').items(), key=sort_key)
def update_class_to_generate_to_json(class_to_generate):
    """Generate source for the class' to_dict / update_dict_ids_from_dap /
    update_dict_ids_to_dap methods.

    Stores the generated code under the 'to_dict',
    'update_dict_ids_from_dap' and 'update_dict_ids_to_dap' keys of
    class_to_generate.
    """
    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    to_dict_body = ['def to_dict(self, update_ids_to_dap=False):  # noqa (update_ids_to_dap may be unused)']

    # Properties that hold DAP ids and therefore need id translation.
    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate['name'], prop_name):
            translate_prop_names.append(prop_name)

    # Load every property into a local, converting arrays of schema objects
    # to plain dicts.
    for prop_name, prop in prop_name_and_prop:
        namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
        to_dict_body.append('    %(prop_name)s = self.%(prop_name)s%(noqa)s' % namespace)

        if prop.get('type') == 'array':
            to_dict_body.append('    if %(prop_name)s and hasattr(%(prop_name)s[0], "to_dict"):' % namespace)
            to_dict_body.append('        %(prop_name)s = [x.to_dict() for x in %(prop_name)s]' % namespace)

    if translate_prop_names:
        to_dict_body.append('    if update_ids_to_dap:')
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name, noqa=_get_noqa_for_var(prop_name))
            to_dict_body.append('        if %(prop_name)s is not None:' % namespace)
            to_dict_body.append('            %(prop_name)s = self._translate_id_to_dap(%(prop_name)s)%(noqa)s' % namespace)

    # Classmethod that translates DAP ids in a raw dict to internal ids
    # (only generated when there is something to translate).
    if not translate_prop_names:
        update_dict_ids_from_dap_body = []
    else:
        update_dict_ids_from_dap_body = ['', '', '@classmethod', 'def update_dict_ids_from_dap(cls, dct):']
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_from_dap_body.append('    if %(prop_name)r in dct:' % namespace)
            update_dict_ids_from_dap_body.append('        dct[%(prop_name)r] = cls._translate_id_from_dap(dct[%(prop_name)r])' % namespace)
        update_dict_ids_from_dap_body.append('    return dct')

    class_to_generate['update_dict_ids_from_dap'] = _indent_lines('\n'.join(update_dict_ids_from_dap_body))

    # Build the returned dict: required properties go into the dict literal
    # itself; optional ones are added afterwards only when not None.
    to_dict_body.append('    dct = {')
    first_not_required = False

    for prop_name, prop in prop_name_and_prop:
        # Refs to non-enum generated classes serialize via their own to_dict.
        use_to_dict = prop['type'].__class__ == Ref and not prop['type'].ref_data.get('is_enum', False)
        is_array = prop['type'] == 'array'
        ref_array_cls_name = ''
        if is_array:
            ref = prop['items'].get('$ref')
            if ref is not None:
                ref_array_cls_name = ref.split('/')[-1]

        namespace = dict(prop_name=prop_name, ref_array_cls_name=ref_array_cls_name)
        if prop_name in required:
            if use_to_dict:
                to_dict_body.append('        %(prop_name)r: %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap),' % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append('        %(prop_name)r: [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s,' % namespace)
                else:
                    to_dict_body.append('        %(prop_name)r: %(prop_name)s,' % namespace)
        else:
            # First optional property closes the dict literal.
            if not first_not_required:
                first_not_required = True
                to_dict_body.append('    }')

            to_dict_body.append('    if %(prop_name)s is not None:' % namespace)
            if use_to_dict:
                to_dict_body.append('        dct[%(prop_name)r] = %(prop_name)s.to_dict(update_ids_to_dap=update_ids_to_dap)' % namespace)
            else:
                if ref_array_cls_name:
                    to_dict_body.append('        dct[%(prop_name)r] = [%(ref_array_cls_name)s.update_dict_ids_to_dap(o) for o in %(prop_name)s] if (update_ids_to_dap and %(prop_name)s) else %(prop_name)s' % namespace)
                else:
                    to_dict_body.append('        dct[%(prop_name)r] = %(prop_name)s' % namespace)

    # All properties were required: the literal was never closed.
    if not first_not_required:
        first_not_required = True
        to_dict_body.append('    }')

    to_dict_body.append('    dct.update(self.kwargs)')
    to_dict_body.append('    return dct')

    class_to_generate['to_dict'] = _indent_lines('\n'.join(to_dict_body))

    # Classmethod performing the inverse translation (internal -> DAP ids).
    if not translate_prop_names:
        update_dict_ids_to_dap_body = []
    else:
        update_dict_ids_to_dap_body = ['', '', '@classmethod', 'def update_dict_ids_to_dap(cls, dct):']
        for prop_name in translate_prop_names:
            namespace = dict(prop_name=prop_name)
            update_dict_ids_to_dap_body.append('    if %(prop_name)r in dct:' % namespace)
            update_dict_ids_to_dap_body.append('        dct[%(prop_name)r] = cls._translate_id_to_dap(dct[%(prop_name)r])' % namespace)
        update_dict_ids_to_dap_body.append('    return dct')

    class_to_generate['update_dict_ids_to_dap'] = _indent_lines('\n'.join(update_dict_ids_to_dap_body))
def update_class_to_generate_init(class_to_generate):
    """Generate the __init__ source (args, docstring, body) and store it
    under class_to_generate['init'].

    Required properties become positional args ('seq' gets a -1 default);
    optional ones default to None. Ref-typed properties accept either an
    instance or a raw dict (converted to an instance); DAP-id properties are
    translated when update_ids_from_dap is true.
    """
    args = []
    init_body = []
    docstring = []

    required = _OrderedSet(class_to_generate.get('required', _OrderedSet()))
    prop_name_and_prop = extract_prop_name_and_prop(class_to_generate)

    translate_prop_names = []
    for prop_name, prop in prop_name_and_prop:
        if is_variable_to_translate(class_to_generate['name'], prop_name):
            translate_prop_names.append(prop_name)

        enum = prop.get('enum')
        if enum and len(enum) == 1:
            # Single-valued enums (e.g. 'type'/'command') are fixed constants,
            # not constructor arguments.
            init_body.append('    self.%(prop_name)s = %(enum)r' % dict(prop_name=prop_name, enum=next(iter(enum))))
        else:
            if prop_name in required:
                if prop_name == 'seq':
                    args.append(prop_name + '=-1')
                else:
                    args.append(prop_name)
            else:
                args.append(prop_name + '=None')

            if prop['type'].__class__ == Ref:
                ref = prop['type']
                ref_data = ref.ref_data
                if ref_data.get('is_enum', False):
                    # Enum refs are validated against the enum's VALID_VALUES.
                    init_body.append('    if %s is not None:' % (prop_name,))
                    init_body.append('        assert %s in %s.VALID_VALUES' % (prop_name, str(ref)))
                    init_body.append('    self.%(prop_name)s = %(prop_name)s' % dict(
                        prop_name=prop_name))
                else:
                    # Accept either an instance of the ref'd class or a raw
                    # dict (converted to an instance).
                    namespace = dict(
                        prop_name=prop_name,
                        ref_name=str(ref)
                    )
                    init_body.append('    if %(prop_name)s is None:' % namespace)
                    init_body.append('        self.%(prop_name)s = %(ref_name)s()' % namespace)
                    init_body.append('    else:')
                    init_body.append('        self.%(prop_name)s = %(ref_name)s(update_ids_from_dap=update_ids_from_dap, **%(prop_name)s) if %(prop_name)s.__class__ != %(ref_name)s else %(prop_name)s' % namespace
                    )
            else:
                init_body.append('    self.%(prop_name)s = %(prop_name)s' % dict(prop_name=prop_name))
                if prop['type'] == 'array':
                    # Arrays of schema objects: translate ids in each entry.
                    ref = prop['items'].get('$ref')
                    if ref is not None:
                        ref_array_cls_name = ref.split('/')[-1]
                        init_body.append('    if update_ids_from_dap and self.%(prop_name)s:' % dict(prop_name=prop_name))
                        init_body.append('        for o in self.%(prop_name)s:' % dict(prop_name=prop_name))
                        init_body.append('            %(ref_array_cls_name)s.update_dict_ids_from_dap(o)' % dict(ref_array_cls_name=ref_array_cls_name))

        prop_type = prop['type']
        prop_description = prop.get('description', '')
        if isinstance(prop_description, (list, tuple)):
            prop_description = '\n    '.join(prop_description)

        docstring.append(':param %(prop_type)s %(prop_name)s: %(prop_description)s' % dict(
            prop_type=prop_type, prop_name=prop_name, prop_description=prop_description))

    if translate_prop_names:
        init_body.append('    if update_ids_from_dap:')
        for prop_name in translate_prop_names:
            init_body.append('        self.%(prop_name)s = self._translate_id_from_dap(self.%(prop_name)s)' % dict(prop_name=prop_name))

    docstring = _indent_lines('\n'.join(docstring))
    init_body = '\n'.join(init_body)

    # Actually bundle the whole __init__ from the parts.
    args = ', '.join(args)
    if args:
        args = ', ' + args

    # Note: added kwargs because some messages are expected to be extended by the user (so, we'll actually
    # make all extendable so that we don't have to worry about which ones -- we loose a little on typing,
    # but may be better than doing a allow list based on something only pointed out in the documentation).
    class_to_generate['init'] = '''def __init__(self%(args)s, update_ids_from_dap=False, **kwargs):  # noqa (update_ids_from_dap may be unused)
    """
%(docstring)s
    """
%(init_body)s
    self.kwargs = kwargs
''' % dict(args=args, init_body=init_body, docstring=docstring)

    class_to_generate['init'] = _indent_lines(class_to_generate['init'])
def update_class_to_generate_props(class_to_generate):
    """Generate the '__props__' class attribute source (json dump of the
    properties, with Refs serialized by name)."""
    import json

    def default(o):
        if isinstance(o, Ref):
            return o.ref
        raise AssertionError('Unhandled: %s' % (o,))

    json_repr = json.dumps(class_to_generate['properties'], indent=4, default=default)
    class_to_generate['props'] = '    __props__ = %s' % _indent_lines(json_repr).strip()
def update_class_to_generate_refs(class_to_generate):
    """Generate the '__refs__' class attribute source: the set of property
    names whose type is a Ref to another generated class."""
    properties = class_to_generate['properties']
    class_to_generate['refs'] = '    __refs__ = %s' % _OrderedSet(
        key for (key, val) in properties.items() if val['type'].__class__ == Ref).set_repr()
def update_class_to_generate_enums(class_to_generate):
    """Generate the enum constants + VALID_VALUES source for enum classes
    (empty string for non-enum classes)."""
    class_to_generate['enums'] = ''
    if not class_to_generate.get('is_enum', False):
        return
    lines = []
    for enum in class_to_generate['enum_values']:
        lines.append('    %s = %r\n' % (enum.upper(), enum))
    lines.append('\n')
    lines.append('    VALID_VALUES = %s\n\n' % _OrderedSet(class_to_generate['enum_values']).set_repr())
    class_to_generate['enums'] = ''.join(lines)
def update_class_to_generate_objects(classes_to_generate, class_to_generate):
    """Create synthetic classes for inline 'object' properties.

    An inline object property 'foo' of class 'Bar' becomes a new generated
    class 'BarFoo' (added to classes_to_generate) and the property's type is
    replaced by a Ref to it. Properties with no type at all get the 'TypeNA'
    placeholder.
    """
    properties = class_to_generate['properties']
    for key, val in properties.items():
        if 'type' not in val:
            val['type'] = 'TypeNA'
            continue

        if val['type'] == 'object':
            # Synthesize '<ClassName><Key>' for the inline object definition.
            create_new = val.copy()
            create_new.update({
                'name': '%s%s' % (class_to_generate['name'], key.title()),
                'description': ' "%s" of %s' % (key, class_to_generate['name'])
            })
            if 'properties' not in create_new:
                create_new['properties'] = {}

            assert create_new['name'] not in classes_to_generate
            classes_to_generate[create_new['name']] = create_new

            update_class_to_generate_type(classes_to_generate, create_new)
            update_class_to_generate_props(create_new)

            # Update nested object types recursively.
            update_class_to_generate_objects(classes_to_generate, create_new)

            # Replace the inline definition by a reference to the new class.
            val['type'] = Ref(create_new['name'], classes_to_generate[create_new['name']])
            val.pop('properties', None)
def gen_debugger_protocol():
    """Drive the whole generation: load the schemas, build the class
    structures, run every update step and write pydevd_schema.py next to
    this module."""
    import os.path
    import sys

    if sys.version_info[:2] < (3, 6):
        raise AssertionError('Must be run with Python 3.6 onwards (to keep dict order).')

    classes_to_generate = create_classes_to_generate_structure(load_schema_data())
    classes_to_generate.update(create_classes_to_generate_structure(load_custom_schema_data()))

    class_to_generate = fill_properties_and_required_from_base(classes_to_generate)

    # First pass may add new classes (inline objects), so iterate a copy.
    for class_to_generate in list(classes_to_generate.values()):
        update_class_to_generate_description(class_to_generate)
        update_class_to_generate_type(classes_to_generate, class_to_generate)
        update_class_to_generate_props(class_to_generate)
        update_class_to_generate_objects(classes_to_generate, class_to_generate)

    # Second pass: steps that depend on the final set of classes.
    for class_to_generate in classes_to_generate.values():
        update_class_to_generate_refs(class_to_generate)
        update_class_to_generate_init(class_to_generate)
        update_class_to_generate_enums(class_to_generate)
        update_class_to_generate_to_json(class_to_generate)
        update_class_to_generate_register_dec(classes_to_generate, class_to_generate)

    class_template = '''
%(register_request)s%(register_dec)s
class %(name)s(BaseSchema):
    """
%(description)s

    Note: automatically generated code. Do not edit manually.
    """

%(enums)s%(props)s
%(refs)s

    __slots__ = list(__props__.keys()) + ['kwargs']

%(init)s%(update_dict_ids_from_dap)s

%(to_dict)s%(update_dict_ids_to_dap)s
'''

    contents = []
    contents.append('# coding: utf-8')
    contents.append('# Automatically generated code.')
    contents.append('# Do not edit manually.')
    contents.append('# Generated by running: %s' % os.path.basename(__file__))
    contents.append('from .pydevd_base_schema import BaseSchema, register, register_request, register_response, register_event')
    contents.append('')

    for class_to_generate in classes_to_generate.values():
        contents.append(class_template % class_to_generate)

    parent_dir = os.path.dirname(__file__)
    schema = os.path.join(parent_dir, 'pydevd_schema.py')
    with open(schema, 'w', encoding='utf-8') as stream:
        stream.write('\n'.join(contents))
def _indent_lines(lines, indent=' '):
out_lines = []
for line in lines.splitlines(keepends=True):
out_lines.append(indent + line)
return ''.join(out_lines)
if __name__ == '__main__':
    # Regenerates pydevd_schema.py next to this module (downloads the DAP
    # schema first if debugProtocol.json is not present).
    gen_debugger_protocol()

View File

@ -1,147 +0,0 @@
from _pydevd_bundle._debug_adapter.pydevd_schema_log import debug_exception
import json
import itertools
from functools import partial
class BaseSchema(object):
    """Base class for the generated DAP message classes.

    Keeps a process-wide bidirectional translation between internal object
    ids and the ids sent over the debug adapter protocol.
    """

    @staticmethod
    def initialize_ids_translation():
        """Reset the id translation tables; 0 and None map to themselves."""
        BaseSchema._dap_id_to_obj_id = {0: 0, None: None}
        BaseSchema._obj_id_to_dap_id = {0: 0, None: None}
        BaseSchema._next_dap_id = partial(next, itertools.count(1))

    def to_json(self):
        """Serialize this message (via the subclass' to_dict) to json."""
        return json.dumps(self.to_dict())

    @staticmethod
    def _translate_id_to_dap(obj_id):
        """Map an internal id to a DAP id, allocating a new one if needed
        ('*' is passed through unchanged)."""
        if obj_id == '*':
            return '*'
        # Note: ids are never invalidated, so if some object starts using the
        # same id of another object, the same dap id will be used.
        dap_id = BaseSchema._obj_id_to_dap_id.get(obj_id)
        if dap_id is None:
            dap_id = BaseSchema._obj_id_to_dap_id[obj_id] = BaseSchema._next_dap_id()
            BaseSchema._dap_id_to_obj_id[dap_id] = obj_id
        return dap_id

    @staticmethod
    def _translate_id_from_dap(dap_id):
        """Map a DAP id back to the internal id ('*' passes through);
        raises KeyError for ids the client never received."""
        if dap_id == '*':
            return '*'
        try:
            return BaseSchema._dap_id_to_obj_id[dap_id]
        except:
            raise KeyError('Wrong ID sent from the client: %s' % (dap_id,))

    @staticmethod
    def update_dict_ids_to_dap(dct):
        # Default: nothing to translate (generated subclasses override).
        return dct

    @staticmethod
    def update_dict_ids_from_dap(dct):
        # Default: nothing to translate (generated subclasses override).
        return dct
# Make the id translation tables available as soon as the module is imported.
BaseSchema.initialize_ids_translation()
# Registries filled in by the decorators below (command/event -> message class).
_requests_to_types = {}
_responses_to_types = {}
_event_to_types = {}
_all_messages = {}
def register(cls):
    # Class decorator: record cls in the registry of all known message classes.
    _all_messages[cls.__name__] = cls
    return cls
def register_request(command):
    """Parameterized class decorator mapping `command` to the decorated
    request class."""
    def do_register(cls):
        _requests_to_types[command] = cls
        return cls

    return do_register
def register_response(command):
    """Parameterized class decorator mapping `command` to the decorated
    response class."""
    def do_register(cls):
        _responses_to_types[command] = cls
        return cls

    return do_register
def register_event(event):
    """Parameterized class decorator mapping `event` to the decorated event
    class."""
    def do_register(cls):
        _event_to_types[event] = cls
        return cls

    return do_register
def from_dict(dct, update_ids_from_dap=False):
    """Instantiate the proper registered message class from a decoded dict.

    :raises ValueError: when the dict has no 'type' or its command/event is
        not registered.
    """
    msg_type = dct.get('type')
    if msg_type is None:
        raise ValueError('Unable to make sense of message: %s' % (dct,))

    if msg_type == 'request':
        to_type, use = _requests_to_types, dct['command']
    elif msg_type == 'response':
        to_type, use = _responses_to_types, dct['command']
    else:
        to_type, use = _event_to_types, dct['event']

    cls = to_type.get(use)
    if cls is None:
        raise ValueError('Unable to create message from dict: %s. %s not in %s' % (dct, use, sorted(to_type.keys())))
    try:
        return cls(update_ids_from_dap=update_ids_from_dap, **dct)
    except:
        debug_exception('Error creating %s from %s' % (cls, dct))
        raise
def from_json(json_msg, update_ids_from_dap=False, on_dict_loaded=lambda dct: None):
    """Decode a json message (str or utf-8 bytes) into a message instance.

    `on_dict_loaded` is called with the raw dict before it is converted.
    Error responses lacking a required body fall back to a generic Response.
    """
    if isinstance(json_msg, bytes):
        json_msg = json_msg.decode('utf-8')

    as_dict = json.loads(json_msg)
    on_dict_loaded(as_dict)
    try:
        return from_dict(as_dict, update_ids_from_dap=update_ids_from_dap)
    except:
        if as_dict.get('type') == 'response' and not as_dict.get('success'):
            # Error messages may not have the required body: return them as a
            # generic Response instead of failing.
            return _all_messages['Response'](**as_dict)
        raise
def get_response_class(request):
    """Return the response class matching `request` (instance or raw dict)."""
    command = request['command'] if request.__class__ == dict else request.command
    return _responses_to_types[command]
def build_response(request, kwargs=None):
    """Create the response instance matching `request`.

    'success' defaults to True; 'seq' defaults to -1 (to be overwritten
    before sending).
    """
    if kwargs is None:
        kwargs = {'success': True}
    else:
        kwargs.setdefault('success', True)
    response_class = _responses_to_types[request.command]
    kwargs.setdefault('seq', -1)  # To be overwritten before sending.
    return response_class(command=request.command, request_seq=request.seq, **kwargs)

View File

@ -1,46 +0,0 @@
import os
import traceback
from _pydevd_bundle.pydevd_constants import ForkSafeLock
# Prefix every debug line with the pid so interleaved processes can be told apart.
_pid = os.getpid()
_pid_msg = '%s: ' % (_pid,)
# Serializes writes to the debug file (fork-safe).
_debug_lock = ForkSafeLock()
# Flip DEBUG to True to append schema traffic to DEBUG_FILE.
DEBUG = False
DEBUG_FILE = os.path.join(os.path.dirname(__file__), '__debug_output__.txt')
def debug(msg):
    """Append `msg` (str or bytes) to DEBUG_FILE, prefixed with the pid and
    terminated with a newline. No-op unless the module DEBUG flag is set."""
    if not DEBUG:
        return
    with _debug_lock:
        if isinstance(msg, bytes):
            prefix = _pid_msg.encode('utf-8')
            if not msg.endswith((b'\r', b'\n')):
                msg += b'\n'
            mode = 'a+b'
        else:
            prefix = _pid_msg
            if not msg.endswith(('\r', '\n')):
                msg += '\n'
            mode = 'a+'
        with open(DEBUG_FILE, mode) as stream:
            stream.write(prefix)
            stream.write(msg)
def debug_exception(msg=None):
    """Append `msg` (optional) and the current traceback to DEBUG_FILE.
    No-op unless the module DEBUG flag is set."""
    if not DEBUG:
        return
    if msg:
        debug(msg)
    with _debug_lock:
        with open(DEBUG_FILE, 'a+') as stream:
            prefix = _pid_msg
            if isinstance(msg, bytes):
                prefix = prefix.encode('utf-8')
            stream.write(prefix)
            traceback.print_exc(file=stream)

View File

@ -1,554 +0,0 @@
"""
A copy of the code module in the standard library with some changes to work with
async evaluation.
Utilities needed to emulate Python's interactive interpreter.
"""
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
import sys
import traceback
import inspect
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
# START --------------------------- from codeop import CommandCompiler, compile_command
r"""Utilities to compile possibly incomplete Python source code.
This module provides two interfaces, broadly similar to the builtin
function compile(), which take program text, a filename and a 'mode'
and:
- Return code object if the command is complete and valid
- Return None if the command is incomplete
- Raise SyntaxError, ValueError or OverflowError if the command is a
syntax error (OverflowError and ValueError can be produced by
malformed literals).
Approach:
First, check if the source consists entirely of blank lines and
comments; if so, replace it with 'pass', because the built-in
parser doesn't always do the right thing for these.
Compile three times: as is, with \n, and with \n\n appended. If it
compiles as is, it's complete. If it compiles with one \n appended,
we expect more. If it doesn't compile either way, we compare the
error we get when compiling with \n or \n\n appended. If the errors
are the same, the code is broken. But if the errors are different, we
expect more. Not intuitive; not even guaranteed to hold in future
releases; but this matches the compiler's behavior from Python 1.4
through 2.2, at least.
Caveat:
It is possible (but not likely) that the parser stops parsing with a
successful outcome before reaching the end of the source; in this
case, trailing symbols may be ignored instead of causing an error.
For example, a backslash followed by two newlines may be followed by
arbitrary garbage. This will be fixed once the API for the parser is
better.
The two interfaces are:
compile_command(source, filename, symbol):
Compiles a single command in the manner described above.
CommandCompiler():
Instances of this class have __call__ methods identical in
signature to compile_command; the difference is that if the
instance compiles program text containing a __future__ statement,
the instance 'remembers' and compiles all subsequent program texts
with the statement in force.
The module also provides another class:
Compile():
Instances of this class act like the built-in function compile,
but with 'memory' in the sense described above.
"""
import __future__
# All __future__ feature objects; Compile checks these flags to "remember"
# future imports across compilations.
_features = [getattr(__future__, fname)
             for fname in __future__.all_feature_names]
__all__ = ["compile_command", "Compile", "CommandCompiler"]
PyCF_DONT_IMPLY_DEDENT = 0x200  # Matches pythonrun.h
def _maybe_compile(compiler, source, filename, symbol):
# Check for source consisting of only blank lines and comments
for line in source.split("\n"):
line = line.strip()
if line and line[0] != '#':
break # Leave it alone
else:
if symbol != "eval":
source = "pass" # Replace it with a 'pass' statement
err = err1 = err2 = None
code = code1 = code2 = None
try:
code = compiler(source, filename, symbol)
except SyntaxError as err:
pass
try:
code1 = compiler(source + "\n", filename, symbol)
except SyntaxError as e:
err1 = e
try:
code2 = compiler(source + "\n\n", filename, symbol)
except SyntaxError as e:
err2 = e
try:
if code:
return code
if not code1 and repr(err1) == repr(err2):
raise err1
finally:
err1 = err2 = None
def _compile(source, filename, symbol):
    # Compile without implying a trailing dedent (matches codeop behavior).
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile *source* and report whether the command is complete.

    Arguments:

    source -- the source string (may contain \n characters)
    filename -- optional filename used in error reporting; defaults to
        "<input>"
    symbol -- optional grammar start symbol: "single" (default) or "eval"

    Returns a code object when the command is complete and valid, or
    None when it is incomplete.  Raises SyntaxError when the command is
    a syntax error, and possibly ValueError or OverflowError for
    malformed literals.
    """
    result = _maybe_compile(_compile, source, filename, symbol)
    return result
class Compile:
    """A stateful replacement for the built-in compile function.

    If an instance is used to compile program text containing a
    __future__ statement, it "remembers" the feature and compiles all
    subsequent program texts with that feature in force.
    """

    def __init__(self):
        # Baseline flag used throughout this module; additionally allow
        # top-level await when the interpreter exposes the flag.
        self.flags = PyCF_DONT_IMPLY_DEDENT
        try:
            from ast import PyCF_ALLOW_TOP_LEVEL_AWAIT
            self.flags |= PyCF_ALLOW_TOP_LEVEL_AWAIT
        except ImportError:
            # Fix: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit; only a missing flag on older
            # interpreters is expected here.
            pass

    def __call__(self, source, filename, symbol):
        """Compile *source*, absorbing any __future__ flags it enables."""
        codeob = compile(source, filename, symbol, self.flags, 1)
        # Remember every __future__ feature the compiled code turned on so
        # later compilations keep it in force.
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                self.flags |= feature.compiler_flag
        return codeob
class CommandCompiler:
    """compile_command() with __future__ memory.

    Instances are drop-in callables with the same signature as
    compile_command(); the difference is that once a compiled chunk of
    program text contains a __future__ statement, every later chunk is
    compiled with that statement in force.
    """

    def __init__(self):
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
            default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
            "eval"

        Returns a code object if the command is complete and valid, or
        None if it is incomplete; raises SyntaxError, ValueError or
        OverflowError if the command is a syntax error (the latter two
        can be produced by malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
# END --------------------------- from codeop import CommandCompiler, compile_command
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
"compile_command"]
from _pydev_bundle._pydev_saved_modules import threading
class _EvalAwaitInNewEventLoop(threading.Thread):
    """Daemon thread that awaits a compiled expression in its own asyncio
    event loop and stores either the result or the captured exc_info for
    the thread that join()s it."""

    def __init__(self, compiled, updated_globals, updated_locals):
        threading.Thread.__init__(self)
        self.daemon = True
        self._compiled = compiled
        self._updated_globals = updated_globals
        self._updated_locals = updated_locals

        # Output
        self.evaluated_value = None  # Result of the awaited expression.
        self.exc = None  # sys.exc_info() triple when evaluation failed.

    async def _async_func(self):
        # NOTE(review): eval()'s signature is eval(code, globals, locals),
        # but _updated_locals is passed in the globals position here --
        # confirm this inversion is intentional.
        return await eval(self._compiled, self._updated_locals, self._updated_globals)

    def run(self):
        try:
            import asyncio
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            # NOTE(review): asyncio.run() creates and manages its own loop,
            # so the loop created above appears unused and is never
            # closed -- verify whether set_event_loop is still needed.
            self.evaluated_value = asyncio.run(self._async_func())
        except:
            # Deliberately broad: the failure (whatever it is) must be
            # handed back to the joining thread, not lost here.
            self.exc = sys.exc_info()
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).
    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.
        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        # CommandCompiler keeps any __future__ flags in force across inputs.
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.
        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.
        """
        try:
            is_async = False
            # CO_COROUTINE may be missing on older interpreters; guard it.
            if hasattr(inspect, 'CO_COROUTINE'):
                is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE

            if is_async:
                # Top-level await: run the coroutine in a new event loop on
                # a helper thread, then re-raise any exception over here.
                t = _EvalAwaitInNewEventLoop(code, self.locals, None)
                t.start()
                t.join()

                if t.exc:
                    raise t.exc[1].with_traceback(t.exc[2])

            else:
                exec(code, self.locals)
        except SystemExit:
            raise
        except:
            # Intentionally broad: any error raised by user code is shown,
            # never propagated to the console loop.
            self.showtraceback()

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.
        """
        type, value, tb = sys.exc_info()
        # Mirror what the real interpreter does: record the last error.
        sys.last_type = type
        sys.last_value = value
        sys.last_traceback = tb
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value.args
            except ValueError:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        if sys.excepthook is sys.__excepthook__:
            lines = traceback.format_exception_only(type, value)
            self.write(''.join(lines))
        else:
            # If someone has set sys.excepthook, we let that take precedence
            # over self.write
            sys.excepthook(type, value, tb)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.
        """
        sys.last_type, sys.last_value, last_tb = ei = sys.exc_info()
        sys.last_traceback = last_tb
        try:
            # tb_next skips the frame of this interpreter machinery itself.
            lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next)
            if sys.excepthook is sys.__excepthook__:
                self.write(''.join(lines))
            else:
                # If someone has set sys.excepthook, we let that take precedence
                # over self.write
                sys.excepthook(ei[0], ei[1], last_tb)
        finally:
            # Break the reference cycle through the traceback objects.
            last_tb = ei = None

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.
        """
        sys.stderr.write(data)
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.
    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.
        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None, exitmsg=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specifies the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        The optional exitmsg argument specifies the exit message
        printed when exiting.  Pass the empty string to suppress
        printing an exit message.  If exitmsg is not given or None,
        a default message is printed.
        """
        # Make sure the prompts exist; user code may have customized them.
        try:
            sys.ps1
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        elif banner:
            self.write("%s\n" % str(banner))
        # 'more' stays truthy while the buffered command is incomplete.
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    # Ctrl-D / end of input stream: leave the loop.
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C: discard pending input and prompt again.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0
        if exitmsg is None:
            self.write('now exiting %s...\n' % self.__class__.__name__)
        elif exitmsg != '':
            self.write('%s\n' % exitmsg)

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).
        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        input(); a subclass may replace this with a different
        implementation.
        """
        return input(prompt)
def interact(banner=None, readfunc=None, local=None, exitmsg=None):
    """Closely emulate the interactive Python interpreter.

    Backwards-compatible functional interface to the InteractiveConsole
    class.  When readfunc is not specified, the readline module is
    imported (when available) to enable GNU readline line editing.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()
    exitmsg -- passed to InteractiveConsole.interact()
    """
    console = InteractiveConsole(local)
    if readfunc is None:
        # Best effort only: readline is unavailable on some platforms.
        try:
            import readline  # noqa: F401
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner, exitmsg)
if __name__ == "__main__":
    # Run a stand-alone interactive console when executed as a script.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-q', action='store_true',
                        help="don't print version and copyright messages")
    args = parser.parse_args()
    # Suppress the banner with -q here, or with Python's own -q flag.
    if args.q or sys.flags.quiet:
        banner = ''
    else:
        banner = None
    interact(banner)

View File

@ -1,19 +0,0 @@
# Defines which version of the PyDBAdditionalThreadInfo we'll use.
from _pydevd_bundle.pydevd_constants import ENV_FALSE_LOWER_VALUES, USE_CYTHON_FLAG, \
    ENV_TRUE_LOWER_VALUES

# NOTE(review): USE_CYTHON_FLAG presumably reflects an environment setting
# (e.g. PYDEVD_USE_CYTHON) -- verify in pydevd_constants.  Truthy forces the
# compiled (cython) implementation, falsy forces pure-python, anything else
# picks cython when available with a pure-python fallback.
if USE_CYTHON_FLAG in ENV_TRUE_LOWER_VALUES:
    # We must import the cython version if forcing cython
    from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock  # @UnusedImport

elif USE_CYTHON_FLAG in ENV_FALSE_LOWER_VALUES:
    # Use the regular version if not forcing cython
    from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock  # @UnusedImport @Reimport

else:
    # Regular: use fallback if not found (message is already given elsewhere).
    try:
        from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock
    except ImportError:
        from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo, set_additional_thread_info, _set_additional_thread_info_lock  # @UnusedImport

View File

@ -1,153 +0,0 @@
from _pydevd_bundle.pydevd_constants import (STATE_RUN, PYTHON_SUSPEND, SUPPORT_GEVENT, ForkSafeLock,
_current_frames)
from _pydev_bundle import pydev_log
# IFDEF CYTHON
# pydev_log.debug("Using Cython speedups")
# ELSE
from _pydevd_bundle.pydevd_frame import PyDBFrame
# ENDIF
version = 11
#=======================================================================================================================
# PyDBAdditionalThreadInfo
#=======================================================================================================================
# IFDEF CYTHON
# cdef class PyDBAdditionalThreadInfo:
# ELSE
class PyDBAdditionalThreadInfo(object):
# ENDIF

    # Per-thread debugger state; one instance is attached to each traced
    # thread (see set_additional_thread_info below), so __slots__ keeps
    # the instances small.
    # Note: the params in cython are declared in pydevd_cython.pxd.
    # IFDEF CYTHON
    # ELSE
    __slots__ = [
        'pydev_state',
        'pydev_step_stop',
        'pydev_original_step_cmd',
        'pydev_step_cmd',
        'pydev_notify_kill',
        'pydev_django_resolve_frame',
        'pydev_call_from_jinja2',
        'pydev_call_inside_jinja2',
        'is_tracing',
        'conditional_breakpoint_exception',
        'pydev_message',
        'suspend_type',
        'pydev_next_line',
        'pydev_func_name',
        'suspended_at_unhandled',
        'trace_suspend_type',
        'top_level_thread_tracer_no_back_frames',
        'top_level_thread_tracer_unhandled',
        'thread_tracer',
        'step_in_initial_location',

        # Used for CMD_SMART_STEP_INTO (to know which smart step into variant to use)
        'pydev_smart_parent_offset',
        'pydev_smart_child_offset',

        # Used for CMD_SMART_STEP_INTO (list[_pydevd_bundle.pydevd_bytecode_utils.Variant])
        # Filled when the cmd_get_smart_step_into_variants is requested (so, this is a copy
        # of the last request for a given thread and pydev_smart_parent_offset/pydev_smart_child_offset relies on it).
        'pydev_smart_step_into_variants',
        'target_id_to_smart_step_into_variant',

        'pydev_use_scoped_step_frame',
    ]
    # ENDIF

    def __init__(self):
        self.pydev_state = STATE_RUN  # STATE_RUN or STATE_SUSPEND
        self.pydev_step_stop = None

        # Note: we have `pydev_original_step_cmd` and `pydev_step_cmd` because the original is to
        # say the action that started it and the other is to say what's the current tracing behavior
        # (because it's possible that we start with a step over but may have to switch to a
        # different step strategy -- for instance, if a step over is done and we return the current
        # method the strategy is changed to a step in).
        self.pydev_original_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.
        self.pydev_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.

        self.pydev_notify_kill = False
        self.pydev_django_resolve_frame = False
        self.pydev_call_from_jinja2 = None
        self.pydev_call_inside_jinja2 = None
        self.is_tracing = 0
        self.conditional_breakpoint_exception = None
        self.pydev_message = ''
        self.suspend_type = PYTHON_SUSPEND
        self.pydev_next_line = -1
        self.pydev_func_name = '.invalid.'  # Must match the type in cython
        self.suspended_at_unhandled = False
        self.trace_suspend_type = 'trace'  # 'trace' or 'frame_eval'
        self.top_level_thread_tracer_no_back_frames = []
        self.top_level_thread_tracer_unhandled = None
        self.thread_tracer = None
        self.step_in_initial_location = None
        self.pydev_smart_parent_offset = -1
        self.pydev_smart_child_offset = -1
        self.pydev_smart_step_into_variants = ()
        self.target_id_to_smart_step_into_variant = {}

        # Flag to indicate ipython use-case where each line will be executed as a call/line/return
        # in a new frame but in practice we want to consider each new frame as if it was all
        # part of the same frame.
        #
        # In practice this means that a step over shouldn't revert to a step in and we need some
        # special logic to know when we should stop in a step over as we need to consider 2
        # different frames as being equal if they're logically the continuation of a frame
        # being executed by ipython line by line.
        #
        # See: https://github.com/microsoft/debugpy/issues/869#issuecomment-1132141003
        self.pydev_use_scoped_step_frame = False

    def get_topmost_frame(self, thread):
        '''
        Gets the topmost frame for the given thread. Note that it may be None
        and callers should remove the reference to the frame as soon as possible
        to avoid disturbing user code.
        '''
        # sys._current_frames(): dictionary with thread id -> topmost frame
        current_frames = _current_frames()
        topmost_frame = current_frames.get(thread.ident)
        if topmost_frame is None:
            # Note: this is expected for dummy threads (so, getting the topmost frame should be
            # treated as optional).
            pydev_log.info(
                'Unable to get topmost frame for thread: %s, thread.ident: %s, id(thread): %s\nCurrent frames: %s.\n'
                'GEVENT_SUPPORT: %s',
                thread,
                thread.ident,
                id(thread),
                current_frames,
                SUPPORT_GEVENT,
            )

        return topmost_frame

    def __str__(self):
        # Short debug representation of the current stepping state.
        return 'State:%s Stop:%s Cmd: %s Kill:%s' % (
            self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill)
# Fork-safe lock guarding the creation of per-thread info objects.
_set_additional_thread_info_lock = ForkSafeLock()


def set_additional_thread_info(thread):
    """Return the PyDBAdditionalThreadInfo attached to *thread*, creating
    and attaching one (as thread.additional_info) when missing."""
    try:
        # Fast path (EAFP): most threads already carry the info.
        additional_info = thread.additional_info
        if additional_info is None:
            raise AttributeError()
    except:
        # Intentionally broad: the AttributeError above (or a missing
        # attribute) routes every "not set yet" case through here.
        with _set_additional_thread_info_lock:
            # If it's not there, set it within a lock to avoid any racing
            # conditions.
            additional_info = getattr(thread, 'additional_info', None)
            if additional_info is None:
                additional_info = PyDBAdditionalThreadInfo()
                thread.additional_info = additional_info

    return additional_info

View File

@ -1,184 +0,0 @@
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_import_class
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame
from _pydev_bundle._pydev_saved_modules import threading
class ExceptionBreakpoint(object):
    """Breakpoint that triggers when a given exception type is raised."""

    def __init__(
        self,
        qname,
        condition,
        expression,
        notify_on_handled_exceptions,
        notify_on_unhandled_exceptions,
        notify_on_user_unhandled_exceptions,
        notify_on_first_raise_only,
        ignore_libraries
    ):
        # Resolve the exception class eagerly; it may be unknown (None).
        exctype = get_exception_class(qname)
        self.qname = qname
        self.name = None if exctype is None else exctype.__name__

        self.condition = condition
        self.expression = expression
        self.notify_on_unhandled_exceptions = notify_on_unhandled_exceptions
        self.notify_on_handled_exceptions = notify_on_handled_exceptions
        self.notify_on_first_raise_only = notify_on_first_raise_only
        self.notify_on_user_unhandled_exceptions = notify_on_user_unhandled_exceptions
        self.ignore_libraries = ignore_libraries
        self.type = exctype

    def __str__(self):
        return self.qname

    @property
    def has_condition(self):
        return self.condition is not None

    def handle_hit_condition(self, frame):
        # Exception breakpoints never carry a hit-count condition.
        return False
class LineBreakpoint(object):
    """Breakpoint placed on a source line, with optional conditions."""

    def __init__(self, breakpoint_id, line, condition, func_name, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.breakpoint_id = breakpoint_id
        self.line = line
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0
        # Protects _hit_count: multiple threads may hit the line at once.
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        return bool(self.condition) or bool(self.hit_condition)

    def handle_hit_condition(self, frame):
        """Return True when the '@HIT@' hit-count expression matches this hit."""
        hit_condition = self.hit_condition
        if not hit_condition:
            return False
        with self._hit_condition_lock:
            self._hit_count += 1
            expr = hit_condition.replace('@HIT@', str(self._hit_count))
            try:
                return bool(eval(expr, frame.f_globals, frame.f_locals))
            except Exception:
                return False
class FunctionBreakpoint(object):
    """Breakpoint triggered when the function with the given name is entered."""

    def __init__(self, func_name, condition, expression, suspend_policy="NONE", hit_condition=None, is_logpoint=False):
        self.condition = condition
        self.func_name = func_name
        self.expression = expression
        self.suspend_policy = suspend_policy
        self.hit_condition = hit_condition
        self._hit_count = 0
        # Guards _hit_count against concurrent hits from multiple threads.
        self._hit_condition_lock = threading.Lock()
        self.is_logpoint = is_logpoint

    @property
    def has_condition(self):
        return bool(self.condition) or bool(self.hit_condition)

    def handle_hit_condition(self, frame):
        """Return True when the '@HIT@' hit-count expression matches."""
        if not self.hit_condition:
            return False
        matched = False
        with self._hit_condition_lock:
            self._hit_count += 1
            expr = self.hit_condition.replace('@HIT@', str(self._hit_count))
            try:
                matched = bool(eval(expr, frame.f_globals, frame.f_locals))
            except Exception:
                matched = False
        return matched
def get_exception_breakpoint(exctype, exceptions):
    """Find the breakpoint in *exceptions* that best matches *exctype*.

    An exact match on the fully-qualified exception name wins; otherwise
    the registered breakpoint with the most specific matching base class
    is returned (or None when nothing matches).
    """
    if exctype:
        full_qname = str(exctype.__module__) + '.' + exctype.__name__
    else:
        full_qname = None

    if exceptions is None:
        return None

    try:
        # Fast path: exact match on the fully-qualified name.
        return exceptions[full_qname]
    except KeyError:
        pass

    best = None
    for candidate in exceptions.values():
        if candidate.type is not None and issubclass(exctype, candidate.type):
            # Prefer the most derived (most specific) matching type.
            if best is None or issubclass(candidate.type, best.type):
                best = candidate
    return best
def stop_on_unhandled_exception(py_db, thread, additional_info, arg):
    """Suspend *thread* for an unhandled exception when a breakpoint matches.

    :param py_db: the debugger instance (provides breakpoints and filters).
    :param thread: the thread where the exception happened.
    :param additional_info: PyDBAdditionalThreadInfo for that thread.
    :param arg: the (exctype, value, traceback) triple, as from sys.exc_info().
    """
    exctype, value, tb = arg
    break_on_uncaught_exceptions = py_db.break_on_uncaught_exceptions
    if break_on_uncaught_exceptions:
        exception_breakpoint = py_db.get_exception_breakpoint(exctype, break_on_uncaught_exceptions)
    else:
        exception_breakpoint = None

    if not exception_breakpoint:
        return

    if tb is None:  # sometimes it can be None, e.g. with GTK
        return

    # Never stop for Ctrl-C, nor for SystemExit codes the user chose to ignore.
    if exctype is KeyboardInterrupt:
        return

    if exctype is SystemExit and py_db.ignore_system_exit_code(value):
        return

    # Walk the traceback collecting frames; the deepest frame that passes
    # the exclude filter is where the user is stopped.
    frames = []
    user_frame = None

    while tb is not None:
        if not py_db.exclude_exception_by_filter(exception_breakpoint, tb):
            user_frame = tb.tb_frame
        frames.append(tb.tb_frame)
        tb = tb.tb_next

    if user_frame is None:
        # Every frame was filtered out (e.g. only library code raised).
        return

    frames_byid = dict([(id(frame), frame) for frame in frames])
    add_exception_to_frame(user_frame, arg)
    if exception_breakpoint.condition is not None:
        eval_result = py_db.handle_breakpoint_condition(additional_info, exception_breakpoint, user_frame)
        if not eval_result:
            return

    if exception_breakpoint.expression is not None:
        py_db.handle_breakpoint_expression(exception_breakpoint, additional_info, user_frame)

    try:
        additional_info.pydev_message = exception_breakpoint.qname
    except:
        # Fall back to the encoded form if the assignment fails.
        additional_info.pydev_message = exception_breakpoint.qname.encode('utf-8')

    pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % (exception_breakpoint.qname,))

    py_db.do_stop_on_unhandled_exception(thread, user_frame, frames_byid, arg)
def get_exception_class(kls):
    """Resolve the exception class named by the string *kls*.

    First evaluates the name directly (handles builtins such as
    'ValueError'); if that fails for any reason, falls back to
    importing it as a dotted name.
    """
    try:
        resolved = eval(kls)
    except:
        resolved = pydevd_import_class.import_name(kls)
    return resolved

View File

@ -1,843 +0,0 @@
"""
Bytecode analysing utils. Originally added for using in smart step into.
Note: not importable from Python 2.
"""
from _pydev_bundle import pydev_log
from types import CodeType
from _pydevd_frame_eval.vendored.bytecode.instr import _Variable
from _pydevd_frame_eval.vendored import bytecode
from _pydevd_frame_eval.vendored.bytecode import cfg as bytecode_cfg
import dis
import opcode as _opcode
from _pydevd_bundle.pydevd_constants import KeyifyList, DebugInfoHolder, IS_PY311_OR_GREATER
from bisect import bisect
from collections import deque
# When True, throws errors on unknown bytecodes, when False, ignore those as if they didn't change the stack.
STRICT_MODE = False
DEBUG = False
_BINARY_OPS = set([opname for opname in dis.opname if opname.startswith('BINARY_')])
_BINARY_OP_MAP = {
'BINARY_POWER': '__pow__',
'BINARY_MULTIPLY': '__mul__',
'BINARY_MATRIX_MULTIPLY': '__matmul__',
'BINARY_FLOOR_DIVIDE': '__floordiv__',
'BINARY_TRUE_DIVIDE': '__div__',
'BINARY_MODULO': '__mod__',
'BINARY_ADD': '__add__',
'BINARY_SUBTRACT': '__sub__',
'BINARY_LSHIFT': '__lshift__',
'BINARY_RSHIFT': '__rshift__',
'BINARY_AND': '__and__',
'BINARY_OR': '__or__',
'BINARY_XOR': '__xor__',
'BINARY_SUBSCR': '__getitem__',
'BINARY_DIVIDE': '__div__'
}
_COMP_OP_MAP = {
'<': '__lt__',
'<=': '__le__',
'==': '__eq__',
'!=': '__ne__',
'>': '__gt__',
'>=': '__ge__',
'in': '__contains__',
'not in': '__contains__',
}
class Target(object):
    """A candidate smart-step-into target (a call found in the bytecode)."""

    __slots__ = ['arg', 'lineno', 'offset', 'children_targets']

    def __init__(self, arg, lineno, offset, children_targets=()):
        self.arg = arg
        self.lineno = lineno
        self.offset = offset
        self.children_targets = children_targets

    def __repr__(self):
        parts = ['%s: %s' % (slot, getattr(self, slot)) for slot in self.__slots__]
        return 'Target(%s)' % ', '.join(parts)

    __str__ = __repr__
class _TargetIdHashable(object):
    """Wrap a Target so it can be used as a dict key hashed by identity.

    Equality and hashing are based on the identity of the wrapped target,
    not its value: two wrappers compare equal only when they wrap the very
    same object.
    """

    def __init__(self, target):
        self.target = target

    def __eq__(self, other):
        if not hasattr(other, 'target'):
            # Fix: the original returned a bare None (falsy but wrong per
            # the data model); NotImplemented lets Python try the
            # reflected operation, with the same truthiness outcomes.
            return NotImplemented
        return other.target is self.target

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        # Hash by the wrapped object's identity, consistent with __eq__.
        return id(self.target)
class _StackInterpreter(object):
'''
Good reference: https://github.com/python/cpython/blob/fcb55c0037baab6f98f91ee38ce84b6f874f034a/Python/ceval.c
'''
def __init__(self, bytecode):
self.bytecode = bytecode
self._stack = deque()
self.function_calls = []
self.load_attrs = {}
self.func = set()
self.func_name_id_to_code_object = {}
def __str__(self):
return 'Stack:\nFunction calls:\n%s\nLoad attrs:\n%s\n' % (self.function_calls, list(self.load_attrs.values()))
def _getname(self, instr):
if instr.opcode in _opcode.hascompare:
cmp_op = dis.cmp_op[instr.arg]
if cmp_op not in ('exception match', 'BAD'):
return _COMP_OP_MAP.get(cmp_op, cmp_op)
return instr.arg
def _getcallname(self, instr):
if instr.name == 'BINARY_SUBSCR':
return '__getitem__().__call__'
if instr.name == 'CALL_FUNCTION':
# Note: previously a '__call__().__call__' was returned, but this was a bit weird
# and on Python 3.9 this construct could appear for some internal things where
# it wouldn't be expected.
# Note: it'd be what we had in func()().
return None
if instr.name == 'MAKE_FUNCTION':
return '__func__().__call__'
if instr.name == 'LOAD_ASSERTION_ERROR':
return 'AssertionError'
name = self._getname(instr)
if isinstance(name, CodeType):
name = name.co_qualname # Note: only available for Python 3.11
if isinstance(name, _Variable):
name = name.name
if not isinstance(name, str):
return None
if name.endswith('>'): # xxx.<listcomp>, xxx.<lambda>, ...
return name.split('.')[-1]
return name
def _no_stack_change(self, instr):
pass # Can be aliased when the instruction does nothing.
def on_LOAD_GLOBAL(self, instr):
self._stack.append(instr)
def on_POP_TOP(self, instr):
try:
self._stack.pop()
except IndexError:
pass # Ok (in the end of blocks)
def on_LOAD_ATTR(self, instr):
self.on_POP_TOP(instr) # replaces the current top
self._stack.append(instr)
self.load_attrs[_TargetIdHashable(instr)] = Target(self._getname(instr), instr.lineno, instr.offset)
on_LOOKUP_METHOD = on_LOAD_ATTR # Improvement in PyPy
def on_LOAD_CONST(self, instr):
self._stack.append(instr)
on_LOAD_DEREF = on_LOAD_CONST
on_LOAD_NAME = on_LOAD_CONST
on_LOAD_CLOSURE = on_LOAD_CONST
on_LOAD_CLASSDEREF = on_LOAD_CONST
# Although it actually changes the stack, it's inconsequential for us as a function call can't
# really be found there.
on_IMPORT_NAME = _no_stack_change
on_IMPORT_FROM = _no_stack_change
on_IMPORT_STAR = _no_stack_change
on_SETUP_ANNOTATIONS = _no_stack_change
def on_STORE_FAST(self, instr):
try:
self._stack.pop()
except IndexError:
pass # Ok, we may have a block just with the store
# Note: it stores in the locals and doesn't put anything in the stack.
on_STORE_GLOBAL = on_STORE_FAST
on_STORE_DEREF = on_STORE_FAST
on_STORE_ATTR = on_STORE_FAST
on_STORE_NAME = on_STORE_FAST
on_DELETE_NAME = on_POP_TOP
on_DELETE_ATTR = on_POP_TOP
on_DELETE_GLOBAL = on_POP_TOP
on_DELETE_FAST = on_POP_TOP
on_DELETE_DEREF = on_POP_TOP
on_DICT_UPDATE = on_POP_TOP
on_SET_UPDATE = on_POP_TOP
on_GEN_START = on_POP_TOP
def on_NOP(self, instr):
pass
def _handle_call_from_instr(self, func_name_instr, func_call_instr):
self.load_attrs.pop(_TargetIdHashable(func_name_instr), None)
call_name = self._getcallname(func_name_instr)
target = None
if not call_name:
pass # Ignore if we can't identify a name
elif call_name in ('<listcomp>', '<genexpr>', '<setcomp>', '<dictcomp>'):
code_obj = self.func_name_id_to_code_object[_TargetIdHashable(func_name_instr)]
if code_obj is not None:
children_targets = _get_smart_step_into_targets(code_obj)
if children_targets:
# i.e.: we have targets inside of a <listcomp> or <genexpr>.
# Note that to actually match this in the debugger we need to do matches on 2 frames,
# the one with the <listcomp> and then the actual target inside the <listcomp>.
target = Target(call_name, func_name_instr.lineno, func_call_instr.offset, children_targets)
self.function_calls.append(
target)
else:
# Ok, regular call
target = Target(call_name, func_name_instr.lineno, func_call_instr.offset)
self.function_calls.append(target)
if DEBUG and target is not None:
print('Created target', target)
self._stack.append(func_call_instr) # Keep the func call as the result
def on_COMPARE_OP(self, instr):
try:
_right = self._stack.pop()
except IndexError:
return
try:
_left = self._stack.pop()
except IndexError:
return
cmp_op = dis.cmp_op[instr.arg]
if cmp_op not in ('exception match', 'BAD'):
self.function_calls.append(Target(self._getname(instr), instr.lineno, instr.offset))
self._stack.append(instr)
def on_IS_OP(self, instr):
try:
self._stack.pop()
except IndexError:
return
try:
self._stack.pop()
except IndexError:
return
def on_BINARY_SUBSCR(self, instr):
try:
_sub = self._stack.pop()
except IndexError:
return
try:
_container = self._stack.pop()
except IndexError:
return
self.function_calls.append(Target(_BINARY_OP_MAP[instr.name], instr.lineno, instr.offset))
self._stack.append(instr)
on_BINARY_MATRIX_MULTIPLY = on_BINARY_SUBSCR
on_BINARY_POWER = on_BINARY_SUBSCR
on_BINARY_MULTIPLY = on_BINARY_SUBSCR
on_BINARY_FLOOR_DIVIDE = on_BINARY_SUBSCR
on_BINARY_TRUE_DIVIDE = on_BINARY_SUBSCR
on_BINARY_MODULO = on_BINARY_SUBSCR
on_BINARY_ADD = on_BINARY_SUBSCR
on_BINARY_SUBTRACT = on_BINARY_SUBSCR
on_BINARY_LSHIFT = on_BINARY_SUBSCR
on_BINARY_RSHIFT = on_BINARY_SUBSCR
on_BINARY_AND = on_BINARY_SUBSCR
on_BINARY_OR = on_BINARY_SUBSCR
on_BINARY_XOR = on_BINARY_SUBSCR
def on_LOAD_METHOD(self, instr):
    # LOAD_METHOD replaces the receiver on the stack with the bound method:
    # drop the receiver and push this load instruction in its place.
    self.on_POP_TOP(instr)  # Remove the previous as we're loading something from it.
    self._stack.append(instr)

def on_MAKE_FUNCTION(self, instr):
    if not IS_PY311_OR_GREATER:
        # Before 3.11 the qualified name is at TOS with the code object below it.
        qualname = self._stack.pop()
        code_obj_instr = self._stack.pop()
    else:
        # In 3.11 the qualifier name is no longer put in the stack: the code
        # object has a co_qualname which we can use.
        qualname = code_obj_instr = self._stack.pop()

    arg = instr.arg
    # Flag bits tell which optional values were pushed below the code object.
    if arg & 0x08:
        _func_closure = self._stack.pop()  # tuple of cells (closure)
    if arg & 0x04:
        _func_annotations = self._stack.pop()  # annotations
    if arg & 0x02:
        _func_kwdefaults = self._stack.pop()  # keyword-only defaults
    if arg & 0x01:
        _func_defaults = self._stack.pop()  # positional defaults

    call_name = self._getcallname(qualname)
    if call_name in ('<genexpr>', '<listcomp>', '<setcomp>', '<dictcomp>'):
        # Remember comprehension code objects so a later call to the
        # comprehension can be mapped back to its code object.
        if isinstance(code_obj_instr.arg, CodeType):
            self.func_name_id_to_code_object[_TargetIdHashable(qualname)] = code_obj_instr.arg
    self._stack.append(qualname)

def on_LOAD_FAST(self, instr):
    # Loads push the instruction itself (later used to resolve the call name).
    self._stack.append(instr)

def on_LOAD_ASSERTION_ERROR(self, instr):
    self._stack.append(instr)

on_LOAD_BUILD_CLASS = on_LOAD_FAST

def on_CALL_METHOD(self, instr):
    # pop the actual args
    for _ in range(instr.arg):
        self._stack.pop()

    # Next entry is the method pushed by LOAD_METHOD: record the call.
    func_name_instr = self._stack.pop()
    self._handle_call_from_instr(func_name_instr, instr)

def on_PUSH_NULL(self, instr):
    # 3.11 pushes NULL before a callable; keep a placeholder for it.
    self._stack.append(instr)
def on_CALL_FUNCTION(self, instr):
    '''
    CALL_FUNCTION(argc): pops the arguments and then the callable, recording
    the callable as a step-into target. Bails out silently when the simulated
    (partial) stack underflows.
    '''
    oparg = instr.arg
    # Low byte: positional args; high byte: keyword args (2 entries each).
    total_to_pop = (oparg & 0xff) + ((oparg >> 8) * 2)

    popped = None
    for _ in range(total_to_pop + 1):  # the final pop is the callable itself
        if not self._stack:
            return
        popped = self._stack.pop()

    self._handle_call_from_instr(popped, instr)
def on_CALL_FUNCTION_KW(self, instr):
    # CALL_FUNCTION_KW: TOS is a tuple with the keyword-argument names.
    # Note: no IndexError guards here -- _get_smart_step_into_targets wraps
    # each handler call in a try..except and discards errors in non-strict mode.
    # names of kw args
    _names_of_kw_args = self._stack.pop()

    # pop the actual args
    arg = instr.arg
    argc = arg & 0xff  # positional args
    argc += ((arg >> 8) * 2)  # keyword args
    for _ in range(argc):
        self._stack.pop()

    func_name_instr = self._stack.pop()
    self._handle_call_from_instr(func_name_instr, instr)

def on_CALL_FUNCTION_VAR(self, instr):
    # CALL_FUNCTION_VAR (legacy opcode): TOS holds the *args value.
    # var name
    _var_arg = self._stack.pop()

    # pop the actual args
    arg = instr.arg
    argc = arg & 0xff  # positional args
    argc += ((arg >> 8) * 2)  # keyword args
    for _ in range(argc):
        self._stack.pop()

    func_name_instr = self._stack.pop()
    self._handle_call_from_instr(func_name_instr, instr)

def on_CALL_FUNCTION_VAR_KW(self, instr):
    # CALL_FUNCTION_VAR_KW (legacy opcode): pops the kw-names tuple, the
    # **kwargs mapping, the args and finally the callable.
    # names of kw args
    _names_of_kw_args = self._stack.pop()

    arg = instr.arg
    argc = arg & 0xff  # positional args
    argc += ((arg >> 8) * 2)  # keyword args

    # also pop **kwargs
    self._stack.pop()

    # pop the actual args
    for _ in range(argc):
        self._stack.pop()

    func_name_instr = self._stack.pop()
    self._handle_call_from_instr(func_name_instr, instr)

def on_CALL_FUNCTION_EX(self, instr):
    # CALL_FUNCTION_EX: flag bit 0x01 tells whether **kwargs was pushed.
    if instr.arg & 0x01:
        _kwargs = self._stack.pop()
    _callargs = self._stack.pop()
    func_name_instr = self._stack.pop()
    self._handle_call_from_instr(func_name_instr, instr)

# Opcodes below are irrelevant for collecting step-into targets: for our
# purposes they don't change the simulated stack.
on_YIELD_VALUE = _no_stack_change
on_GET_AITER = _no_stack_change
on_GET_ANEXT = _no_stack_change
on_END_ASYNC_FOR = _no_stack_change
on_BEFORE_ASYNC_WITH = _no_stack_change
on_SETUP_ASYNC_WITH = _no_stack_change
on_YIELD_FROM = _no_stack_change
on_SETUP_LOOP = _no_stack_change
on_FOR_ITER = _no_stack_change
on_BREAK_LOOP = _no_stack_change
on_JUMP_ABSOLUTE = _no_stack_change
on_RERAISE = _no_stack_change
on_LIST_TO_TUPLE = _no_stack_change
on_CALL_FINALLY = _no_stack_change
on_POP_FINALLY = _no_stack_change
def on_JUMP_IF_FALSE_OR_POP(self, instr):
    '''Conditional jump: approximated here by dropping the condition value (best effort).'''
    if self._stack:
        self._stack.pop()

on_JUMP_IF_TRUE_OR_POP = on_JUMP_IF_FALSE_OR_POP

def on_JUMP_IF_NOT_EXC_MATCH(self, instr):
    '''Drops the two comparands used by the exception match (best effort).'''
    for _ in range(2):
        if not self._stack:
            return
        self._stack.pop()
def on_ROT_TWO(self, instr):
    '''
    Swaps the two top-most stack entries. Best effort: each block starts with
    an empty simulated stack, so on underflow whatever was popped is restored
    and the swap is skipped.
    '''
    try:
        p0 = self._stack.pop()
    except IndexError:
        return

    try:
        p1 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`; only pop()'s IndexError is expected.
        self._stack.append(p0)
        return

    self._stack.append(p0)
    self._stack.append(p1)
def on_ROT_THREE(self, instr):
    '''
    Approximates ROT_THREE by reversing the top three entries: the stack
    depth stays correct, which is all the target collection needs. On
    underflow the popped entries are pushed back and the operation is skipped.
    '''
    try:
        p0 = self._stack.pop()
    except IndexError:
        return

    try:
        p1 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`; only pop()'s IndexError is expected.
        self._stack.append(p0)
        return

    try:
        p2 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`.
        self._stack.append(p0)
        self._stack.append(p1)
        return

    self._stack.append(p0)
    self._stack.append(p1)
    self._stack.append(p2)
def on_ROT_FOUR(self, instr):
    '''
    Approximates ROT_FOUR by reversing the top four entries: the stack depth
    stays correct, which is all the target collection needs. On underflow the
    popped entries are pushed back and the operation is skipped.
    '''
    try:
        p0 = self._stack.pop()
    except IndexError:
        return

    try:
        p1 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`; only pop()'s IndexError is expected.
        self._stack.append(p0)
        return

    try:
        p2 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`.
        self._stack.append(p0)
        self._stack.append(p1)
        return

    try:
        p3 = self._stack.pop()
    except IndexError:  # Fix: was a bare `except:`.
        self._stack.append(p0)
        self._stack.append(p1)
        self._stack.append(p2)
        return

    self._stack.append(p0)
    self._stack.append(p1)
    self._stack.append(p2)
    self._stack.append(p3)
def on_BUILD_LIST_FROM_ARG(self, instr):
    # The argument is already on the stack; push the list placeholder.
    self._stack.append(instr)

def on_BUILD_MAP(self, instr):
    # Each map entry pushed a key and a value: pop both per entry, then push
    # the map placeholder.
    for _i in range(instr.arg):
        self._stack.pop()
        self._stack.pop()
    self._stack.append(instr)

def on_BUILD_CONST_KEY_MAP(self, instr):
    self.on_POP_TOP(instr)  # keys

    for _i in range(instr.arg):
        self.on_POP_TOP(instr)  # value

    self._stack.append(instr)

# These simply drop the top of the stack (the value itself is irrelevant here).
on_RETURN_VALUE = on_POP_TOP
on_POP_JUMP_IF_FALSE = on_POP_TOP
on_POP_JUMP_IF_TRUE = on_POP_TOP
on_DICT_MERGE = on_POP_TOP
on_LIST_APPEND = on_POP_TOP
on_SET_ADD = on_POP_TOP
on_LIST_EXTEND = on_POP_TOP
on_UNPACK_EX = on_POP_TOP

# ok: doesn't change the stack (converts top to getiter(top))
on_GET_ITER = _no_stack_change
on_GET_AWAITABLE = _no_stack_change
on_GET_YIELD_FROM_ITER = _no_stack_change
def on_RETURN_GENERATOR(self, instr):
    # NOTE(review): this def is immediately shadowed by the assignment below,
    # so the push never runs -- confirm whether _no_stack_change is really
    # the intended behavior for RETURN_GENERATOR (3.11+).
    self._stack.append(instr)

on_RETURN_GENERATOR = _no_stack_change
on_RESUME = _no_stack_change

def on_MAP_ADD(self, instr):
    # Drops the key/value pair added to the map being built.
    self.on_POP_TOP(instr)
    self.on_POP_TOP(instr)

def on_UNPACK_SEQUENCE(self, instr):
    # Pops the sequence and pushes one placeholder per unpacked item.
    self._stack.pop()
    for _i in range(instr.arg):
        self._stack.append(instr)
def on_BUILD_LIST(self, instr):
    # Pops the collected elements and pushes the container placeholder.
    for _i in range(instr.arg):
        self.on_POP_TOP(instr)
    self._stack.append(instr)

# All the BUILD_* container variants behave the same way stack-wise.
on_BUILD_TUPLE = on_BUILD_LIST
on_BUILD_STRING = on_BUILD_LIST
on_BUILD_TUPLE_UNPACK_WITH_CALL = on_BUILD_LIST
on_BUILD_TUPLE_UNPACK = on_BUILD_LIST
on_BUILD_LIST_UNPACK = on_BUILD_LIST
on_BUILD_MAP_UNPACK_WITH_CALL = on_BUILD_LIST
on_BUILD_MAP_UNPACK = on_BUILD_LIST
on_BUILD_SET = on_BUILD_LIST
on_BUILD_SET_UNPACK = on_BUILD_LIST

# NOTE(review): on_POP_FINALLY / on_BEGIN_FINALLY / on_END_FINALLY are also
# assigned elsewhere in this class -- redundant but harmless (same value).
on_SETUP_FINALLY = _no_stack_change
on_POP_FINALLY = _no_stack_change
on_BEGIN_FINALLY = _no_stack_change
on_END_FINALLY = _no_stack_change

def on_RAISE_VARARGS(self, instr):
    # Drops the exception (and optional cause) being raised.
    for _i in range(instr.arg):
        self.on_POP_TOP(instr)

# Block-management opcodes: no effect on the values we track.
on_POP_BLOCK = _no_stack_change
on_JUMP_FORWARD = _no_stack_change
on_POP_EXCEPT = _no_stack_change
on_SETUP_EXCEPT = _no_stack_change
on_WITH_EXCEPT_START = _no_stack_change
on_END_FINALLY = _no_stack_change
on_BEGIN_FINALLY = _no_stack_change
on_SETUP_WITH = _no_stack_change
on_WITH_CLEANUP_START = _no_stack_change
on_WITH_CLEANUP_FINISH = _no_stack_change
on_FORMAT_VALUE = _no_stack_change
on_EXTENDED_ARG = _no_stack_change
def on_INPLACE_ADD(self, instr):
    '''
    An in-place operator would pop two values and push the result (in `a += 1`
    both `a` and `1` are popped). As an approximation we only discard the top
    entry, effectively leaving the original value where the result would be.
    '''
    if self._stack:
        self._stack.pop()

# All in-place operators behave identically stack-wise.
on_INPLACE_POWER = on_INPLACE_ADD
on_INPLACE_MULTIPLY = on_INPLACE_ADD
on_INPLACE_MATRIX_MULTIPLY = on_INPLACE_ADD
on_INPLACE_TRUE_DIVIDE = on_INPLACE_ADD
on_INPLACE_FLOOR_DIVIDE = on_INPLACE_ADD
on_INPLACE_MODULO = on_INPLACE_ADD
on_INPLACE_SUBTRACT = on_INPLACE_ADD
on_INPLACE_RSHIFT = on_INPLACE_ADD
on_INPLACE_LSHIFT = on_INPLACE_ADD
on_INPLACE_AND = on_INPLACE_ADD
on_INPLACE_OR = on_INPLACE_ADD
on_INPLACE_XOR = on_INPLACE_ADD
def on_DUP_TOP(self, instr):
    '''
    Duplicates TOS. When the simulated stack is empty (start of a block) the
    instruction itself is pushed as a placeholder instead.
    '''
    if self._stack:
        self._stack.append(self._stack[-1])
    else:
        # ok (in the start of block)
        self._stack.append(instr)
def on_DUP_TOP_TWO(self, instr):
    '''
    Duplicates the two top-most entries preserving their order. Missing
    entries (block started with a shallow stack) are replaced by the
    instruction itself as placeholders.
    '''
    depth = len(self._stack)
    if depth >= 2:
        self._stack.extend(self._stack[-2:])
    elif depth == 1:
        self._stack.append(self._stack[-1])
        self._stack.append(instr)
    else:
        self._stack.append(instr)
def on_BUILD_SLICE(self, instr):
    '''Drops the slice components (best effort) and pushes the slice placeholder.'''
    for _ in range(instr.arg):
        if self._stack:
            self._stack.pop()
    self._stack.append(instr)

def on_STORE_SUBSCR(self, instr):
    '''Drops the value, container and subscript (best effort on underflow).'''
    try:
        for _ in range(3):
            self._stack.pop()
    except IndexError:
        pass

def on_DELETE_SUBSCR(self, instr):
    '''Drops the container and subscript (best effort on underflow).'''
    try:
        for _ in range(2):
            self._stack.pop()
    except IndexError:
        pass
# Note: on Python 3 this is only found on interactive mode to print the results of
# some evaluation.
on_PRINT_EXPR = on_POP_TOP

# Unary operators replace TOS in place: the stack shape is unchanged.
on_UNARY_POSITIVE = _no_stack_change
on_UNARY_NEGATIVE = _no_stack_change
on_UNARY_NOT = _no_stack_change
on_UNARY_INVERT = _no_stack_change

# 3.11 bookkeeping opcodes: no effect on the values we track.
on_CACHE = _no_stack_change
on_PRECALL = _no_stack_change
def _get_smart_step_into_targets(code):
    '''
    Computes the candidate step-into targets for a code object by simulating
    the value stack of each basic block of its control flow graph.

    :param code: the code object to analyze.
    :return list(Target)
    '''
    b = bytecode.Bytecode.from_code(code)
    cfg = bytecode_cfg.ControlFlowGraph.from_bytecode(b)

    ret = []

    for block in cfg:
        if DEBUG:
            print('\nStart block----')
        # Each block gets a fresh simulated stack (hence the underflow
        # tolerance in the individual on_* handlers).
        stack = _StackInterpreter(block)
        for instr in block:
            try:
                # Dispatch to the handler named after the opcode (on_LOAD_FAST, ...).
                func_name = 'on_%s' % (instr.name,)
                func = getattr(stack, func_name, None)

                if DEBUG:
                    if instr.name != 'CACHE':  # Filter the ones we don't want to see.
                        print('\nWill handle: ', instr, '>>', stack._getname(instr), '<<')
                        print('Current stack:')
                        for entry in stack._stack:
                            print(' arg:', stack._getname(entry), '(', entry, ')')

                if func is None:
                    if STRICT_MODE:
                        raise AssertionError('%s not found.' % (func_name,))
                    else:
                        continue
                func(instr)
            except:
                # Bare except is deliberate: a failure simulating one
                # instruction must not abort the whole collection.
                if STRICT_MODE:
                    raise  # Error in strict mode.
                else:
                    # In non-strict mode, log it (if in verbose mode) and keep on going.
                    if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 2:
                        pydev_log.exception('Exception computing step into targets (handled).')

        ret.extend(stack.function_calls)
        # No longer considering attr loads as calls (while in theory sometimes it's possible
        # that something as `some.attr` can turn out to be a property which could be stepped
        # in, it's not that common in practice and can be surprising for users, so, disabling
        # step into from stepping into properties).
        # ret.extend(stack.load_attrs.values())

    return ret
# Note that the offset is unique within the frame (so, we can use it as the target id).
# Also, as the offset is the instruction offset within the frame, it's possible to
# to inspect the parent frame for frame.f_lasti to know where we actually are (as the
# caller name may not always match the new frame name).
class Variant(object):
    '''
    One smart-step-into target of a frame: a call found in the bytecode, with
    the data needed to present it and to match it against frame.f_lasti.
    '''

    __slots__ = ['name', 'is_visited', 'line', 'offset', 'call_order', 'children_variants', 'parent']

    def __init__(self, name, is_visited, line, offset, call_order, children_variants=None):
        self.name = name
        self.is_visited = is_visited
        self.line = line
        self.offset = offset
        self.call_order = call_order
        self.children_variants = children_variants
        self.parent = None
        if children_variants:
            # Back-link the children to this variant.
            for child in children_variants:
                child.parent = self

    def __repr__(self):
        parts = []
        for slot in self.__slots__:
            if slot == 'parent':
                try:
                    parent = self.parent
                except AttributeError:
                    parts.append('%s: <not set>' % (slot,))
                else:
                    if parent is None:
                        parts.append('parent: None')
                    else:
                        parts.append('parent: %s (%s)' % (parent.name, parent.offset))
                continue

            if slot == 'children_variants':
                parts.append('children_variants: %s' % (len(self.children_variants) if self.children_variants else 0))
                continue

            try:
                parts.append('%s: %s' % (slot, getattr(self, slot)))
            except AttributeError:
                parts.append('%s: <not set>' % (slot,))

        return 'Variant(%s)' % ', '.join(parts)

    __str__ = __repr__
def _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base):
    '''
    Converts a collected Target into a Variant, filtering out targets whose
    name isn't a string or whose line is outside [start_line, end_line].

    :param call_order_cache: dict(name -> count) used to number repeated
        calls to the same name (1-based).
    :param lasti: frame.f_lasti; targets at an offset <= lasti are marked visited.
    :param base: subtracted from the target line (to make lines relative).
    :return: a Variant or None when the target is filtered out.
    '''
    name = target.arg
    if not isinstance(name, str):
        return
    if target.lineno > end_line:
        return
    if target.lineno < start_line:
        return

    call_order = call_order_cache.get(name, 0) + 1
    call_order_cache[name] = call_order
    is_visited = target.offset <= lasti

    children_targets = target.children_targets
    children_variants = None
    if children_targets:
        # NOTE(review): children filtered out by the recursion become None
        # entries in children_variants -- confirm Variant.__init__ (which
        # sets child.parent) never actually receives a None child here.
        children_variants = [
            _convert_target_to_variant(child, start_line, end_line, call_order_cache, lasti, base)
            for child in target.children_targets]

    return Variant(name, is_visited, target.lineno - base, target.offset, call_order, children_variants)
def calculate_smart_step_into_variants(frame, start_line, end_line, base=0):
    """
    Calculate smart step into variants for the given line range.

    :param frame:
    :type frame: :py:class:`types.FrameType`

    :param start_line: first line (inclusive) to consider.
    :param end_line: last line (inclusive) to consider.
    :param base: subtracted from each target's line (to make lines relative).

    :return: A list of call names from the first to the last.
    :note: it's guaranteed that the offsets appear in order.
    :raise: :py:class:`RuntimeError` if failed to parse the bytecode or if dis cannot be used.
    """
    variants = []
    code = frame.f_code
    lasti = frame.f_lasti

    call_order_cache = {}
    if DEBUG:
        print('dis.dis:')
        if IS_PY311_OR_GREATER:
            # show_caches=False: CACHE entries are just noise for this output.
            dis.dis(code, show_caches=False)
        else:
            dis.dis(code)

    for target in _get_smart_step_into_targets(code):
        variant = _convert_target_to_variant(target, start_line, end_line, call_order_cache, lasti, base)
        if variant is None:
            # Filtered out (non-string name or outside the line range).
            continue
        variants.append(variant)

    return variants
def get_smart_step_into_variant_from_frame_offset(frame_f_lasti, variants):
    """
    Given the frame.f_lasti, return the related `Variant`.

    :note: if the offset is found before any variant available or no variants are
    available, None is returned.

    :rtype: Variant|NoneType
    """
    if not variants:
        return None

    # Variants are ordered by offset (guaranteed by the collection), so a
    # binary search finds the last variant whose offset is <= frame_f_lasti.
    i = bisect(KeyifyList(variants, lambda entry:entry.offset), frame_f_lasti)

    if i == 0:
        return None

    else:
        return variants[i - 1]

View File

@ -1,611 +0,0 @@
"""
Decompiler that can be used with the debugger (where statements correctly represent the
line numbers).
Note: this is a work in progress / proof of concept / not ready to be used.
"""
import dis
from _pydevd_bundle.pydevd_collect_bytecode_info import iter_instructions
from _pydev_bundle import pydev_log
import sys
import inspect
from io import StringIO
class _Stack(object):
def __init__(self):
self._contents = []
def push(self, obj):
# print('push', obj)
self._contents.append(obj)
def pop(self):
return self._contents.pop(-1)
# Markers appended to a line's contents to request an indent/dedent when the
# source text is rebuilt (see _Writer and _PyCodeToSource.disassemble).
INDENT_MARKER = object()
DEDENT_MARKER = object()

# Internal "no value passed" marker (None is a legitimate token value).
_SENTINEL = object()

DEBUG = False  # Set to True to trace the decompilation process.
class _Token(object):
    '''
    One piece of output text tied to a line; the "after" links express
    ordering constraints honored when composing the final line text.
    '''

    def __init__(self, i_line, instruction=None, tok=_SENTINEL, priority=0, after=None, end_of_line=False):
        '''
        :param i_line: the line this token belongs to.
        :param instruction: the instruction which generated the token (its
            argval provides the text when `tok` isn't given).
        :param tok: explicit text for this token (overrides the instruction's argval).
        :param priority:
        :param after: a _Token or _BaseHandler which must be written before this one.
        :param end_of_line:
            Marker to signal only after all the other tokens have been written.
        '''
        self.i_line = i_line
        if tok is not _SENTINEL:
            self.tok = tok
        else:
            if instruction is not None:
                if inspect.iscode(instruction.argval):
                    # Nested code objects are merged separately; no text here.
                    self.tok = ''
                else:
                    self.tok = str(instruction.argval)
            else:
                raise AssertionError('Either the tok or the instruction is needed.')
        self.instruction = instruction
        self.priority = priority
        self.end_of_line = end_of_line
        self._after_tokens = set()
        self._after_handler_tokens = set()
        if after:
            self.mark_after(after)

    def mark_after(self, v):
        # Records that this token may only be written after `v` (a single
        # token or all the tokens of a handler).
        if isinstance(v, _Token):
            self._after_tokens.add(v)
        elif isinstance(v, _BaseHandler):
            self._after_handler_tokens.add(v)
        else:
            raise AssertionError('Unhandled: %s' % (v,))

    def get_after_tokens(self):
        # Returns the full set of tokens which must precede this one.
        ret = self._after_tokens.copy()
        for handler in self._after_handler_tokens:
            ret.update(handler.tokens)
        return ret

    def __repr__(self):
        return 'Token(%s, after: %s)' % (self.tok, self.get_after_tokens())

    __str__ = __repr__
class _Writer(object):
    '''
    Accumulates the _Tokens (and indent/dedent markers) generated for each
    line; the actual text is composed later by _compose_line_contents.
    '''

    def __init__(self):
        # line number -> list of _Token / INDENT_MARKER / DEDENT_MARKER
        self.line_to_contents = {}
        # Tokens already written (prevents writing the same token twice).
        self.all_tokens = set()

    def get_line(self, line):
        # Returns (creating on demand) the contents list for the given line.
        lst = self.line_to_contents.get(line)
        if lst is None:
            lst = self.line_to_contents[line] = []
        return lst

    def indent(self, line):
        self.get_line(line).append(INDENT_MARKER)

    def dedent(self, line):
        self.get_line(line).append(DEDENT_MARKER)

    def write(self, line, token):
        # Writes a token to a line (no-op when it was already written).
        if token in self.all_tokens:
            return
        self.all_tokens.add(token)
        assert isinstance(token, _Token)
        lst = self.get_line(line)
        lst.append(token)
class _BaseHandler(object):
    '''
    Base class for the per-opcode handlers: one instance is created per
    instruction; it receives the simulated stack and the writer and processes
    the instruction in _handle() (invoked from the constructor).
    '''

    def __init__(self, i_line, instruction, stack, writer, disassembler):
        self.i_line = i_line
        self.instruction = instruction
        self.stack = stack
        self.writer = writer
        self.disassembler = disassembler
        self.tokens = []
        self._handle()

    def _write_tokens(self):
        # Flushes the tokens this handler generated to the writer.
        for token in self.tokens:
            self.writer.write(token.i_line, token)

    def _handle(self):
        # Subclasses must process self.instruction here.
        raise NotImplementedError(self)

    def __repr__(self, *args, **kwargs):
        try:
            return "%s line:%s" % (self.instruction, self.i_line)
        except:
            return object.__repr__(self)

    __str__ = __repr__
# Maps the bytecode op name (e.g.: 'LOAD_CONST') to the handler class for it.
_op_name_to_handler = {}


def _register(cls):
    '''Class decorator: registers cls as the handler for cls.opname.'''
    _op_name_to_handler[cls.opname] = cls
    return cls
class _BasePushHandler(_BaseHandler):
    '''Pushes the handler itself on the simulated stack (no tokens generated).'''

    def _handle(self):
        self.stack.push(self)


class _BaseLoadHandler(_BasePushHandler):
    '''Pushes itself and generates one token with the loaded value's text.'''

    def _handle(self):
        _BasePushHandler._handle(self)
        self.tokens = [_Token(self.i_line, self.instruction)]


@_register
class _LoadBuildClass(_BasePushHandler):
    opname = "LOAD_BUILD_CLASS"


@_register
class _LoadConst(_BaseLoadHandler):
    opname = "LOAD_CONST"


@_register
class _LoadName(_BaseLoadHandler):
    opname = "LOAD_NAME"


@_register
class _LoadGlobal(_BaseLoadHandler):
    opname = "LOAD_GLOBAL"


@_register
class _LoadFast(_BaseLoadHandler):
    opname = "LOAD_FAST"


@_register
class _GetIter(_BaseHandler):
    '''
    Implements TOS = iter(TOS).
    '''

    opname = "GET_ITER"

    # The handler which produced the iterable being wrapped.
    iter_target = None

    def _handle(self):
        self.iter_target = self.stack.pop()
        self.tokens.extend(self.iter_target.tokens)
        self.stack.push(self)
@_register
class _ForIter(_BaseHandler):
    '''
    TOS is an iterator. Call its __next__() method. If this yields a new value, push it on the stack
    (leaving the iterator below it). If the iterator indicates it is exhausted TOS is popped, and
    the byte code counter is incremented by delta.
    '''

    opname = "FOR_ITER"

    # The handler whose tokens produced the iterated expression.
    iter_in = None

    def _handle(self):
        self.iter_in = self.stack.pop()
        self.stack.push(self)

    def store_in_name(self, store_name):
        # Called by _StoreName: emits `for <name> in <iterable>:` tokens,
        # chaining "after" links so the parts compose in the right order.
        for_token = _Token(self.i_line, None, 'for ')
        self.tokens.append(for_token)

        prev = for_token

        t_name = _Token(store_name.i_line, store_name.instruction, after=prev)
        self.tokens.append(t_name)
        prev = t_name

        in_token = _Token(store_name.i_line, None, ' in ', after=prev)
        self.tokens.append(in_token)
        prev = in_token

        max_line = store_name.i_line
        if self.iter_in:
            for t in self.iter_in.tokens:
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                prev = t
            self.tokens.extend(self.iter_in.tokens)

        colon_token = _Token(self.i_line, None, ':', after=prev)
        self.tokens.append(colon_token)
        prev = for_token

        self._write_tokens()
@_register
class _StoreName(_BaseHandler):
    '''
    Implements name = TOS. namei is the index of name in the attribute co_names of the code object.
    The compiler tries to use STORE_FAST or STORE_GLOBAL if possible.
    '''

    opname = "STORE_NAME"

    def _handle(self):
        v = self.stack.pop()
        if isinstance(v, _ForIter):
            # The stored name is a loop variable: delegate to the for handler.
            v.store_in_name(self)
        else:
            if not isinstance(v, _MakeFunction) or v.is_lambda:
                # Emit `<name> = <value>` (function defs emit their own text).
                line = self.i_line
                for t in v.tokens:
                    line = min(line, t.i_line)

                t_name = _Token(line, self.instruction)
                t_equal = _Token(line, None, '=', after=t_name)
                self.tokens.append(t_name)
                self.tokens.append(t_equal)

                for t in v.tokens:
                    t.mark_after(t_equal)
                self.tokens.extend(v.tokens)

            self._write_tokens()
@_register
class _ReturnValue(_BaseHandler):
    """
    Returns with TOS to the caller of the function.
    """

    opname = "RETURN_VALUE"

    def _handle(self):
        v = self.stack.pop()
        return_token = _Token(self.i_line, None, 'return ', end_of_line=True)
        self.tokens.append(return_token)
        # The returned expression's tokens must come after the 'return '.
        for token in v.tokens:
            token.mark_after(return_token)
        self.tokens.extend(v.tokens)

        self._write_tokens()
@_register
class _CallFunction(_BaseHandler):
    """
    CALL_FUNCTION(argc)

    Calls a callable object with positional arguments. argc indicates the number of positional
    arguments. The top of the stack contains positional arguments, with the right-most argument
    on top. Below the arguments is a callable object to call. CALL_FUNCTION pops all arguments
    and the callable object off the stack, calls the callable object with those arguments, and
    pushes the return value returned by the callable object.

    Changed in version 3.6: This opcode is used only for calls with positional arguments.
    """

    opname = "CALL_FUNCTION"

    def _handle(self):
        # Pop the arguments plus the callable (argval args + 1).
        args = []
        for _i in range(self.instruction.argval + 1):
            arg = self.stack.pop()
            args.append(arg)

        it = reversed(args)
        name = next(it)  # bottom-most popped entry: the callable
        max_line = name.i_line
        for t in name.tokens:
            self.tokens.append(t)

        tok_open_parens = _Token(name.i_line, None, '(', after=name)
        self.tokens.append(tok_open_parens)

        prev = tok_open_parens
        for i, arg in enumerate(it):
            for t in arg.tokens:
                t.mark_after(name)
                t.mark_after(prev)
                max_line = max(max_line, t.i_line)
                self.tokens.append(t)

            prev = arg

            if i > 0:
                # NOTE(review): the separating comma is appended after the
                # argument's tokens; ordering is driven by the 'after' links.
                comma_token = _Token(prev.i_line, None, ',', after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

        tok_close_parens = _Token(max_line, None, ')', after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        self.stack.push(self)
@_register
class _MakeFunctionPy3(_BaseHandler):
    """
    Pushes a new function object on the stack. From bottom to top, the consumed stack must consist
    of values if the argument carries a specified flag value

    0x01 a tuple of default values for positional-only and positional-or-keyword parameters in positional order
    0x02 a dictionary of keyword-only parameters' default values
    0x04 an annotation dictionary
    0x08 a tuple containing cells for free variables, making a closure

    the code associated with the function (at TOS1)

    the qualified name of the function (at TOS)
    """

    opname = "MAKE_FUNCTION"
    is_lambda = False

    def _handle(self):
        stack = self.stack
        self.qualified_name = stack.pop()
        self.code = stack.pop()

        default_node = None
        if self.instruction.argval & 0x01:
            default_node = stack.pop()

        # Lambdas don't get a `def <name>(...)` header.
        is_lambda = self.is_lambda = '<lambda>' in [x.tok for x in self.qualified_name.tokens]

        if not is_lambda:
            def_token = _Token(self.i_line, None, 'def ')
            self.tokens.append(def_token)

        for token in self.qualified_name.tokens:
            self.tokens.append(token)
            if not is_lambda:
                token.mark_after(def_token)
        prev = token

        open_parens_token = _Token(self.i_line, None, '(', after=prev)
        self.tokens.append(open_parens_token)
        prev = open_parens_token

        code = self.code.instruction.argval

        if default_node:
            # Defaults align with the *last* parameters; pad the front with the sentinel.
            defaults = ([_SENTINEL] * (len(code.co_varnames) - len(default_node.instruction.argval))) + list(default_node.instruction.argval)
        else:
            defaults = [_SENTINEL] * len(code.co_varnames)

        for i, arg in enumerate(code.co_varnames):
            if i > 0:
                comma_token = _Token(prev.i_line, None, ', ', after=prev)
                self.tokens.append(comma_token)
                prev = comma_token

            arg_token = _Token(self.i_line, None, arg, after=prev)
            self.tokens.append(arg_token)

            default = defaults[i]
            if default is not _SENTINEL:
                eq_token = _Token(default_node.i_line, None, '=', after=prev)
                self.tokens.append(eq_token)
                prev = eq_token

                default_token = _Token(default_node.i_line, None, str(default), after=prev)
                self.tokens.append(default_token)
                prev = default_token

        tok_close_parens = _Token(prev.i_line, None, '):', after=prev)
        self.tokens.append(tok_close_parens)

        self._write_tokens()

        stack.push(self)
        # The function body is indented right after the header line; the
        # dedent goes on the last line the nested code contributed.
        self.writer.indent(prev.i_line + 1)
        self.writer.dedent(max(self.disassembler.merge_code(code)))


_MakeFunction = _MakeFunctionPy3
def _print_after_info(line_contents, stream=None):
if stream is None:
stream = sys.stdout
for token in line_contents:
after_tokens = token.get_after_tokens()
if after_tokens:
s = '%s after: %s\n' % (
repr(token.tok),
('"' + '", "'.join(t.tok for t in token.get_after_tokens()) + '"'))
stream.write(s)
else:
stream.write('%s (NO REQUISITES)' % repr(token.tok))
def _compose_line_contents(line_contents, previous_line_tokens):
    '''
    Orders the tokens of one line honoring their "after" constraints and
    returns the composed text. Tokens whose requisites can never be satisfied
    are appended as-is (with a critical log) so output is still produced.
    '''
    lst = []
    handled = set()

    # end_of_line tokens are held back and only retried once nothing else
    # can be placed.
    add_to_end_of_line = []
    delete_indexes = []
    for i, token in enumerate(line_contents):
        if token.end_of_line:
            add_to_end_of_line.append(token)
            delete_indexes.append(i)
    for i in reversed(delete_indexes):
        del line_contents[i]
    del delete_indexes

    while line_contents:
        added = False
        delete_indexes = []

        for i, token in enumerate(line_contents):
            after_tokens = token.get_after_tokens()
            for after in after_tokens:
                if after not in handled and after not in previous_line_tokens:
                    break
            else:
                # All requisites are satisfied: emit this token.
                added = True
                previous_line_tokens.add(token)
                handled.add(token)
                lst.append(token.tok)
                delete_indexes.append(i)

        for i in reversed(delete_indexes):
            del line_contents[i]

        if not added:
            if add_to_end_of_line:
                line_contents.extend(add_to_end_of_line)
                del add_to_end_of_line[:]
                continue

            # Something is off, let's just add as is.
            for token in line_contents:
                if token not in handled:
                    lst.append(token.tok)

            stream = StringIO()
            _print_after_info(line_contents, stream)
            pydev_log.critical('Error. After markers are not correct:\n%s', stream.getvalue())
            break

    return ''.join(lst)
class _PyCodeToSource(object):
    '''
    Rebuilds (approximate) source for a code object by replaying its
    instructions through the registered handlers, which emit _Tokens into a
    _Writer; disassemble() then composes the final text line by line.
    '''

    def __init__(self, co, memo=None):
        '''
        :param co: the code object to decompile.
        :param memo: shared dict used when recursing into nested code objects.
        '''
        if memo is None:
            memo = {}
        self.memo = memo
        self.co = co
        self.instructions = list(iter_instructions(co))
        self.stack = _Stack()
        self.writer = _Writer()

    def _process_next(self, i_line):
        # Consumes one instruction and dispatches it to its handler (if any).
        instruction = self.instructions.pop(0)
        handler_class = _op_name_to_handler.get(instruction.opname)
        if handler_class is not None:
            s = handler_class(i_line, instruction, self.stack, self.writer, self)
            if DEBUG:
                print(s)
        else:
            if DEBUG:
                print("UNHANDLED", instruction)

    def build_line_to_contents(self):
        '''Processes all instructions and returns the writer's line -> contents map.'''
        co = self.co

        op_offset_to_line = dict(dis.findlinestarts(co))
        curr_line_index = 0

        instructions = self.instructions
        while instructions:
            instruction = instructions[0]
            new_line_index = op_offset_to_line.get(instruction.offset)
            # Fix: this `is not None` check was duplicated (nested twice);
            # a single check suffices.
            if new_line_index is not None:
                curr_line_index = new_line_index

            self._process_next(curr_line_index)
        return self.writer.line_to_contents

    def merge_code(self, code):
        '''
        Decompiles a nested code object and merges its tokens into this
        writer; returns the (sorted) lines which received contents.
        '''
        if DEBUG:
            print('merge code ----')
        line_to_contents = _PyCodeToSource(code, self.memo).build_line_to_contents()
        lines = []
        for line, contents in sorted(line_to_contents.items()):
            lines.append(line)
            self.writer.get_line(line).extend(contents)
        if DEBUG:
            print('end merge code ----')
        return lines

    def disassemble(self):
        '''Returns the composed source text for the code object.'''
        show_lines = False
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        indent = ''
        previous_line_tokens = set()
        for i_line, contents in sorted(line_to_contents.items()):

            # Pad with blank lines until reaching the target line number.
            while last_line < i_line - 1:
                if show_lines:
                    stream.write(u"%s.\n" % (last_line + 1,))
                else:
                    stream.write(u"\n")
                last_line += 1

            line_contents = []
            dedents_found = 0
            for part in contents:
                if part is INDENT_MARKER:
                    if DEBUG:
                        print('found indent', i_line)
                    indent += '    '
                    continue
                if part is DEDENT_MARKER:
                    if DEBUG:
                        print('found dedent', i_line)
                    dedents_found += 1
                    continue
                line_contents.append(part)

            s = indent + _compose_line_contents(line_contents, previous_line_tokens)
            if show_lines:
                stream.write(u"%s. %s\n" % (i_line, s))
            else:
                stream.write(u"%s\n" % s)

            if dedents_found:
                # Dedents take effect after the current line is written
                # (4 spaces per dedent, matching the indent step above).
                indent = indent[:-(4 * dedents_found)]
            last_line = i_line

        return stream.getvalue()
def code_obj_to_source(co):
    """
    Converts a code object to source code to provide a suitable representation for the compiler when
    the actual source code is not found.

    This is a work in progress / proof of concept / not ready to be used.

    :param co: the code object to decompile.
    :return: the decompiled source text (str).
    """
    ret = _PyCodeToSource(co).disassemble()
    if DEBUG:
        print(ret)
    return ret

View File

@ -1,925 +0,0 @@
import dis
import inspect
import sys
from collections import namedtuple
from _pydev_bundle import pydev_log
from opcode import (EXTENDED_ARG, HAVE_ARGUMENT, cmp_op, hascompare, hasconst,
hasfree, hasjrel, haslocal, hasname, opname)
from io import StringIO
class TryExceptInfo(object):
    '''
    Describes one try..except block found in a code object: where the try and
    the except blocks start/end and which lines re-raise inside the except.
    '''

    def __init__(self, try_line, ignore=False):
        '''
        :param try_line:
        :param ignore:
            Usually we should ignore any block that's not a try..except
            (this can happen for finally blocks, with statements, etc, for
            which we create temporary entries).
        '''
        self.try_line = try_line
        self.ignore = ignore
        self.except_line = -1
        self.except_end_line = -1
        self.raise_lines_in_except = []

        # Note: these may not be available if generated from source instead of bytecode.
        self.except_bytecode_offset = -1
        self.except_end_bytecode_offset = -1

    def is_line_in_try_block(self, line):
        '''True when line is inside the try block (try_line inclusive, except_line exclusive).'''
        return self.try_line <= line < self.except_line

    def is_line_in_except_block(self, line):
        '''True when line is inside the except block (both ends inclusive).'''
        return self.except_line <= line <= self.except_end_line

    def __str__(self):
        parts = [
            '{try:%s' % (self.try_line,),
            ' except %s' % (self.except_line,),
            ' end block %s' % (self.except_end_line,),
        ]
        if self.raise_lines_in_except:
            parts.append(' raises: %s' % (', '.join(str(x) for x in self.raise_lines_in_except),))

        parts.append('}')
        return ''.join(parts)

    __repr__ = __str__
class ReturnInfo(object):
    '''Describes one return statement (its line) found in a code object.'''

    def __init__(self, return_line):
        self.return_line = return_line

    def __str__(self):
        return '{return: %s}' % (self.return_line,)

    __repr__ = __str__
def _get_line(op_offset_to_line, op_offset, firstlineno, search=False):
op_offset_original = op_offset
while op_offset >= 0:
ret = op_offset_to_line.get(op_offset)
if ret is not None:
return ret - firstlineno
if not search:
return ret
else:
op_offset -= 1
raise AssertionError('Unable to find line for offset: %s.Info: %s' % (
op_offset_original, op_offset_to_line))
def debug(s):
    # Debugging helper: intentionally a no-op (replace the body with a print
    # to trace the collection process).
    pass


# Mirrors the relevant fields of dis.Instruction so the Python 2 path below
# can produce compatible objects.
_Instruction = namedtuple('_Instruction', 'opname, opcode, starts_line, argval, is_jump_target, offset, argrepr')
def _iter_as_bytecode_as_instructions_py2(co):
    '''
    Python 2 fallback: decodes co.co_code by hand and yields _Instruction
    tuples compatible with dis.Instruction (dis.Bytecode is Python 3 only).
    '''
    code = co.co_code
    op_offset_to_line = dict(dis.findlinestarts(co))
    labels = set(dis.findlabels(code))
    bytecode_len = len(code)
    i = 0
    extended_arg = 0
    free = None  # lazily computed cellvars + freevars

    op_to_name = opname

    while i < bytecode_len:
        c = code[i]
        op = ord(c)  # Py2: co_code items are 1-char strings.
        is_jump_target = i in labels

        curr_op_name = op_to_name[op]
        initial_bytecode_offset = i

        i = i + 1
        if op < HAVE_ARGUMENT:
            yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), None, is_jump_target, initial_bytecode_offset, '')

        else:
            # 16-bit little-endian argument (+ pending EXTENDED_ARG high bits).
            oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg

            extended_arg = 0
            i = i + 2
            if op == EXTENDED_ARG:
                extended_arg = oparg * 65536

            # Resolve argval/argrepr according to the opcode family.
            if op in hasconst:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_consts[oparg], is_jump_target, initial_bytecode_offset, repr(co.co_consts[oparg]))
            elif op in hasname:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_names[oparg], is_jump_target, initial_bytecode_offset, str(co.co_names[oparg]))
            elif op in hasjrel:
                argval = i + oparg  # relative jumps target an offset past this instruction
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), argval, is_jump_target, initial_bytecode_offset, "to " + repr(argval))
            elif op in haslocal:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), co.co_varnames[oparg], is_jump_target, initial_bytecode_offset, str(co.co_varnames[oparg]))
            elif op in hascompare:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), cmp_op[oparg], is_jump_target, initial_bytecode_offset, cmp_op[oparg])
            elif op in hasfree:
                if free is None:
                    free = co.co_cellvars + co.co_freevars
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), free[oparg], is_jump_target, initial_bytecode_offset, str(free[oparg]))
            else:
                yield _Instruction(curr_op_name, op, _get_line(op_offset_to_line, initial_bytecode_offset, 0), oparg, is_jump_target, initial_bytecode_offset, str(oparg))
def iter_instructions(co):
    '''
    Iterates over the instructions of a code object as dis.Instruction-like
    objects (falls back to manual bytecode parsing on Python 2).

    :param co: the code object whose instructions should be iterated.
    '''
    if sys.version_info[0] < 3:
        iter_in = _iter_as_bytecode_as_instructions_py2(co)
    else:
        iter_in = dis.Bytecode(co)

    # Fix: removed an offset->instruction dict which was built here but never
    # used, along with the redundant truthiness check before the loop.
    for instruction in iter_in:
        yield instruction
def collect_return_info(co, use_func_first_line=False):
    '''
    Collects a ReturnInfo for each RETURN_VALUE opcode in the code object.

    :param co: the code object to inspect.
    :param use_func_first_line: when True, lines are made relative to the
        function's first line (co_firstlineno).
    :return: list(ReturnInfo)
    '''
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        # Without line information there's nothing we can map back to lines.
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    lst = []
    op_offset_to_line = dict(dis.findlinestarts(co))
    for instruction in iter_instructions(co):
        curr_op_name = instruction.opname
        if curr_op_name == 'RETURN_VALUE':
            lst.append(ReturnInfo(_get_line(op_offset_to_line, instruction.offset, firstlineno, search=True)))

    return lst
if sys.version_info[:2] <= (3, 9):
class _TargetInfo(object):
def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
self.except_end_instruction = except_end_instruction
self.jump_if_not_exc_instruction = jump_if_not_exc_instruction
def __str__(self):
msg = ['_TargetInfo(']
msg.append(self.except_end_instruction.opname)
if self.jump_if_not_exc_instruction:
msg.append(' - ')
msg.append(self.jump_if_not_exc_instruction.opname)
msg.append('(')
msg.append(str(self.jump_if_not_exc_instruction.argval))
msg.append(')')
msg.append(')')
return ''.join(msg)
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    """Given the index of the first instruction of an exception handler, find
    where the related except block really ends (Python <= 3.9 bytecode).

    :param instructions: full list of instructions of the code object.
    :param exception_end_instruction_index: index (into *instructions*) of the
        handler start (the SETUP_* jump target).
    :param offset_to_instruction_idx: dict mapping bytecode offset -> index.
    :return: a _TargetInfo, or None meaning "not a try..except we track"
        (e.g. a try..finally) -- the caller should continue its outer loop.
    """
    next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']:  # try..except without checking exception.
        try:
            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'):
                return None
        except IndexError:
            # Fix: previously this just `pass`ed and fell through to use the
            # unbound `jump_instruction` below (NameError). With no instruction
            # before the handler there is nothing to compute.
            return None

        if jump_instruction.opname == 'JUMP_ABSOLUTE':
            # On latest versions of Python 3 the interpreter has a go-backwards step,
            # used to show the initial line of a for/while, etc (which is this
            # JUMP_ABSOLUTE)... we're not really interested in it, but rather on where
            # it points to.
            except_end_instruction = instructions[offset_to_instruction_idx[jump_instruction.argval]]
            idx = offset_to_instruction_idx[except_end_instruction.argval]
            # Search for the POP_EXCEPT which should be at the end of the block.
            for pop_except_instruction in reversed(instructions[:idx]):
                if pop_except_instruction.opname == 'POP_EXCEPT':
                    except_end_instruction = pop_except_instruction
                    return _TargetInfo(except_end_instruction)
            else:
                return None  # i.e.: Continue outer loop

        else:
            # JUMP_FORWARD
            i = offset_to_instruction_idx[jump_instruction.argval]
            try:
                # i.e.: the jump is to the instruction after the block finishes (so, we need to
                # get the previous instruction as that should be the place where the exception
                # block finishes).
                except_end_instruction = instructions[i - 1]
            except Exception:
                pydev_log.critical('Error when computing try..except block end.')
                return None
            return _TargetInfo(except_end_instruction)

    elif next_3 and next_3[0] == 'DUP_TOP':  # try..except AssertionError.
        iter_in = instructions[exception_end_instruction_index + 1:]
        for j, jump_if_not_exc_instruction in enumerate(iter_in):
            if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH':
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)

            elif jump_if_not_exc_instruction.opname == 'COMPARE_OP' and jump_if_not_exc_instruction.argval == 'exception match':
                # Python 3.8 and before
                try:
                    next_instruction = iter_in[j + 1]
                except IndexError:
                    continue
                if next_instruction.opname == 'POP_JUMP_IF_FALSE':
                    except_end_instruction = instructions[offset_to_instruction_idx[next_instruction.argval]]
                    return _TargetInfo(except_end_instruction, next_instruction)
        else:
            return None  # i.e.: Continue outer loop

    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Walk the bytecode of *co* and return a list of TryExceptInfo, one entry
    for each try..except block found (try..finally blocks are skipped).

    :param co: the code object to inspect (must carry line information).
    :param bool use_func_first_line: when True, lines are reported relative to
        the function's first line instead of as absolute file lines.
    '''
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        # Without line information there's nothing to map back to source.
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    try_except_info_lst = []

    op_offset_to_line = dict(dis.findlinestarts(co))

    offset_to_instruction_idx = {}

    instructions = list(iter_instructions(co))

    # First pass: map each bytecode offset to its index in `instructions`.
    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i

    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname

        if curr_op_name in ('SETUP_FINALLY', 'SETUP_EXCEPT'):  # SETUP_EXCEPT before Python 3.8, SETUP_FINALLY Python 3.8 onwards.
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]

            jump_instruction = instructions[exception_end_instruction_index - 1]
            if jump_instruction.opname not in ('JUMP_FORWARD', 'JUMP_ABSOLUTE'):
                # No jump over the handler: this looks like try..finally, skip it.
                continue

            except_end_instruction = None
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction

                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        # Guard against cycles in the jump chain.
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break

            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True),
                    ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line,
                    try_except_info.except_bytecode_offset,
                    firstlineno,
                    search=True
                )

                try_except_info.except_end_bytecode_offset = except_end_instruction.offset
                try_except_info.except_end_line = _get_line(op_offset_to_line, except_end_instruction.offset, firstlineno, search=True)
                try_except_info_lst.append(try_except_info)

                # Record the lines of bare `raise` statements (argval == 0)
                # found inside the except block.
                for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == 'RAISE_VARARGS':
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True))

    return try_except_info_lst
elif sys.version_info[:2] == (3, 10):
class _TargetInfo(object):
def __init__(self, except_end_instruction, jump_if_not_exc_instruction=None):
self.except_end_instruction = except_end_instruction
self.jump_if_not_exc_instruction = jump_if_not_exc_instruction
def __str__(self):
msg = ['_TargetInfo(']
msg.append(self.except_end_instruction.opname)
if self.jump_if_not_exc_instruction:
msg.append(' - ')
msg.append(self.jump_if_not_exc_instruction.opname)
msg.append('(')
msg.append(str(self.jump_if_not_exc_instruction.argval))
msg.append(')')
msg.append(')')
return ''.join(msg)
def _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx):
    """Given the index of the first instruction of an exception handler, find
    where the related except block ends (Python 3.10 bytecode).

    :return: a _TargetInfo, or None meaning "not a try..except we track"
        (e.g. a try..finally) -- the caller should continue its outer loop.
    """
    next_3 = [j_instruction.opname for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]]
    # print('next_3:', [(j_instruction.opname, j_instruction.argval) for j_instruction in instructions[exception_end_instruction_index:exception_end_instruction_index + 3]])
    if next_3 == ['POP_TOP', 'POP_TOP', 'POP_TOP']:  # try..except without checking exception.

        # Previously there was a jump which was able to point where the exception would end. This
        # is no longer true, now a bare except doesn't really have any indication in the bytecode
        # where the end would be expected if the exception wasn't raised, so, we just blindly
        # search for a POP_EXCEPT from the current position.
        for pop_except_instruction in instructions[exception_end_instruction_index + 3:]:
            if pop_except_instruction.opname == 'POP_EXCEPT':
                except_end_instruction = pop_except_instruction
                return _TargetInfo(except_end_instruction)
        # Note: implicitly returns None when no POP_EXCEPT is found.

    elif next_3 and next_3[0] == 'DUP_TOP':  # try..except AssertionError.
        iter_in = instructions[exception_end_instruction_index + 1:]
        for jump_if_not_exc_instruction in iter_in:
            if jump_if_not_exc_instruction.opname == 'JUMP_IF_NOT_EXC_MATCH':
                # Python 3.9
                except_end_instruction = instructions[offset_to_instruction_idx[jump_if_not_exc_instruction.argval]]
                return _TargetInfo(except_end_instruction, jump_if_not_exc_instruction)
        else:
            return None  # i.e.: Continue outer loop

    else:
        # i.e.: we're not interested in try..finally statements, only try..except.
        return None
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Walk the bytecode of *co* (Python 3.10) and return a list of TryExceptInfo,
    one entry for each try..except block found (try..finally blocks skipped).

    :param co: the code object to inspect (must carry line information).
    :param bool use_func_first_line: when True, lines are reported relative to
        the function's first line instead of as absolute file lines.
    '''
    # We no longer have 'END_FINALLY', so, we need to do things differently in Python 3.9
    if not hasattr(co, 'co_lines') and not hasattr(co, 'co_lnotab'):
        # Without line information there's nothing to map back to source.
        return []

    if use_func_first_line:
        firstlineno = co.co_firstlineno
    else:
        firstlineno = 0

    try_except_info_lst = []

    op_offset_to_line = dict(dis.findlinestarts(co))

    offset_to_instruction_idx = {}

    instructions = list(iter_instructions(co))

    # First pass: map each bytecode offset to its index in `instructions`.
    for i, instruction in enumerate(instructions):
        offset_to_instruction_idx[instruction.offset] = i

    for i, instruction in enumerate(instructions):
        curr_op_name = instruction.opname
        if curr_op_name == 'SETUP_FINALLY':
            exception_end_instruction_index = offset_to_instruction_idx[instruction.argval]

            jump_instruction = instructions[exception_end_instruction_index]
            if jump_instruction.opname != 'DUP_TOP':
                # Handlers that don't start by duplicating the exception are
                # try..finally, which we're not interested in.
                continue

            except_end_instruction = None
            indexes_checked = set()
            indexes_checked.add(exception_end_instruction_index)
            target_info = _get_except_target_info(instructions, exception_end_instruction_index, offset_to_instruction_idx)
            while target_info is not None:
                # Handle a try..except..except..except.
                jump_instruction = target_info.jump_if_not_exc_instruction
                except_end_instruction = target_info.except_end_instruction

                if jump_instruction is not None:
                    check_index = offset_to_instruction_idx[jump_instruction.argval]
                    if check_index in indexes_checked:
                        # Guard against cycles in the jump chain.
                        break
                    indexes_checked.add(check_index)
                    target_info = _get_except_target_info(instructions, check_index, offset_to_instruction_idx)
                else:
                    break

            if except_end_instruction is not None:
                try_except_info = TryExceptInfo(
                    _get_line(op_offset_to_line, instruction.offset, firstlineno, search=True),
                    ignore=False
                )
                try_except_info.except_bytecode_offset = instruction.argval
                try_except_info.except_line = _get_line(
                    op_offset_to_line,
                    try_except_info.except_bytecode_offset,
                    firstlineno,
                    search=True
                )

                try_except_info.except_end_bytecode_offset = except_end_instruction.offset

                # On Python 3.10 the final line of the except end isn't really correct, rather,
                # it's engineered to be the same line of the except and not the end line of the
                # block, so, the approach taken is to search for the biggest line between the
                # except and the end instruction
                except_end_line = -1
                start_i = offset_to_instruction_idx[try_except_info.except_bytecode_offset]
                end_i = offset_to_instruction_idx[except_end_instruction.offset]
                # Fix: this inner loop used to reuse the outer loop variable name
                # `instruction`, shadowing/clobbering it; use a distinct name.
                for handler_instruction in instructions[start_i: end_i + 1]:
                    found_at_line = op_offset_to_line.get(handler_instruction.offset)
                    if found_at_line is not None and found_at_line > except_end_line:
                        except_end_line = found_at_line
                try_except_info.except_end_line = except_end_line - firstlineno

                try_except_info_lst.append(try_except_info)

                # Record the lines of bare `raise` statements (argval == 0)
                # found inside the except block.
                for raise_instruction in instructions[i:offset_to_instruction_idx[try_except_info.except_end_bytecode_offset]]:
                    if raise_instruction.opname == 'RAISE_VARARGS':
                        if raise_instruction.argval == 0:
                            try_except_info.raise_lines_in_except.append(
                                _get_line(op_offset_to_line, raise_instruction.offset, firstlineno, search=True))

    return try_except_info_lst
elif sys.version_info[:2] >= (3, 11):
def collect_try_except_info(co, use_func_first_line=False):
    '''
    Bytecode-based try..except collection is not implemented for Python 3.11+.

    Note: if the filename is available and we can get the source,
    `collect_try_except_info_from_source` is preferred (this is kept as
    a fallback for cases where sources aren't available).
    '''
    del co, use_func_first_line  # Unused: source-based collection is used instead.
    return []
import ast as ast_module
class _Visitor(ast_module.NodeVisitor):
    """AST visitor that collects a TryExceptInfo for each try statement which
    actually has an except handler (plain try..finally is ignored)."""

    def __init__(self):
        self.try_except_infos = []  # Completed TryExceptInfo entries (outermost first).
        self._stack = []  # TryExceptInfo entries for Try nodes currently being visited.
        self._in_except_stack = []  # Entries whose except handler we're currently inside.
        self.max_line = -1

    def generic_visit(self, node):
        # Track the highest line number seen; used as the except-end fallback
        # when the AST nodes don't carry `end_lineno`.
        if hasattr(node, 'lineno'):
            if node.lineno > self.max_line:
                self.max_line = node.lineno
        return ast_module.NodeVisitor.generic_visit(self, node)

    def visit_Try(self, node):
        # Start with ignore=True; visit_ExceptHandler flips it off, so try
        # statements without handlers are dropped below.
        info = TryExceptInfo(node.lineno, ignore=True)
        self._stack.append(info)
        self.generic_visit(node)
        assert info is self._stack.pop()
        if not info.ignore:
            self.try_except_infos.insert(0, info)

    if sys.version_info[0] < 3:
        # Python 2 names the AST node TryExcept instead of Try.
        visit_TryExcept = visit_Try

    def visit_ExceptHandler(self, node):
        info = self._stack[-1]
        info.ignore = False
        if info.except_line == -1:
            # Only the first handler defines the except line.
            info.except_line = node.lineno
        self._in_except_stack.append(info)
        self.generic_visit(node)
        if hasattr(node, 'end_lineno'):
            info.except_end_line = node.end_lineno
        else:
            # Older Pythons: fall back to the highest line seen so far.
            info.except_end_line = self.max_line
        self._in_except_stack.pop()

    if sys.version_info[0] >= 3:

        def visit_Raise(self, node):
            # Record bare `raise` (re-raise) statements inside except blocks.
            for info in self._in_except_stack:
                if node.exc is None:
                    info.raise_lines_in_except.append(node.lineno)
            self.generic_visit(node)

    else:

        def visit_Raise(self, node):
            # Python 2 AST: a bare raise has neither a type nor a traceback.
            for info in self._in_except_stack:
                if node.type is None and node.tback is None:
                    info.raise_lines_in_except.append(node.lineno)
            self.generic_visit(node)
def collect_try_except_info_from_source(filename):
    """Read *filename* (as bytes) and collect its try..except info from the AST."""
    with open(filename, 'rb') as stream:
        return collect_try_except_info_from_contents(stream.read(), filename)
def collect_try_except_info_from_contents(contents, filename='<unknown>'):
    """Parse *contents* as Python source and return its TryExceptInfo list.

    :param contents: source text (str or bytes).
    :param filename: used only for error messages from the parser.
    """
    visitor = _Visitor()
    visitor.visit(ast_module.parse(contents, filename))
    return visitor.try_except_infos
# Sentinel returned by _Disassembler._lookahead to signal that the instruction
# list was mutated and processing must restart from the new list head.
RESTART_FROM_LOOKAHEAD = object()

# Sentinel appended after each rendered statement in a line's contents;
# _Disassembler.disassemble renders it as ', ' between statements on one line.
SEPARATOR = object()
class _MsgPart(object):
def __init__(self, line, tok):
assert line >= 0
self.line = line
self.tok = tok
@classmethod
def add_to_line_to_contents(cls, obj, line_to_contents, line=None):
if isinstance(obj, (list, tuple)):
for o in obj:
cls.add_to_line_to_contents(o, line_to_contents, line=line)
return
if isinstance(obj, str):
assert line is not None
line = int(line)
lst = line_to_contents.setdefault(line, [])
lst.append(obj)
return
if isinstance(obj, _MsgPart):
if isinstance(obj.tok, (list, tuple)):
cls.add_to_line_to_contents(obj.tok, line_to_contents, line=obj.line)
return
if isinstance(obj.tok, str):
lst = line_to_contents.setdefault(obj.line, [])
lst.append(obj.tok)
return
raise AssertionError("Unhandled: %" % (obj,))
class _Disassembler(object):
    """Disassembles a code object into a ``line -> tokens`` mapping, converting
    a few common bytecode patterns (attribute access, calls, tuple builds,
    assignments, returns) back to a Python-like textual representation."""

    def __init__(self, co, firstlineno, level=0):
        # :param co: code object to disassemble.
        # :param firstlineno: line offset subtracted from all reported lines.
        # :param level: recursion depth (nested code objects increase it).
        self.co = co
        self.firstlineno = firstlineno
        self.level = level
        self.instructions = list(iter_instructions(co))
        op_offset_to_line = self.op_offset_to_line = dict(dis.findlinestarts(co))

        # Update offsets so that all offsets have the line index (and update it based on
        # the passed firstlineno).
        line_index = co.co_firstlineno - firstlineno
        for instruction in self.instructions:
            new_line_index = op_offset_to_line.get(instruction.offset)
            if new_line_index is not None:
                line_index = new_line_index - firstlineno
                op_offset_to_line[instruction.offset] = line_index
            else:
                # Offsets without their own line start inherit the previous line.
                op_offset_to_line[instruction.offset] = line_index

    # Neutral initial values for max_line/min_line scans.
    BIG_LINE_INT = 9999999
    SMALL_LINE_INT = -1

    def min_line(self, *args):
        # Smallest line found among args (instructions, _MsgParts or nested
        # lists/tuples of those); BIG_LINE_INT when none carries a line.
        m = self.BIG_LINE_INT

        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = min(m, self.min_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = min(m, arg.line)

            elif hasattr(arg, 'offset'):
                m = min(m, self.op_offset_to_line[arg.offset])
        return m

    def max_line(self, *args):
        # Largest line found among args; SMALL_LINE_INT when none carries a line.
        m = self.SMALL_LINE_INT

        for arg in args:
            if isinstance(arg, (list, tuple)):
                m = max(m, self.max_line(*arg))

            elif isinstance(arg, _MsgPart):
                m = max(m, arg.line)

            elif hasattr(arg, 'offset'):
                m = max(m, self.op_offset_to_line[arg.offset])
        return m

    def _lookahead(self):
        '''
        This handles and converts some common constructs from bytecode to actual source code.

        It may change the list of instructions.

        Returns None (nothing recognized), RESTART_FROM_LOOKAHEAD (instructions
        mutated, caller must re-enter), or a renderable repr (tuple/list of
        _MsgPart).
        '''
        msg = self._create_msg_part
        found = []
        fullrepr = None

        # Collect all the load instructions
        for next_instruction in self.instructions:
            if next_instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'):
                found.append(next_instruction)
            else:
                break

        if not found:
            return None

        # Here `next_instruction` is the first non-load instruction, i.e. the
        # consumer of the collected loads.
        if next_instruction.opname == 'LOAD_ATTR':
            prev_instruction = found[-1]
            # Remove the current LOAD_ATTR
            assert self.instructions.pop(len(found)) is next_instruction

            # Add the LOAD_ATTR to the previous LOAD (fusing "obj" + "." + "attr").
            self.instructions[len(found) - 1] = _Instruction(
                prev_instruction.opname,
                prev_instruction.opcode,
                prev_instruction.starts_line,
                prev_instruction.argval,
                False,  # prev_instruction.is_jump_target,
                prev_instruction.offset,
                (
                    msg(prev_instruction),
                    msg(prev_instruction, '.'),
                    msg(next_instruction)
                ),
            )
            return RESTART_FROM_LOOKAHEAD

        if next_instruction.opname in ('CALL_FUNCTION', 'PRECALL'):
            # A call consumes argval args plus the callable itself.
            if len(found) == next_instruction.argval + 1:
                force_restart = False
                delta = 0

            else:
                force_restart = True
                if len(found) > next_instruction.argval + 1:
                    # Extra leading loads don't belong to this call; skip them.
                    delta = len(found) - (next_instruction.argval + 1)
                else:
                    return None  # This is odd

            del_upto = delta + next_instruction.argval + 2  # +2 = NAME / CALL_FUNCTION
            if next_instruction.opname == 'PRECALL':
                del_upto += 1  # Also remove the CALL right after the PRECALL.
            del self.instructions[delta:del_upto]

            found = iter(found[delta:])
            call_func = next(found)
            args = list(found)
            fullrepr = [
                msg(call_func),
                msg(call_func, '('),
            ]
            prev = call_func
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ', '))
                prev = arg
                fullrepr.append(msg(arg))
            fullrepr.append(msg(prev, ')'))

            if force_restart:
                # Re-insert the rendered call as a synthetic instruction so the
                # remaining loads can be re-processed from the start.
                self.instructions.insert(delta, _Instruction(
                    call_func.opname,
                    call_func.opcode,
                    call_func.starts_line,
                    call_func.argval,
                    False,  # call_func.is_jump_target,
                    call_func.offset,
                    tuple(fullrepr),
                ))
                return RESTART_FROM_LOOKAHEAD

        elif next_instruction.opname == 'BUILD_TUPLE':
            # A tuple build consumes exactly argval loads.
            if len(found) == next_instruction.argval:
                force_restart = False
                delta = 0

            else:
                force_restart = True
                if len(found) > next_instruction.argval:
                    delta = len(found) - (next_instruction.argval)
                else:
                    return None  # This is odd

            del self.instructions[delta:delta + next_instruction.argval + 1]  # +1 = BUILD_TUPLE

            found = iter(found[delta:])
            args = [instruction for instruction in found]
            if args:
                first_instruction = args[0]
            else:
                first_instruction = next_instruction

            prev = first_instruction
            fullrepr = []
            fullrepr.append(msg(prev, '('))
            for i, arg in enumerate(args):
                if i > 0:
                    fullrepr.append(msg(prev, ', '))
                prev = arg
                fullrepr.append(msg(arg))
            fullrepr.append(msg(prev, ')'))

            if force_restart:
                self.instructions.insert(delta, _Instruction(
                    first_instruction.opname,
                    first_instruction.opcode,
                    first_instruction.starts_line,
                    first_instruction.argval,
                    False,  # first_instruction.is_jump_target,
                    first_instruction.offset,
                    tuple(fullrepr),
                ))
                return RESTART_FROM_LOOKAHEAD

        if fullrepr is not None and self.instructions:
            if self.instructions[0].opname == 'POP_TOP':
                self.instructions.pop(0)

            # NOTE(review): assumes at least one instruction follows the popped
            # POP_TOP; an expression-statement at the very end would IndexError
            # here -- TODO confirm whether that can occur in practice.
            if self.instructions[0].opname in ('STORE_FAST', 'STORE_NAME'):
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction), msg(next_instruction, ' = '), fullrepr

            if self.instructions[0].opname == 'RETURN_VALUE':
                next_instruction = self.instructions.pop(0)
                return msg(next_instruction, 'return ', line=self.min_line(next_instruction, fullrepr)), fullrepr

        return fullrepr

    def _decorate_jump_target(self, instruction, instruction_repr):
        # Jump targets are rendered wrapped in |offset| markers so jumps can be
        # followed visually.
        if instruction.is_jump_target:
            return ('|', str(instruction.offset), '|', instruction_repr)

        return instruction_repr

    def _create_msg_part(self, instruction, tok=None, line=None):
        # Build a _MsgPart for *instruction*; `tok` overrides the default
        # argrepr-based token, `line` overrides the instruction's line.
        dec = self._decorate_jump_target
        if line is None or line in (self.BIG_LINE_INT, self.SMALL_LINE_INT):
            line = self.op_offset_to_line[instruction.offset]

        argrepr = instruction.argrepr
        if isinstance(argrepr, str) and argrepr.startswith('NULL + '):
            # Python 3.11 prefixes callables pushed with a NULL; strip it.
            argrepr = argrepr[7:]
        return _MsgPart(
            line, tok if tok is not None else dec(instruction, argrepr))

    def _next_instruction_to_str(self, line_to_contents):
        # Consumes instruction(s) from self.instructions and returns either a
        # renderable repr, None (skip), or RESTART_FROM_LOOKAHEAD.
        # indent = ''
        # if self.level > 0:
        #     indent += '    ' * self.level
        # print(indent, 'handle', self.instructions[0])

        if self.instructions:
            ret = self._lookahead()
            if ret:
                return ret

        msg = self._create_msg_part

        instruction = self.instructions.pop(0)

        # NOTE(review): substring membership (`opname in 'RESUME'`) -- in
        # practice only 'RESUME' itself matches, but `==` was likely intended.
        if instruction.opname in 'RESUME':
            return None

        if instruction.opname in ('LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_CONST', 'LOAD_NAME'):
            next_instruction = self.instructions[0]

            if next_instruction.opname in ('STORE_FAST', 'STORE_NAME'):
                self.instructions.pop(0)
                return (
                    msg(next_instruction),
                    msg(next_instruction, ' = '),
                    msg(instruction))

            if next_instruction.opname == 'RETURN_VALUE':
                self.instructions.pop(0)
                return (msg(instruction, 'return ', line=self.min_line(instruction)), msg(instruction))

            if next_instruction.opname == 'RAISE_VARARGS' and next_instruction.argval == 1:
                self.instructions.pop(0)
                return (msg(instruction, 'raise ', line=self.min_line(instruction)), msg(instruction))

        if instruction.opname == 'LOAD_CONST':
            if inspect.iscode(instruction.argval):
                # Nested code object: disassemble it recursively and merge its
                # (indented) lines into the current mapping.
                code_line_to_contents = _Disassembler(
                    instruction.argval, self.firstlineno, self.level + 1
                ).build_line_to_contents()

                for contents in code_line_to_contents.values():
                    contents.insert(0, '    ')
                for line, contents in code_line_to_contents.items():
                    line_to_contents.setdefault(line, []).extend(contents)
                return msg(instruction, 'LOAD_CONST(code)')

        if instruction.opname == 'RAISE_VARARGS':
            if instruction.argval == 0:
                return msg(instruction, 'raise')

        if instruction.opname == 'SETUP_FINALLY':
            return msg(instruction, ('try(', instruction.argrepr, '):'))

        if instruction.argrepr:
            return msg(instruction, (instruction.opname, '(', instruction.argrepr, ')'))

        if instruction.argval:
            return msg(instruction, '%s{%s}' % (instruction.opname, instruction.argval,))

        return msg(instruction, instruction.opname)

    def build_line_to_contents(self):
        # Drain self.instructions into a line -> [token, ...] mapping.
        # print('----')
        # for instruction in self.instructions:
        #     print(instruction)
        # print('----\n\n')

        line_to_contents = {}

        instructions = self.instructions
        while instructions:
            s = self._next_instruction_to_str(line_to_contents)
            if s is RESTART_FROM_LOOKAHEAD:
                # _lookahead mutated the instruction list; process again.
                continue
            if s is None:
                continue

            _MsgPart.add_to_line_to_contents(s, line_to_contents)
            m = self.max_line(s)
            if m != self.SMALL_LINE_INT:
                # Mark the end of this statement on its last line.
                line_to_contents.setdefault(m, []).append(SEPARATOR)
        return line_to_contents

    def disassemble(self):
        # Render the line mapping as text, one source line per output line.
        line_to_contents = self.build_line_to_contents()
        stream = StringIO()
        last_line = 0
        show_lines = False
        for line, contents in sorted(line_to_contents.items()):
            # Pad skipped source lines with blank output lines.
            while last_line < line - 1:
                if show_lines:
                    stream.write('%s.\n' % (last_line + 1,))
                else:
                    stream.write('\n')
                last_line += 1

            if show_lines:
                stream.write('%s. ' % (line,))

            for i, content in enumerate(contents):
                if content == SEPARATOR:
                    # A trailing separator is dropped; interior ones become ', '.
                    if i != len(contents) - 1:
                        stream.write(', ')
                else:
                    stream.write(content)

            stream.write('\n')

            last_line = line

        return stream.getvalue()
def code_to_bytecode_representation(co, use_func_first_line=False):
    '''
    A simple disassemble of bytecode.

    It does not attempt to provide the full Python source code, rather, it provides a low-level
    representation of the bytecode, respecting the lines (so, its target is making the bytecode
    easier to grasp and not providing the original source code).

    Note that it does show jump locations/targets and converts some common bytecode constructs to
    Python code to make it a bit easier to understand.
    '''
    # Reference for bytecodes:
    # https://docs.python.org/3/library/dis.html
    firstlineno = co.co_firstlineno if use_func_first_line else 0
    return _Disassembler(co, firstlineno).disassemble()

View File

@ -1,208 +0,0 @@
# Command ids for the pydevd wire protocol. Each constant below must stay in
# sync with the string names in ID_TO_MEANING.
CMD_RUN = 101
CMD_LIST_THREADS = 102
CMD_THREAD_CREATE = 103
CMD_THREAD_KILL = 104
CMD_THREAD_SUSPEND = 105
CMD_THREAD_RUN = 106
CMD_STEP_INTO = 107
CMD_STEP_OVER = 108
CMD_STEP_RETURN = 109
CMD_GET_VARIABLE = 110
CMD_SET_BREAK = 111
CMD_REMOVE_BREAK = 112
CMD_EVALUATE_EXPRESSION = 113
CMD_GET_FRAME = 114
CMD_EXEC_EXPRESSION = 115
CMD_WRITE_TO_CONSOLE = 116
CMD_CHANGE_VARIABLE = 117
CMD_RUN_TO_LINE = 118
CMD_RELOAD_CODE = 119
CMD_GET_COMPLETIONS = 120

# Note: renumbered (conflicted on merge)
CMD_CONSOLE_EXEC = 121
CMD_ADD_EXCEPTION_BREAK = 122
CMD_REMOVE_EXCEPTION_BREAK = 123
CMD_LOAD_SOURCE = 124
CMD_ADD_DJANGO_EXCEPTION_BREAK = 125
CMD_REMOVE_DJANGO_EXCEPTION_BREAK = 126
CMD_SET_NEXT_STATEMENT = 127
CMD_SMART_STEP_INTO = 128
CMD_EXIT = 129
CMD_SIGNATURE_CALL_TRACE = 130

CMD_SET_PY_EXCEPTION = 131
CMD_GET_FILE_CONTENTS = 132
CMD_SET_PROPERTY_TRACE = 133
# Pydev debug console commands
CMD_EVALUATE_CONSOLE_EXPRESSION = 134
CMD_RUN_CUSTOM_OPERATION = 135
CMD_GET_BREAKPOINT_EXCEPTION = 136
CMD_STEP_CAUGHT_EXCEPTION = 137
CMD_SEND_CURR_EXCEPTION_TRACE = 138
CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139
CMD_IGNORE_THROWN_EXCEPTION_AT = 140
CMD_ENABLE_DONT_TRACE = 141
CMD_SHOW_CONSOLE = 142

CMD_GET_ARRAY = 143
CMD_STEP_INTO_MY_CODE = 144
CMD_GET_CONCURRENCY_EVENT = 145
CMD_SHOW_RETURN_VALUES = 146
CMD_INPUT_REQUESTED = 147
CMD_GET_DESCRIPTION = 148

CMD_PROCESS_CREATED = 149
CMD_SHOW_CYTHON_WARNING = 150
CMD_LOAD_FULL_VALUE = 151

CMD_GET_THREAD_STACK = 152

# This is mostly for unit-tests to diagnose errors on ci.
CMD_THREAD_DUMP_TO_STDERR = 153

# Sent from the client to signal that we should stop when we start executing user code.
CMD_STOP_ON_START = 154

# When the debugger is stopped in an exception, this command will provide the details of the current exception (in the current thread).
CMD_GET_EXCEPTION_DETAILS = 155

# Allows configuring pydevd settings (can be called multiple times and only keys
# available in the json will be configured -- keys not passed will not change the
# previous configuration).
CMD_PYDEVD_JSON_CONFIG = 156

CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION = 157
CMD_THREAD_RESUME_SINGLE_NOTIFICATION = 158

CMD_STEP_OVER_MY_CODE = 159
CMD_STEP_RETURN_MY_CODE = 160

CMD_SET_PY_EXCEPTION_JSON = 161
CMD_SET_PATH_MAPPING_JSON = 162

CMD_GET_SMART_STEP_INTO_VARIANTS = 163  # XXX: PyCharm has 160 for this (we're currently incompatible anyways).

CMD_REDIRECT_OUTPUT = 200
CMD_GET_NEXT_STATEMENT_TARGETS = 201
CMD_SET_PROJECT_ROOTS = 202

CMD_MODULE_EVENT = 203
CMD_PROCESS_EVENT = 204

CMD_AUTHENTICATE = 205

CMD_STEP_INTO_COROUTINE = 206

CMD_LOAD_SOURCE_FROM_FRAME_ID = 207

CMD_SET_FUNCTION_BREAK = 208

# Protocol/meta commands.
CMD_VERSION = 501
CMD_RETURN = 502
CMD_SET_PROTOCOL = 503

CMD_ERROR = 901

# this number can be changed if there's need to do so
# if the io is too big, we'll not send all (could make the debugger too non-responsive)
MAX_IO_MSG_SIZE = 10000

# Replaced by the build machinery with the actual build number.
VERSION_STRING = "@@BUILD_NUMBER@@"

from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
file_system_encoding = getfilesystemencoding()
# Used to decide whether filenames need re-encoding on the wire.
filesystem_encoding_is_utf8 = file_system_encoding.lower() in ('utf-8', 'utf_8', 'utf8')
# Maps the string form of each command id to its CMD_* constant name.
# Used by constant_to_str (mostly for logging/diagnostics). Must be kept in
# sync with the CMD_* constants above.
ID_TO_MEANING = {
    '101': 'CMD_RUN',
    '102': 'CMD_LIST_THREADS',
    '103': 'CMD_THREAD_CREATE',
    '104': 'CMD_THREAD_KILL',
    '105': 'CMD_THREAD_SUSPEND',
    '106': 'CMD_THREAD_RUN',
    '107': 'CMD_STEP_INTO',
    '108': 'CMD_STEP_OVER',
    '109': 'CMD_STEP_RETURN',
    '110': 'CMD_GET_VARIABLE',
    '111': 'CMD_SET_BREAK',
    '112': 'CMD_REMOVE_BREAK',
    '113': 'CMD_EVALUATE_EXPRESSION',
    '114': 'CMD_GET_FRAME',
    '115': 'CMD_EXEC_EXPRESSION',
    '116': 'CMD_WRITE_TO_CONSOLE',
    '117': 'CMD_CHANGE_VARIABLE',
    '118': 'CMD_RUN_TO_LINE',
    '119': 'CMD_RELOAD_CODE',
    '120': 'CMD_GET_COMPLETIONS',
    '121': 'CMD_CONSOLE_EXEC',
    '122': 'CMD_ADD_EXCEPTION_BREAK',
    '123': 'CMD_REMOVE_EXCEPTION_BREAK',
    '124': 'CMD_LOAD_SOURCE',
    '125': 'CMD_ADD_DJANGO_EXCEPTION_BREAK',
    '126': 'CMD_REMOVE_DJANGO_EXCEPTION_BREAK',
    '127': 'CMD_SET_NEXT_STATEMENT',
    '128': 'CMD_SMART_STEP_INTO',
    '129': 'CMD_EXIT',
    '130': 'CMD_SIGNATURE_CALL_TRACE',

    '131': 'CMD_SET_PY_EXCEPTION',
    '132': 'CMD_GET_FILE_CONTENTS',
    '133': 'CMD_SET_PROPERTY_TRACE',
    '134': 'CMD_EVALUATE_CONSOLE_EXPRESSION',
    '135': 'CMD_RUN_CUSTOM_OPERATION',
    '136': 'CMD_GET_BREAKPOINT_EXCEPTION',
    '137': 'CMD_STEP_CAUGHT_EXCEPTION',
    '138': 'CMD_SEND_CURR_EXCEPTION_TRACE',
    '139': 'CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED',
    '140': 'CMD_IGNORE_THROWN_EXCEPTION_AT',
    '141': 'CMD_ENABLE_DONT_TRACE',
    '142': 'CMD_SHOW_CONSOLE',
    '143': 'CMD_GET_ARRAY',
    '144': 'CMD_STEP_INTO_MY_CODE',
    '145': 'CMD_GET_CONCURRENCY_EVENT',
    '146': 'CMD_SHOW_RETURN_VALUES',
    '147': 'CMD_INPUT_REQUESTED',
    '148': 'CMD_GET_DESCRIPTION',

    '149': 'CMD_PROCESS_CREATED',  # Note: this is actually a notification of a sub-process created.
    '150': 'CMD_SHOW_CYTHON_WARNING',
    '151': 'CMD_LOAD_FULL_VALUE',
    '152': 'CMD_GET_THREAD_STACK',
    '153': 'CMD_THREAD_DUMP_TO_STDERR',
    '154': 'CMD_STOP_ON_START',
    '155': 'CMD_GET_EXCEPTION_DETAILS',
    '156': 'CMD_PYDEVD_JSON_CONFIG',
    '157': 'CMD_THREAD_SUSPEND_SINGLE_NOTIFICATION',
    '158': 'CMD_THREAD_RESUME_SINGLE_NOTIFICATION',

    '159': 'CMD_STEP_OVER_MY_CODE',
    '160': 'CMD_STEP_RETURN_MY_CODE',

    '161': 'CMD_SET_PY_EXCEPTION_JSON',
    '162': 'CMD_SET_PATH_MAPPING_JSON',
    '163': 'CMD_GET_SMART_STEP_INTO_VARIANTS',

    '200': 'CMD_REDIRECT_OUTPUT',
    '201': 'CMD_GET_NEXT_STATEMENT_TARGETS',
    '202': 'CMD_SET_PROJECT_ROOTS',
    '203': 'CMD_MODULE_EVENT',
    '204': 'CMD_PROCESS_EVENT',  # DAP process event.

    '205': 'CMD_AUTHENTICATE',

    '206': 'CMD_STEP_INTO_COROUTINE',

    '207': 'CMD_LOAD_SOURCE_FROM_FRAME_ID',

    '501': 'CMD_VERSION',
    '502': 'CMD_RETURN',
    '503': 'CMD_SET_PROTOCOL',
    '901': 'CMD_ERROR',
}
def constant_to_str(constant):
    """Return the CMD_* name for a numeric command id, or a placeholder when
    the id is unknown."""
    meaning = ID_TO_MEANING.get(str(constant))
    return meaning if meaning else '<Unknown: %s>' % (constant,)

View File

@ -1,185 +0,0 @@
import os
import sys
class ArgHandlerWithParam:
    '''
    Handler for a command line option of the form ``--<name> <value>``.
    '''

    def __init__(self, arg_name, convert_val=None, default_val=None):
        self.arg_name = arg_name
        self.arg_v_rep = '--%s' % (arg_name,)
        self.convert_val = convert_val
        self.default_val = default_val

    def to_argv(self, lst, setup):
        '''Append ``--name value`` to *lst* when *setup* holds a non-default value.'''
        value = setup.get(self.arg_name)
        if value is None or value == self.default_val:
            return
        lst.append(self.arg_v_rep)
        lst.append('%s' % (value,))

    def handle_argv(self, argv, i, setup):
        '''Consume ``--name value`` from *argv* at index *i*, storing the
        (possibly converted) value into *setup*.'''
        assert argv[i] == self.arg_v_rep
        del argv[i]

        raw = argv[i]
        setup[self.arg_name] = self.convert_val(raw) if self.convert_val else raw
        del argv[i]
class ArgHandlerBool:
    '''
    Handler for a flag-style command line option (``--<name>``, no value);
    when received it's marked as True in the setup.
    '''

    def __init__(self, arg_name, default_val=False):
        self.arg_name = arg_name
        self.arg_v_rep = '--%s' % (arg_name,)
        self.default_val = default_val

    def to_argv(self, lst, setup):
        '''Append ``--name`` to *lst* when the flag is truthy in *setup*.'''
        if setup.get(self.arg_name):
            lst.append(self.arg_v_rep)

    def handle_argv(self, argv, i, setup):
        '''Consume ``--name`` from *argv* at index *i*, marking it True in *setup*.'''
        assert argv[i] == self.arg_v_rep
        del argv[i]
        setup[self.arg_name] = True
def convert_ppid(ppid):
    '''Convert the --ppid argument to an int, rejecting our own pid.

    :raises AssertionError: when the passed pid equals the current process pid.
    '''
    value = int(ppid)
    if value and value == os.getpid():
        raise AssertionError(
            'ppid passed is the same as the current process pid (%s)!' % (value,))
    return value
# All command line options pydevd accepts (besides the special-cased
# --file, --qt-support and --DEBUG handled directly in process_command_line).
ACCEPTED_ARG_HANDLERS = [
    ArgHandlerWithParam('port', int, 0),
    ArgHandlerWithParam('ppid', convert_ppid, 0),
    ArgHandlerWithParam('vm_type'),
    ArgHandlerWithParam('client'),
    ArgHandlerWithParam('access-token'),
    ArgHandlerWithParam('client-access-token'),
    ArgHandlerWithParam('debug-mode'),
    ArgHandlerWithParam('preimport'),

    # Logging
    ArgHandlerWithParam('log-file'),
    ArgHandlerWithParam('log-level', int, None),

    ArgHandlerBool('server'),
    ArgHandlerBool('multiproc'),  # Used by PyCharm (reuses connection: ssh tunneling)
    ArgHandlerBool('multiprocess'),  # Used by PyDev (creates new connection to ide)
    ArgHandlerBool('save-signatures'),
    ArgHandlerBool('save-threading'),
    ArgHandlerBool('save-asyncio'),
    ArgHandlerBool('print-in-debugger-startup'),
    ArgHandlerBool('cmd-line'),
    ArgHandlerBool('module'),
    ArgHandlerBool('skip-notify-stdin'),

    # The ones below should've been just one setting to specify the protocol, but for compatibility
    # reasons they're passed as a flag but are mutually exclusive.
    ArgHandlerBool('json-dap'),  # Protocol used by ptvsd to communicate with pydevd (a single json message in each read)
    ArgHandlerBool('json-dap-http'),  # Actual DAP (json messages over http protocol).
    ArgHandlerBool('protocol-quoted-line'),  # Custom protocol with quoted lines.
    ArgHandlerBool('protocol-http'),  # Custom protocol with http.
]
# Map the '--<name>' string of each option to its handler for O(1) dispatch
# in process_command_line.
ARGV_REP_TO_HANDLER = {}
for handler in ACCEPTED_ARG_HANDLERS:
    ARGV_REP_TO_HANDLER[handler.arg_v_rep] = handler
def get_pydevd_file():
    '''Return the path of the pydevd.py source file (normalizing compiled
    .pyc / Jython $py.class paths back to the .py source).'''
    import pydevd
    filename = pydevd.__file__
    if filename.endswith('.pyc'):
        return filename[:-1]
    if filename.endswith('$py.class'):
        return filename[:-len('$py.class')] + '.py'
    return filename
def setup_to_argv(setup, skip_names=None):
    '''
    :param dict setup:
        A dict previously gotten from process_command_line.

    :param set skip_names:
        The names in the setup which shouldn't be converted to argv.

    :note: does not handle --file nor --DEBUG.
    '''
    skip_names = set() if skip_names is None else skip_names

    ret = [get_pydevd_file()]
    for handler in ACCEPTED_ARG_HANDLERS:
        name = handler.arg_name
        if name in setup and name not in skip_names:
            handler.to_argv(ret, setup)
    return ret
def process_command_line(argv):
    """Parse pydevd's own arguments out of *argv* (mutating it in place).

    :param list argv: full command line (argv[0] is dropped); recognized
        pydevd options are removed from it as they're processed.
    :return: dict mapping option names to their parsed values.
    :raises ValueError: on an unrecognized option or a bad --qt-support mode.
    """
    setup = {}
    # Start with every known option at its default value.
    for handler in ACCEPTED_ARG_HANDLERS:
        setup[handler.arg_name] = handler.default_val
    setup['file'] = ''
    setup['qt-support'] = ''

    initial_argv = tuple(argv)

    i = 0
    del argv[0]
    while i < len(argv):
        handler = ARGV_REP_TO_HANDLER.get(argv[i])
        if handler is not None:
            handler.handle_argv(argv, i, setup)

        elif argv[i].startswith('--qt-support'):
            # The --qt-support is special because we want to keep backward compatibility:
            # Previously, just passing '--qt-support' meant that we should use the auto-discovery mode
            # whereas now, if --qt-support is passed, it should be passed as --qt-support=<mode>, where
            # mode can be one of 'auto', 'none', 'pyqt5', 'pyqt4', 'pyside', 'pyside2'.
            if argv[i] == '--qt-support':
                setup['qt-support'] = 'auto'

            elif argv[i].startswith('--qt-support='):
                qt_support = argv[i][len('--qt-support='):]
                valid_modes = ('none', 'auto', 'pyqt5', 'pyqt4', 'pyside', 'pyside2')
                if qt_support not in valid_modes:
                    raise ValueError("qt-support mode invalid: " + qt_support)
                if qt_support == 'none':
                    # On none, actually set an empty string to evaluate to False.
                    setup['qt-support'] = ''
                else:
                    setup['qt-support'] = qt_support
            else:
                raise ValueError("Unexpected definition for qt-support flag: " + argv[i])

            del argv[i]

        elif argv[i] == '--file':
            # --file is special because it's the last one (so, no handler for it).
            del argv[i]
            setup['file'] = argv[i]
            i = len(argv)  # pop out, file is our last argument

        elif argv[i] == '--DEBUG':
            sys.stderr.write('pydevd: --DEBUG parameter deprecated. Use `--debug-level=3` instead.\n')
            # Fix: the flag was neither consumed nor was `i` advanced, so
            # hitting --DEBUG spun forever re-writing the warning; consume it.
            del argv[i]

        else:
            raise ValueError("Unexpected option: %s when processing: %s" % (argv[i], initial_argv))
    return setup

Some files were not shown because too many files have changed in this diff Show More