diff --git a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py index 6e5db8c65..b45fa5f9d 100644 --- a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py +++ b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/__main__pydevd_gen_debug_adapter_protocol.py @@ -493,6 +493,10 @@ def update_class_to_generate_enums(class_to_generate): def update_class_to_generate_objects(classes_to_generate, class_to_generate): properties = class_to_generate['properties'] for key, val in properties.items(): + if 'type' not in val: + val['type'] = 'TypeNA' + continue + if val['type'] == 'object': create_new = val.copy() create_new.update({ diff --git a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json index e94c1be28..8bbe94419 100644 --- a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json +++ b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/debugProtocol.json @@ -214,6 +214,13 @@ "allThreadsStopped": { "type": "boolean", "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." + }, + "hitBreakpointIds": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Ids of the breakpoints that triggered the event. In most cases there will be only a single breakpoint but here are some examples for multiple breakpoints:\n- Different types of breakpoints map to the same location.\n- Multiple source breakpoints get collapsed to the same instruction by the compiler/runtime.\n- Multiple function breakpoints with different function names map to the same location." } }, "required": [ "reason" ] @@ -341,8 +348,15 @@ "properties": { "category": { "type": "string", - "description": "The output category. If not specified, 'console' is assumed.", - "_enum": [ "console", "stdout", "stderr", "telemetry" ] + "description": "The output category. If not specified or if the category is not understand by the client, 'console' is assumed.", + "_enum": [ "console", "important", "stdout", "stderr", "telemetry" ], + "enumDescriptions": [ + "Show the output in the client's default message UI, e.g. a 'debug console'. This category should only be used for informational output from the debugger (as opposed to the debuggee).", + "A hint for the client to show the ouput in the client's UI for important and highly visible information, e.g. as a popup notification. This category should only be used for important messages from the debugger (as opposed to the debuggee). Since this category value is a hint, clients might ignore the hint and assume the 'console' category.", + "Show the output as normal program output from the debuggee.", + "Show the output as error program output from the debuggee.", + "Send the output to telemetry instead of showing it to the user." + ] }, "output": { "type": "string", @@ -684,6 +698,38 @@ }] }, + "MemoryEvent": { + "allOf": [ { "$ref": "#/definitions/Event" }, { + "type": "object", + "description": "This event indicates that some memory range has been updated. 
It should only be sent if the debug adapter has received a value true for the `supportsMemoryEvent` capability of the `initialize` request.\nClients typically react to the event by re-issuing a `readMemory` request if they show the memory identified by the `memoryReference` and if the updated memory range overlaps the displayed range. Clients should not make assumptions how individual memory references relate to each other, so they should not assume that they are part of a single continuous address range and might overlap.\nDebug adapters can use this event to indicate that the contents of a memory range has changed due to some other DAP request like `setVariable` or `setExpression`. Debug adapters are not expected to emit this event for each and every memory change of a running program, because that information is typically not available from debuggers and it would flood clients with too many events.", + "properties": { + "event": { + "type": "string", + "enum": [ "memory" ] + }, + "body": { + "type": "object", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference of a memory range that has been updated." + }, + "offset": { + "type": "integer", + "description": "Starting offset in bytes where memory has been updated. Can be negative." + }, + "count": { + "type": "integer", + "description": "Number of bytes updated." + } + }, + "required": [ "memoryReference", "offset", "count" ] + } + }, + "required": [ "event", "body" ] + }] + }, + "RunInTerminalRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", @@ -832,6 +878,10 @@ "supportsInvalidatedEvent": { "type": "boolean", "description": "Client supports the invalidated event." + }, + "supportsMemoryEvent": { + "type": "boolean", + "description": "Client supports the memory event." } }, "required": [ "adapterID" ] @@ -964,7 +1014,16 @@ }, "RestartArguments": { "type": "object", - "description": "Arguments for 'restart' request." + "description": "Arguments for 'restart' request.", + "properties": { + "arguments": { + "oneOf": [ + { "$ref": "#/definitions/LaunchRequestArguments" }, + { "$ref": "#/definitions/AttachRequestArguments" } + ], + "description": "The latest version of the 'launch' or 'attach' configuration." + } + } }, "RestartResponse": { "allOf": [ { "$ref": "#/definitions/Response" }, { @@ -1000,6 +1059,10 @@ "terminateDebuggee": { "type": "boolean", "description": "Indicates whether the debuggee should be terminated when the debugger is disconnected.\nIf unspecified, the debug adapter is free to do whatever it thinks is best.\nThe attribute is only honored by a debug adapter if the capability 'supportTerminateDebuggee' is true." + }, + "suspendDebuggee": { + "type": "boolean", + "description": "Indicates whether the debuggee should stay suspended when the debugger is disconnected.\nIf unspecified, the debuggee should resume execution.\nThe attribute is only honored by a debug adapter if the capability 'supportSuspendDebuggee' is true." } } }, @@ -1278,7 +1341,21 @@ "SetExceptionBreakpointsResponse": { "allOf": [ { "$ref": "#/definitions/Response" }, { "type": "object", - "description": "Response to 'setExceptionBreakpoints' request. This is just an acknowledgement, so no body field is required." + "description": "Response to 'setExceptionBreakpoints' request.\nThe response contains an array of Breakpoint objects with information about each exception breakpoint or filter. 
The Breakpoint objects are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays given as arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information.\nThe mandatory 'verified' property of a Breakpoint object signals whether the exception breakpoint or filter could be successfully created and whether the optional condition or hit count expressions are valid. In case of an error the 'message' property explains the problem. An optional 'id' property can be used to introduce a unique ID for the exception breakpoint or filter so that it can be updated subsequently by sending breakpoint events.\nFor backward compatibility both the 'breakpoints' array and the enclosing 'body' are optional. If these elements are missing a client will not be able to show problems for individual exception breakpoints or filters.", + "properties": { + "body": { + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the exception breakpoints or filters.\nThe breakpoints returned are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays in the arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information." + } + } + } + } }] }, @@ -1308,7 +1385,7 @@ }, "name": { "type": "string", - "description": "The name of the Variable's child to obtain data breakpoint information for.\nIf variableReference isn’t provided, this can be an expression." + "description": "The name of the Variable's child to obtain data breakpoint information for.\nIf variablesReference isn't provided, this can be an expression." } }, "required": [ "name" ] @@ -1461,7 +1538,7 @@ "ContinueRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to run again.", + "description": "The request resumes execution of all threads. If the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true resumes only the specified thread. If not all threads were resumed, the 'allThreadsContinued' attribute of the response must be set to false.", "properties": { "command": { "type": "string", @@ -1480,7 +1557,11 @@ "properties": { "threadId": { "type": "integer", - "description": "Continue execution for the specified thread (if possible).\nIf the backend cannot continue on a single thread but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true." + "description": "Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, execution is resumed only for the thread with given 'threadId'." } }, "required": [ "threadId" ] @@ -1495,7 +1576,7 @@ "properties": { "allThreadsContinued": { "type": "boolean", - "description": "If true, the 'continue' request has ignored the specified thread and continued all threads instead.\nIf this attribute is missing a value of 'true' is assumed for backward compatibility." 
+ "description": "The value true (or a missing property) signals to the client that all threads have been resumed. The value false must be returned if not all threads were resumed." } } } @@ -1507,7 +1588,7 @@ "NextRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to run again for one step.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", + "description": "The request executes one step (in the given granularity) for the specified thread and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", "properties": { "command": { "type": "string", @@ -1526,7 +1607,11 @@ "properties": { "threadId": { "type": "integer", - "description": "Execute 'next' for this thread." + "description": "Specifies the thread for which to resume execution for one step (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "granularity": { "$ref": "#/definitions/SteppingGranularity", @@ -1545,7 +1630,7 @@ "StepInRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to step into a function/method if possible.\nIf it cannot step into a target, 'stepIn' behaves like 'next'.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nIf there are multiple function/method calls (or other targets) on the source line,\nthe optional argument 'targetId' can be used to control into which target the 'stepIn' should occur.\nThe list of possible targets for a given source line can be retrieved via the 'stepInTargets' request.", + "description": "The request resumes the given thread to step into a function/method and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nIf the request cannot step into a target, 'stepIn' behaves like the 'next' request.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nIf there are multiple function/method calls (or other targets) on the source line,\nthe optional argument 'targetId' can be used to control into which target the 'stepIn' should occur.\nThe list of possible targets for a given source line can be retrieved via the 'stepInTargets' request.", "properties": { "command": { "type": "string", @@ -1564,7 +1649,11 @@ "properties": { "threadId": { "type": "integer", - "description": "Execute 'stepIn' for this thread." + "description": "Specifies the thread for which to resume execution for one step-into (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." 
}, "targetId": { "type": "integer", @@ -1587,7 +1676,7 @@ "StepOutRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to run again for one step.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", + "description": "The request resumes the given thread to step out (return) from a function/method and allows all other threads to run freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.", "properties": { "command": { "type": "string", @@ -1606,7 +1695,11 @@ "properties": { "threadId": { "type": "integer", - "description": "Execute 'stepOut' for this thread." + "description": "Specifies the thread for which to resume execution for one step-out (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "granularity": { "$ref": "#/definitions/SteppingGranularity", @@ -1625,7 +1718,7 @@ "StepBackRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to run one step backwards.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nClients should only call this request if the capability 'supportsStepBack' is true.", + "description": "The request executes one backward step (in the given granularity) for the specified thread and allows all other threads to run backward freely by resuming them.\nIf the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other suspended threads from resuming.\nThe debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed.\nClients should only call this request if the capability 'supportsStepBack' is true.", "properties": { "command": { "type": "string", @@ -1644,7 +1737,11 @@ "properties": { "threadId": { "type": "integer", - "description": "Execute 'stepBack' for this thread." + "description": "Specifies the thread for which to resume execution for one step backwards (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "granularity": { "$ref": "#/definitions/SteppingGranularity", @@ -1663,7 +1760,7 @@ "ReverseContinueRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "The request starts the debuggee to run backward.\nClients should only call this request if the capability 'supportsStepBack' is true.", + "description": "The request resumes backward execution of all threads. If the debug adapter supports single thread execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true resumes only the specified thread. 
If not all threads were resumed, the 'allThreadsContinued' attribute of the response must be set to false.\nClients should only call this request if the capability 'supportsStepBack' is true.", "properties": { "command": { "type": "string", @@ -1682,8 +1779,13 @@ "properties": { "threadId": { "type": "integer", - "description": "Execute 'reverseContinue' for this thread." + "description": "Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, backward execution is resumed only for the thread with given 'threadId'." } + }, "required": [ "threadId" ] }, @@ -1986,7 +2088,7 @@ "SetVariableRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "Set the variable with the given name in the variable container to a new value. Clients should only call this request if the capability 'supportsSetVariable' is true.", + "description": "Set the variable with the given name in the variable container to a new value. Clients should only call this request if the capability 'supportsSetVariable' is true.\nIf a debug adapter implements both setVariable and setExpression, a client will only use setExpression if the variable has an evaluateName property.", "properties": { "command": { "type": "string", @@ -2378,7 +2480,7 @@ "SetExpressionRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", - "description": "Evaluates the given 'value' expression and assigns it to the 'expression' which must be a modifiable l-value.\nThe expressions have access to any variables and arguments that are in scope of the specified frame.\nClients should only call this request if the capability 'supportsSetExpression' is true.", + "description": "Evaluates the given 'value' expression and assigns it to the 'expression' which must be a modifiable l-value.\nThe expressions have access to any variables and arguments that are in scope of the specified frame.\nClients should only call this request if the capability 'supportsSetExpression' is true.\nIf a debug adapter implements both setExpression and setVariable, a client will only use setExpression if the variable has an evaluateName property.", "properties": { "command": { "type": "string", @@ -2745,6 +2847,67 @@ }] }, + "WriteMemoryRequest": { + "allOf": [ { "$ref": "#/definitions/Request" }, { + "type": "object", + "description": "Writes bytes to memory at the provided location.\nClients should only call this request if the capability 'supportsWriteMemoryRequest' is true.", + "properties": { + "command": { + "type": "string", + "enum": [ "writeMemory" ] + }, + "arguments": { + "$ref": "#/definitions/WriteMemoryArguments" + } + }, + "required": [ "command", "arguments" ] + }] + }, + "WriteMemoryArguments": { + "type": "object", + "description": "Arguments for 'writeMemory' request.", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference to the base location to which data should be written." + }, + "offset": { + "type": "integer", + "description": "Optional offset (in bytes) to be applied to the reference location before writing data. Can be negative." + }, + "allowPartial": { + "type": "boolean", + "description": "Optional property to control partial writes. 
If true, the debug adapter should attempt to write memory even if the entire memory region is not writable. In such a case the debug adapter should stop after hitting the first byte of memory that cannot be written and return the number of bytes written in the response via the 'offset' and 'bytesWritten' properties.\nIf false or missing, a debug adapter should attempt to verify the region is writable before writing, and fail the response if it is not." + }, + "data": { + "type": "string", + "description": "Bytes to write, encoded using base64." + } + }, + "required": [ "memoryReference", "data" ] + }, + "WriteMemoryResponse": { + "allOf": [ { "$ref": "#/definitions/Response" }, { + "type": "object", + "description": "Response to 'writeMemory' request.", + "properties": { + "body": { + "type": "object", + "properties": { + "offset": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the offset of the first byte of data successfully written. Can be negative." + }, + "bytesWritten": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the number of bytes starting from address that were successfully written." + } + } + } + } + }] + }, + "DisassembleRequest": { "allOf": [ { "$ref": "#/definitions/Request" }, { "type": "object", @@ -2911,6 +3074,10 @@ "type": "boolean", "description": "The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request." }, + "supportSuspendDebuggee": { + "type": "boolean", + "description": "The debug adapter supports the 'suspendDebuggee' attribute on the 'disconnect' request." + }, "supportsDelayedStackTraceLoading": { "type": "boolean", "description": "The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and an optional 'totalFrames' result of the 'StackTrace' request are supported." @@ -2943,6 +3110,10 @@ "type": "boolean", "description": "The debug adapter supports the 'readMemory' request." }, + "supportsWriteMemoryRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'writeMemory' request." + }, "supportsDisassembleRequest": { "type": "boolean", "description": "The debug adapter supports the 'disassemble' request." @@ -2970,6 +3141,10 @@ "supportsExceptionFilterOptions": { "type": "boolean", "description": "The debug adapter supports 'filterOptions' as an argument on the 'setExceptionBreakpoints' request." + }, + "supportsSingleThreadExecutionRequests": { + "type": "boolean", + "description": "The debug adapter supports the 'singleThread' property on the execution requests ('continue', 'next', 'stepIn', 'stepOut', 'reverseContinue', 'stepBack')." } } }, @@ -2986,6 +3161,10 @@ "type": "string", "description": "The name of the filter option. This will be shown in the UI." }, + "description": { + "type": "string", + "description": "An optional help text providing additional information about the exception filter. This string is typically shown as a hover and must be translated." + }, "default": { "type": "boolean", "description": "Initial value of the filter option. If not specified a value 'false' is assumed." @@ -2993,6 +3172,10 @@ "supportsCondition": { "type": "boolean", "description": "Controls whether a condition can be specified for this filter option. If false or missing, a condition can not be set." 
+ }, + "conditionDescription": { + "type": "string", + "description": "An optional help text providing information about the condition. This string is shown as the placeholder text for a text box and must be translated." } }, "required": [ "filter", "label" ] @@ -3223,6 +3406,10 @@ "type": "integer", "description": "An optional end column of the range covered by the stack frame." }, + "canRestart": { + "type": "boolean", + "description": "Indicates whether this frame can be restarted with the 'restart' request. Clients should only use this if the debug adapter supports the 'restart' request (capability 'supportsRestartRequest' is true)." + }, "instructionPointerReference": { "type": "string", "description": "Optional memory reference for the current instruction pointer in this frame." @@ -3361,7 +3548,7 @@ "Indicates that the object is an interface.", "Indicates that the object is the most derived class.", "Indicates that the object is virtual, that means it is a synthetic object introducedby the\nadapter for rendering purposes, e.g. an index range for large arrays.", - "Indicates that a data breakpoint is registered for the object." + "Deprecated: Indicates that a data breakpoint is registered for the object. The 'hasDataBreakpoint' attribute should generally be used instead." ] }, "attributes": { @@ -3369,7 +3556,7 @@ "type": "array", "items": { "type": "string", - "_enum": [ "static", "constant", "readOnly", "rawString", "hasObjectId", "canHaveObjectId", "hasSideEffects" ], + "_enum": [ "static", "constant", "readOnly", "rawString", "hasObjectId", "canHaveObjectId", "hasSideEffects", "hasDataBreakpoint" ], "enumDescriptions": [ "Indicates that the object is static.", "Indicates that the object is a constant.", @@ -3377,7 +3564,8 @@ "Indicates that the object is a raw string.", "Indicates that the object can have an Object ID created for it.", "Indicates that the object has an Object ID associated with it.", - "Indicates that the evaluation had side effects." + "Indicates that the evaluation had side effects.", + "Indicates that the object has its value tracked by a data breakpoint." ] } }, diff --git a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py index 14ab9c4c0..3159f63ed 100644 --- a/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py +++ b/src/debugpy/_vendored/pydevd/_pydevd_bundle/_debug_adapter/pydevd_schema.py @@ -808,6 +808,13 @@ class StoppedEvent(BaseSchema): "allThreadsStopped": { "type": "boolean", "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." + }, + "hitBreakpointIds": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Ids of the breakpoints that triggered the event. In most cases there will be only a single breakpoint but here are some examples for multiple breakpoints:\n- Different types of breakpoints map to the same location.\n- Multiple source breakpoints get collapsed to the same instruction by the compiler/runtime.\n- Multiple function breakpoints with different function names map to the same location." 
} }, "required": [ @@ -1207,12 +1214,20 @@ class OutputEvent(BaseSchema): "properties": { "category": { "type": "string", - "description": "The output category. If not specified, 'console' is assumed.", + "description": "The output category. If not specified or if the category is not understand by the client, 'console' is assumed.", "_enum": [ "console", + "important", "stdout", "stderr", "telemetry" + ], + "enumDescriptions": [ + "Show the output in the client's default message UI, e.g. a 'debug console'. This category should only be used for informational output from the debugger (as opposed to the debuggee).", + "A hint for the client to show the ouput in the client's UI for important and highly visible information, e.g. as a popup notification. This category should only be used for important messages from the debugger (as opposed to the debuggee). Since this category value is a hint, clients might ignore the hint and assume the 'console' category.", + "Show the output as normal program output from the debuggee.", + "Show the output as error program output from the debuggee.", + "Send the output to telemetry instead of showing it to the user." ] }, "output": { @@ -2110,6 +2125,103 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un return dct +@register_event('memory') +@register +class MemoryEvent(BaseSchema): + """ + This event indicates that some memory range has been updated. It should only be sent if the debug + adapter has received a value true for the `supportsMemoryEvent` capability of the `initialize` + request. + + Clients typically react to the event by re-issuing a `readMemory` request if they show the memory + identified by the `memoryReference` and if the updated memory range overlaps the displayed range. + Clients should not make assumptions how individual memory references relate to each other, so they + should not assume that they are part of a single continuous address range and might overlap. + + Debug adapters can use this event to indicate that the contents of a memory range has changed due to + some other DAP request like `setVariable` or `setExpression`. Debug adapters are not expected to + emit this event for each and every memory change of a running program, because that information is + typically not available from debuggers and it would flood clients with too many events. + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "seq": { + "type": "integer", + "description": "Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request." + }, + "type": { + "type": "string", + "enum": [ + "event" + ] + }, + "event": { + "type": "string", + "enum": [ + "memory" + ] + }, + "body": { + "type": "object", + "properties": { + "memoryReference": { + "type": "string", + "description": "Memory reference of a memory range that has been updated." + }, + "offset": { + "type": "integer", + "description": "Starting offset in bytes where memory has been updated. Can be negative." + }, + "count": { + "type": "integer", + "description": "Number of bytes updated." 
+ } + }, + "required": [ + "memoryReference", + "offset", + "count" + ] + } + } + __refs__ = set(['body']) + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, body, seq=-1, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param string type: + :param string event: + :param MemoryEventBody body: + :param integer seq: Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request. + """ + self.type = 'event' + self.event = 'memory' + if body is None: + self.body = MemoryEventBody() + else: + self.body = MemoryEventBody(update_ids_from_dap=update_ids_from_dap, **body) if body.__class__ != MemoryEventBody else body + self.seq = seq + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + type = self.type # noqa (assign to builtin) + event = self.event + body = self.body + seq = self.seq + dct = { + 'type': type, + 'event': event, + 'body': body.to_dict(update_ids_to_dap=update_ids_to_dap), + 'seq': seq, + } + dct.update(self.kwargs) + return dct + + @register_request('runInTerminal') @register class RunInTerminalRequest(BaseSchema): @@ -2520,13 +2632,17 @@ class InitializeRequestArguments(BaseSchema): "supportsInvalidatedEvent": { "type": "boolean", "description": "Client supports the invalidated event." + }, + "supportsMemoryEvent": { + "type": "boolean", + "description": "Client supports the memory event." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, adapterID, clientID=None, clientName=None, locale=None, linesStartAt1=None, columnsStartAt1=None, pathFormat=None, supportsVariableType=None, supportsVariablePaging=None, supportsRunInTerminalRequest=None, supportsMemoryReferences=None, supportsProgressReporting=None, supportsInvalidatedEvent=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, adapterID, clientID=None, clientName=None, locale=None, linesStartAt1=None, columnsStartAt1=None, pathFormat=None, supportsVariableType=None, supportsVariablePaging=None, supportsRunInTerminalRequest=None, supportsMemoryReferences=None, supportsProgressReporting=None, supportsInvalidatedEvent=None, supportsMemoryEvent=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param string adapterID: The ID of the debug adapter. :param string clientID: The ID of the (frontend) client using this adapter. @@ -2541,6 +2657,7 @@ def __init__(self, adapterID, clientID=None, clientName=None, locale=None, lines :param boolean supportsMemoryReferences: Client supports memory references. :param boolean supportsProgressReporting: Client supports progress reporting. :param boolean supportsInvalidatedEvent: Client supports the invalidated event. + :param boolean supportsMemoryEvent: Client supports the memory event. 
""" self.adapterID = adapterID self.clientID = clientID @@ -2555,6 +2672,7 @@ def __init__(self, adapterID, clientID=None, clientName=None, locale=None, lines self.supportsMemoryReferences = supportsMemoryReferences self.supportsProgressReporting = supportsProgressReporting self.supportsInvalidatedEvent = supportsInvalidatedEvent + self.supportsMemoryEvent = supportsMemoryEvent self.kwargs = kwargs @@ -2572,6 +2690,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un supportsMemoryReferences = self.supportsMemoryReferences supportsProgressReporting = self.supportsProgressReporting supportsInvalidatedEvent = self.supportsInvalidatedEvent + supportsMemoryEvent = self.supportsMemoryEvent dct = { 'adapterID': adapterID, } @@ -2599,6 +2718,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['supportsProgressReporting'] = supportsProgressReporting if supportsInvalidatedEvent is not None: dct['supportsInvalidatedEvent'] = supportsInvalidatedEvent + if supportsMemoryEvent is not None: + dct['supportsMemoryEvent'] = supportsMemoryEvent dct.update(self.kwargs) return dct @@ -3443,22 +3564,37 @@ class RestartArguments(BaseSchema): Note: automatically generated code. Do not edit manually. """ - __props__ = {} + __props__ = { + "arguments": { + "oneOf": [ + { + "$ref": "#/definitions/LaunchRequestArguments" + }, + { + "$ref": "#/definitions/AttachRequestArguments" + } + ], + "description": "The latest version of the 'launch' or 'attach' configuration." + } + } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, arguments=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - + :param TypeNA arguments: The latest version of the 'launch' or 'attach' configuration. """ - + self.arguments = arguments self.kwargs = kwargs def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + arguments = self.arguments dct = { } + if arguments is not None: + dct['arguments'] = arguments dct.update(self.kwargs) return dct @@ -3663,33 +3799,44 @@ class DisconnectArguments(BaseSchema): "terminateDebuggee": { "type": "boolean", "description": "Indicates whether the debuggee should be terminated when the debugger is disconnected.\nIf unspecified, the debug adapter is free to do whatever it thinks is best.\nThe attribute is only honored by a debug adapter if the capability 'supportTerminateDebuggee' is true." + }, + "suspendDebuggee": { + "type": "boolean", + "description": "Indicates whether the debuggee should stay suspended when the debugger is disconnected.\nIf unspecified, the debuggee should resume execution.\nThe attribute is only honored by a debug adapter if the capability 'supportSuspendDebuggee' is true." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, restart=None, terminateDebuggee=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, restart=None, terminateDebuggee=None, suspendDebuggee=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param boolean restart: A value of true indicates that this 'disconnect' request is part of a restart sequence. :param boolean terminateDebuggee: Indicates whether the debuggee should be terminated when the debugger is disconnected. 
If unspecified, the debug adapter is free to do whatever it thinks is best. The attribute is only honored by a debug adapter if the capability 'supportTerminateDebuggee' is true. + :param boolean suspendDebuggee: Indicates whether the debuggee should stay suspended when the debugger is disconnected. + If unspecified, the debuggee should resume execution. + The attribute is only honored by a debug adapter if the capability 'supportSuspendDebuggee' is true. """ self.restart = restart self.terminateDebuggee = terminateDebuggee + self.suspendDebuggee = suspendDebuggee self.kwargs = kwargs def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) restart = self.restart terminateDebuggee = self.terminateDebuggee + suspendDebuggee = self.suspendDebuggee dct = { } if restart is not None: dct['restart'] = restart if terminateDebuggee is not None: dct['terminateDebuggee'] = terminateDebuggee + if suspendDebuggee is not None: + dct['suspendDebuggee'] = suspendDebuggee dct.update(self.kwargs) return dct @@ -4915,8 +5062,23 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class SetExceptionBreakpointsResponse(BaseSchema): """ - Response to 'setExceptionBreakpoints' request. This is just an acknowledgement, so no body field is - required. + Response to 'setExceptionBreakpoints' request. + + The response contains an array of Breakpoint objects with information about each exception + breakpoint or filter. The Breakpoint objects are in the same order as the elements of the 'filters', + 'filterOptions', 'exceptionOptions' arrays given as arguments. If both 'filters' and 'filterOptions' + are given, the returned array must start with 'filters' information first, followed by + 'filterOptions' information. + + The mandatory 'verified' property of a Breakpoint object signals whether the exception breakpoint or + filter could be successfully created and whether the optional condition or hit count expressions are + valid. In case of an error the 'message' property explains the problem. An optional 'id' property + can be used to introduce a unique ID for the exception breakpoint or filter so that it can be + updated subsequently by sending breakpoint events. + + For backward compatibility both the 'breakpoints' array and the enclosing 'body' are optional. If + these elements are missing a client will not be able to show problems for individual exception + breakpoints or filters. Note: automatically generated code. Do not edit manually. """ @@ -4955,19 +5117,19 @@ class SetExceptionBreakpointsResponse(BaseSchema): ] }, "body": { - "type": [ - "array", - "boolean", - "integer", - "null", - "number", - "object", - "string" - ], - "description": "Contains request result if success is true and optional error details if success is false." + "type": "object", + "properties": { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the exception breakpoints or filters.\nThe breakpoints returned are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays in the arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information." 
+ } + } } } - __refs__ = set() + __refs__ = set(['body']) __slots__ = list(__props__.keys()) + ['kwargs'] @@ -4983,7 +5145,7 @@ def __init__(self, request_seq, success, command, seq=-1, message=None, body=Non :param string message: Contains the raw error in short form if 'success' is false. This raw error might be interpreted by the frontend and is not shown in the UI. Some predefined values exist. - :param ['array', 'boolean', 'integer', 'null', 'number', 'object', 'string'] body: Contains request result if success is true and optional error details if success is false. + :param SetExceptionBreakpointsResponseBody body: """ self.type = 'response' self.request_seq = request_seq @@ -4991,7 +5153,10 @@ def __init__(self, request_seq, success, command, seq=-1, message=None, body=Non self.command = command self.seq = seq self.message = message - self.body = body + if body is None: + self.body = SetExceptionBreakpointsResponseBody() + else: + self.body = SetExceptionBreakpointsResponseBody(update_ids_from_dap=update_ids_from_dap, **body) if body.__class__ != SetExceptionBreakpointsResponseBody else body self.kwargs = kwargs @@ -5013,7 +5178,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un if message is not None: dct['message'] = message if body is not None: - dct['body'] = body + dct['body'] = body.to_dict(update_ids_to_dap=update_ids_to_dap) dct.update(self.kwargs) return dct @@ -5101,7 +5266,7 @@ class DataBreakpointInfoArguments(BaseSchema): }, "name": { "type": "string", - "description": "The name of the Variable's child to obtain data breakpoint information for.\nIf variableReference isn\u2019t provided, this can be an expression." + "description": "The name of the Variable's child to obtain data breakpoint information for.\nIf variablesReference isn't provided, this can be an expression." } } __refs__ = set() @@ -5111,7 +5276,7 @@ class DataBreakpointInfoArguments(BaseSchema): def __init__(self, name, variablesReference=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param string name: The name of the Variable's child to obtain data breakpoint information for. - If variableReference isn’t provided, this can be an expression. + If variablesReference isn't provided, this can be an expression. :param integer variablesReference: Reference to the Variable container if the data breakpoint is requested for a child of the container. """ self.name = name @@ -5735,7 +5900,10 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class ContinueRequest(BaseSchema): """ - The request starts the debuggee to run again. + The request resumes execution of all threads. If the debug adapter supports single thread execution + (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true + resumes only the specified thread. If not all threads were resumed, the 'allThreadsContinued' + attribute of the response must be set to false. Note: automatically generated code. Do not edit manually. """ @@ -5808,19 +5976,24 @@ class ContinueArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Continue execution for the specified thread (if possible).\nIf the backend cannot continue on a single thread but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true." + "description": "Specifies the active thread. 
If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, execution is resumed only for the thread with given 'threadId'." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Continue execution for the specified thread (if possible). - If the backend cannot continue on a single thread but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true. + :param integer threadId: Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed. + :param boolean singleThread: If this optional flag is true, execution is resumed only for the thread with given 'threadId'. """ self.threadId = threadId + self.singleThread = singleThread if update_ids_from_dap: self.threadId = self._translate_id_from_dap(self.threadId) self.kwargs = kwargs @@ -5834,12 +6007,15 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread if update_ids_to_dap: if threadId is not None: threadId = self._translate_id_to_dap(threadId) dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread dct.update(self.kwargs) return dct @@ -5897,7 +6073,7 @@ class ContinueResponse(BaseSchema): "properties": { "allThreadsContinued": { "type": "boolean", - "description": "If true, the 'continue' request has ignored the specified thread and continued all threads instead.\nIf this attribute is missing a value of 'true' is assumed for backward compatibility." + "description": "The value true (or a missing property) signals to the client that all threads have been resumed. The value false must be returned if not all threads were resumed." } } } @@ -5959,7 +6135,12 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class NextRequest(BaseSchema): """ - The request starts the debuggee to run again for one step. + The request executes one step (in the given granularity) for the specified thread and allows all + other threads to run freely by resuming them. + + If the debug adapter supports single thread execution (see capability + 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other + suspended threads from resuming. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. @@ -6035,7 +6216,11 @@ class NextArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Execute 'next' for this thread." + "description": "Specifies the thread for which to resume execution for one step (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." 
}, "granularity": { "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed.", @@ -6046,12 +6231,14 @@ class NextArguments(BaseSchema): __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Execute 'next' for this thread. + :param integer threadId: Specifies the thread for which to resume execution for one step (of the given granularity). + :param boolean singleThread: If this optional flag is true, all other suspended threads are not resumed. :param SteppingGranularity granularity: Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed. """ self.threadId = threadId + self.singleThread = singleThread if granularity is not None: assert granularity in SteppingGranularity.VALID_VALUES self.granularity = granularity @@ -6068,6 +6255,7 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread granularity = self.granularity if update_ids_to_dap: if threadId is not None: @@ -6075,6 +6263,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread if granularity is not None: dct['granularity'] = granularity dct.update(self.kwargs) @@ -6197,9 +6387,14 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class StepInRequest(BaseSchema): """ - The request starts the debuggee to step into a function/method if possible. + The request resumes the given thread to step into a function/method and allows all other threads to + run freely by resuming them. - If it cannot step into a target, 'stepIn' behaves like 'next'. + If the debug adapter supports single thread execution (see capability + 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other + suspended threads from resuming. + + If the request cannot step into a target, 'stepIn' behaves like the 'next' request. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. @@ -6282,7 +6477,11 @@ class StepInArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Execute 'stepIn' for this thread." + "description": "Specifies the thread for which to resume execution for one step-into (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "targetId": { "type": "integer", @@ -6297,13 +6496,15 @@ class StepInArguments(BaseSchema): __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, targetId=None, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, targetId=None, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Execute 'stepIn' for this thread. 
+ :param integer threadId: Specifies the thread for which to resume execution for one step-into (of the given granularity). + :param boolean singleThread: If this optional flag is true, all other suspended threads are not resumed. :param integer targetId: Optional id of the target to step into. :param SteppingGranularity granularity: Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed. """ self.threadId = threadId + self.singleThread = singleThread self.targetId = targetId if granularity is not None: assert granularity in SteppingGranularity.VALID_VALUES @@ -6321,6 +6522,7 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread targetId = self.targetId granularity = self.granularity if update_ids_to_dap: @@ -6329,6 +6531,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread if targetId is not None: dct['targetId'] = targetId if granularity is not None: @@ -6453,7 +6657,12 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class StepOutRequest(BaseSchema): """ - The request starts the debuggee to run again for one step. + The request resumes the given thread to step out (return) from a function/method and allows all + other threads to run freely by resuming them. + + If the debug adapter supports single thread execution (see capability + 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other + suspended threads from resuming. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. @@ -6529,7 +6738,11 @@ class StepOutArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Execute 'stepOut' for this thread." + "description": "Specifies the thread for which to resume execution for one step-out (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "granularity": { "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed.", @@ -6540,12 +6753,14 @@ class StepOutArguments(BaseSchema): __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Execute 'stepOut' for this thread. + :param integer threadId: Specifies the thread for which to resume execution for one step-out (of the given granularity). + :param boolean singleThread: If this optional flag is true, all other suspended threads are not resumed. :param SteppingGranularity granularity: Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed. 
""" self.threadId = threadId + self.singleThread = singleThread if granularity is not None: assert granularity in SteppingGranularity.VALID_VALUES self.granularity = granularity @@ -6562,6 +6777,7 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread granularity = self.granularity if update_ids_to_dap: if threadId is not None: @@ -6569,6 +6785,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread if granularity is not None: dct['granularity'] = granularity dct.update(self.kwargs) @@ -6691,7 +6909,12 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class StepBackRequest(BaseSchema): """ - The request starts the debuggee to run one step backwards. + The request executes one backward step (in the given granularity) for the specified thread and + allows all other threads to run backward freely by resuming them. + + If the debug adapter supports single thread execution (see capability + 'supportsSingleThreadExecutionRequests') setting the 'singleThread' argument to true prevents other + suspended threads from resuming. The debug adapter first sends the response and then a 'stopped' event (with reason 'step') after the step has completed. @@ -6769,7 +6992,11 @@ class StepBackArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Execute 'stepBack' for this thread." + "description": "Specifies the thread for which to resume execution for one step backwards (of the given granularity)." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, all other suspended threads are not resumed." }, "granularity": { "description": "Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed.", @@ -6780,12 +7007,14 @@ class StepBackArguments(BaseSchema): __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, granularity=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Execute 'stepBack' for this thread. + :param integer threadId: Specifies the thread for which to resume execution for one step backwards (of the given granularity). + :param boolean singleThread: If this optional flag is true, all other suspended threads are not resumed. :param SteppingGranularity granularity: Optional granularity to step. If no granularity is specified, a granularity of 'statement' is assumed. 
""" self.threadId = threadId + self.singleThread = singleThread if granularity is not None: assert granularity in SteppingGranularity.VALID_VALUES self.granularity = granularity @@ -6802,6 +7031,7 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread granularity = self.granularity if update_ids_to_dap: if threadId is not None: @@ -6809,6 +7039,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread if granularity is not None: dct['granularity'] = granularity dct.update(self.kwargs) @@ -6931,7 +7163,10 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un @register class ReverseContinueRequest(BaseSchema): """ - The request starts the debuggee to run backward. + The request resumes backward execution of all threads. If the debug adapter supports single thread + execution (see capability 'supportsSingleThreadExecutionRequests') setting the 'singleThread' + argument to true resumes only the specified thread. If not all threads were resumed, the + 'allThreadsContinued' attribute of the response must be set to false. Clients should only call this request if the capability 'supportsStepBack' is true. @@ -7006,18 +7241,24 @@ class ReverseContinueArguments(BaseSchema): __props__ = { "threadId": { "type": "integer", - "description": "Execute 'reverseContinue' for this thread." + "description": "Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed." + }, + "singleThread": { + "type": "boolean", + "description": "If this optional flag is true, backward execution is resumed only for the thread with given 'threadId'." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, threadId, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, threadId, singleThread=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param integer threadId: Execute 'reverseContinue' for this thread. + :param integer threadId: Specifies the active thread. If the debug adapter supports single thread execution (see 'supportsSingleThreadExecutionRequests') and the optional argument 'singleThread' is true, only the thread with this ID is resumed. + :param boolean singleThread: If this optional flag is true, backward execution is resumed only for the thread with given 'threadId'. """ self.threadId = threadId + self.singleThread = singleThread if update_ids_from_dap: self.threadId = self._translate_id_from_dap(self.threadId) self.kwargs = kwargs @@ -7031,12 +7272,15 @@ def update_dict_ids_from_dap(cls, dct): def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) threadId = self.threadId + singleThread = self.singleThread if update_ids_to_dap: if threadId is not None: threadId = self._translate_id_to_dap(threadId) dct = { 'threadId': threadId, } + if singleThread is not None: + dct['singleThread'] = singleThread dct.update(self.kwargs) return dct @@ -8634,6 +8878,9 @@ class SetVariableRequest(BaseSchema): """ Set the variable with the given name in the variable container to a new value. 
Clients should only call this request if the capability 'supportsSetVariable' is true. + + If a debug adapter implements both setVariable and setExpression, a client will only use + setExpression if the variable has an evaluateName property. Note: automatically generated code. Do not edit manually. """ @@ -10269,6 +10516,9 @@ class SetExpressionRequest(BaseSchema): The expressions have access to any variables and arguments that are in scope of the specified frame. Clients should only call this request if the capability 'supportsSetExpression' is true. + + If a debug adapter implements both setExpression and setVariable, a client will only use + setExpression if the variable has an evaluateName property. Note: automatically generated code. Do not edit manually. """ @@ -11746,6 +11996,246 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un return dct +@register_request('writeMemory') +@register +class WriteMemoryRequest(BaseSchema): + """ + Writes bytes to memory at the provided location. + + Clients should only call this request if the capability 'supportsWriteMemoryRequest' is true. + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "seq": { + "type": "integer", + "description": "Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request." + }, + "type": { + "type": "string", + "enum": [ + "request" + ] + }, + "command": { + "type": "string", + "enum": [ + "writeMemory" + ] + }, + "arguments": { + "type": "WriteMemoryArguments" + } + } + __refs__ = set(['arguments']) + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, arguments, seq=-1, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param string type: + :param string command: + :param WriteMemoryArguments arguments: + :param integer seq: Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request. + """ + self.type = 'request' + self.command = 'writeMemory' + if arguments is None: + self.arguments = WriteMemoryArguments() + else: + self.arguments = WriteMemoryArguments(update_ids_from_dap=update_ids_from_dap, **arguments) if arguments.__class__ != WriteMemoryArguments else arguments + self.seq = seq + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + type = self.type # noqa (assign to builtin) + command = self.command + arguments = self.arguments + seq = self.seq + dct = { + 'type': type, + 'command': command, + 'arguments': arguments.to_dict(update_ids_to_dap=update_ids_to_dap), + 'seq': seq, + } + dct.update(self.kwargs) + return dct + + +@register +class WriteMemoryArguments(BaseSchema): + """ + Arguments for 'writeMemory' request. + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "memoryReference": { + "type": "string", + "description": "Memory reference to the base location to which data should be written." + }, + "offset": { + "type": "integer", + "description": "Optional offset (in bytes) to be applied to the reference location before writing data. Can be negative." + }, + "allowPartial": { + "type": "boolean", + "description": "Optional property to control partial writes. If true, the debug adapter should attempt to write memory even if the entire memory region is not writable. 
In such a case the debug adapter should stop after hitting the first byte of memory that cannot be written and return the number of bytes written in the response via the 'offset' and 'bytesWritten' properties.\nIf false or missing, a debug adapter should attempt to verify the region is writable before writing, and fail the response if it is not." + }, + "data": { + "type": "string", + "description": "Bytes to write, encoded using base64." + } + } + __refs__ = set() + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, memoryReference, data, offset=None, allowPartial=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param string memoryReference: Memory reference to the base location to which data should be written. + :param string data: Bytes to write, encoded using base64. + :param integer offset: Optional offset (in bytes) to be applied to the reference location before writing data. Can be negative. + :param boolean allowPartial: Optional property to control partial writes. If true, the debug adapter should attempt to write memory even if the entire memory region is not writable. In such a case the debug adapter should stop after hitting the first byte of memory that cannot be written and return the number of bytes written in the response via the 'offset' and 'bytesWritten' properties. + If false or missing, a debug adapter should attempt to verify the region is writable before writing, and fail the response if it is not. + """ + self.memoryReference = memoryReference + self.data = data + self.offset = offset + self.allowPartial = allowPartial + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + memoryReference = self.memoryReference + data = self.data + offset = self.offset + allowPartial = self.allowPartial + dct = { + 'memoryReference': memoryReference, + 'data': data, + } + if offset is not None: + dct['offset'] = offset + if allowPartial is not None: + dct['allowPartial'] = allowPartial + dct.update(self.kwargs) + return dct + + +@register_response('writeMemory') +@register +class WriteMemoryResponse(BaseSchema): + """ + Response to 'writeMemory' request. + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "seq": { + "type": "integer", + "description": "Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request." + }, + "type": { + "type": "string", + "enum": [ + "response" + ] + }, + "request_seq": { + "type": "integer", + "description": "Sequence number of the corresponding request." + }, + "success": { + "type": "boolean", + "description": "Outcome of the request.\nIf true, the request was successful and the 'body' attribute may contain the result of the request.\nIf the value is false, the attribute 'message' contains the error in short form and the 'body' may contain additional information (see 'ErrorResponse.body.error')." + }, + "command": { + "type": "string", + "description": "The command requested." + }, + "message": { + "type": "string", + "description": "Contains the raw error in short form if 'success' is false.\nThis raw error might be interpreted by the frontend and is not shown in the UI.\nSome predefined values exist.", + "_enum": [ + "cancelled" + ], + "enumDescriptions": [ + "request was cancelled." 
+ ] + }, + "body": { + "type": "object", + "properties": { + "offset": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the offset of the first byte of data successfully written. Can be negative." + }, + "bytesWritten": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the number of bytes starting from address that were successfully written." + } + } + } + } + __refs__ = set(['body']) + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, request_seq, success, command, seq=-1, message=None, body=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param string type: + :param integer request_seq: Sequence number of the corresponding request. + :param boolean success: Outcome of the request. + If true, the request was successful and the 'body' attribute may contain the result of the request. + If the value is false, the attribute 'message' contains the error in short form and the 'body' may contain additional information (see 'ErrorResponse.body.error'). + :param string command: The command requested. + :param integer seq: Sequence number (also known as message ID). For protocol messages of type 'request' this ID can be used to cancel the request. + :param string message: Contains the raw error in short form if 'success' is false. + This raw error might be interpreted by the frontend and is not shown in the UI. + Some predefined values exist. + :param WriteMemoryResponseBody body: + """ + self.type = 'response' + self.request_seq = request_seq + self.success = success + self.command = command + self.seq = seq + self.message = message + if body is None: + self.body = WriteMemoryResponseBody() + else: + self.body = WriteMemoryResponseBody(update_ids_from_dap=update_ids_from_dap, **body) if body.__class__ != WriteMemoryResponseBody else body + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + type = self.type # noqa (assign to builtin) + request_seq = self.request_seq + success = self.success + command = self.command + seq = self.seq + message = self.message + body = self.body + dct = { + 'type': type, + 'request_seq': request_seq, + 'success': success, + 'command': command, + 'seq': seq, + } + if message is not None: + dct['message'] = message + if body is not None: + dct['body'] = body.to_dict(update_ids_to_dap=update_ids_to_dap) + dct.update(self.kwargs) + return dct + + @register_request('disassemble') @register class DisassembleRequest(BaseSchema): @@ -12102,6 +12592,10 @@ class Capabilities(BaseSchema): "type": "boolean", "description": "The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request." }, + "supportSuspendDebuggee": { + "type": "boolean", + "description": "The debug adapter supports the 'suspendDebuggee' attribute on the 'disconnect' request." + }, "supportsDelayedStackTraceLoading": { "type": "boolean", "description": "The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and an optional 'totalFrames' result of the 'StackTrace' request are supported." @@ -12134,6 +12628,10 @@ class Capabilities(BaseSchema): "type": "boolean", "description": "The debug adapter supports the 'readMemory' request." 
}, + "supportsWriteMemoryRequest": { + "type": "boolean", + "description": "The debug adapter supports the 'writeMemory' request." + }, "supportsDisassembleRequest": { "type": "boolean", "description": "The debug adapter supports the 'disassemble' request." @@ -12161,13 +12659,17 @@ class Capabilities(BaseSchema): "supportsExceptionFilterOptions": { "type": "boolean", "description": "The debug adapter supports 'filterOptions' as an argument on the 'setExceptionBreakpoints' request." + }, + "supportsSingleThreadExecutionRequests": { + "type": "boolean", + "description": "The debug adapter supports the 'singleThread' property on the execution requests ('continue', 'next', 'stepIn', 'stepOut', 'reverseContinue', 'stepBack')." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakpoints=None, supportsConditionalBreakpoints=None, supportsHitConditionalBreakpoints=None, supportsEvaluateForHovers=None, exceptionBreakpointFilters=None, supportsStepBack=None, supportsSetVariable=None, supportsRestartFrame=None, supportsGotoTargetsRequest=None, supportsStepInTargetsRequest=None, supportsCompletionsRequest=None, completionTriggerCharacters=None, supportsModulesRequest=None, additionalModuleColumns=None, supportedChecksumAlgorithms=None, supportsRestartRequest=None, supportsExceptionOptions=None, supportsValueFormattingOptions=None, supportsExceptionInfoRequest=None, supportTerminateDebuggee=None, supportsDelayedStackTraceLoading=None, supportsLoadedSourcesRequest=None, supportsLogPoints=None, supportsTerminateThreadsRequest=None, supportsSetExpression=None, supportsTerminateRequest=None, supportsDataBreakpoints=None, supportsReadMemoryRequest=None, supportsDisassembleRequest=None, supportsCancelRequest=None, supportsBreakpointLocationsRequest=None, supportsClipboardContext=None, supportsSteppingGranularity=None, supportsInstructionBreakpoints=None, supportsExceptionFilterOptions=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakpoints=None, supportsConditionalBreakpoints=None, supportsHitConditionalBreakpoints=None, supportsEvaluateForHovers=None, exceptionBreakpointFilters=None, supportsStepBack=None, supportsSetVariable=None, supportsRestartFrame=None, supportsGotoTargetsRequest=None, supportsStepInTargetsRequest=None, supportsCompletionsRequest=None, completionTriggerCharacters=None, supportsModulesRequest=None, additionalModuleColumns=None, supportedChecksumAlgorithms=None, supportsRestartRequest=None, supportsExceptionOptions=None, supportsValueFormattingOptions=None, supportsExceptionInfoRequest=None, supportTerminateDebuggee=None, supportSuspendDebuggee=None, supportsDelayedStackTraceLoading=None, supportsLoadedSourcesRequest=None, supportsLogPoints=None, supportsTerminateThreadsRequest=None, supportsSetExpression=None, supportsTerminateRequest=None, supportsDataBreakpoints=None, supportsReadMemoryRequest=None, supportsWriteMemoryRequest=None, supportsDisassembleRequest=None, supportsCancelRequest=None, supportsBreakpointLocationsRequest=None, supportsClipboardContext=None, supportsSteppingGranularity=None, supportsInstructionBreakpoints=None, supportsExceptionFilterOptions=None, supportsSingleThreadExecutionRequests=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param boolean supportsConfigurationDoneRequest: The debug adapter supports 
the 'configurationDone' request. :param boolean supportsFunctionBreakpoints: The debug adapter supports function breakpoints. @@ -12190,6 +12692,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp :param boolean supportsValueFormattingOptions: The debug adapter supports a 'format' attribute on the stackTraceRequest, variablesRequest, and evaluateRequest. :param boolean supportsExceptionInfoRequest: The debug adapter supports the 'exceptionInfo' request. :param boolean supportTerminateDebuggee: The debug adapter supports the 'terminateDebuggee' attribute on the 'disconnect' request. + :param boolean supportSuspendDebuggee: The debug adapter supports the 'suspendDebuggee' attribute on the 'disconnect' request. :param boolean supportsDelayedStackTraceLoading: The debug adapter supports the delayed loading of parts of the stack, which requires that both the 'startFrame' and 'levels' arguments and an optional 'totalFrames' result of the 'StackTrace' request are supported. :param boolean supportsLoadedSourcesRequest: The debug adapter supports the 'loadedSources' request. :param boolean supportsLogPoints: The debug adapter supports logpoints by interpreting the 'logMessage' attribute of the SourceBreakpoint. @@ -12198,6 +12701,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp :param boolean supportsTerminateRequest: The debug adapter supports the 'terminate' request. :param boolean supportsDataBreakpoints: The debug adapter supports data breakpoints. :param boolean supportsReadMemoryRequest: The debug adapter supports the 'readMemory' request. + :param boolean supportsWriteMemoryRequest: The debug adapter supports the 'writeMemory' request. :param boolean supportsDisassembleRequest: The debug adapter supports the 'disassemble' request. :param boolean supportsCancelRequest: The debug adapter supports the 'cancel' request. :param boolean supportsBreakpointLocationsRequest: The debug adapter supports the 'breakpointLocations' request. @@ -12205,6 +12709,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp :param boolean supportsSteppingGranularity: The debug adapter supports stepping granularities (argument 'granularity') for the stepping requests. :param boolean supportsInstructionBreakpoints: The debug adapter supports adding breakpoints based on instruction references. :param boolean supportsExceptionFilterOptions: The debug adapter supports 'filterOptions' as an argument on the 'setExceptionBreakpoints' request. + :param boolean supportsSingleThreadExecutionRequests: The debug adapter supports the 'singleThread' property on the execution requests ('continue', 'next', 'stepIn', 'stepOut', 'reverseContinue', 'stepBack'). 
""" self.supportsConfigurationDoneRequest = supportsConfigurationDoneRequest self.supportsFunctionBreakpoints = supportsFunctionBreakpoints @@ -12236,6 +12741,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp self.supportsValueFormattingOptions = supportsValueFormattingOptions self.supportsExceptionInfoRequest = supportsExceptionInfoRequest self.supportTerminateDebuggee = supportTerminateDebuggee + self.supportSuspendDebuggee = supportSuspendDebuggee self.supportsDelayedStackTraceLoading = supportsDelayedStackTraceLoading self.supportsLoadedSourcesRequest = supportsLoadedSourcesRequest self.supportsLogPoints = supportsLogPoints @@ -12244,6 +12750,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp self.supportsTerminateRequest = supportsTerminateRequest self.supportsDataBreakpoints = supportsDataBreakpoints self.supportsReadMemoryRequest = supportsReadMemoryRequest + self.supportsWriteMemoryRequest = supportsWriteMemoryRequest self.supportsDisassembleRequest = supportsDisassembleRequest self.supportsCancelRequest = supportsCancelRequest self.supportsBreakpointLocationsRequest = supportsBreakpointLocationsRequest @@ -12251,6 +12758,7 @@ def __init__(self, supportsConfigurationDoneRequest=None, supportsFunctionBreakp self.supportsSteppingGranularity = supportsSteppingGranularity self.supportsInstructionBreakpoints = supportsInstructionBreakpoints self.supportsExceptionFilterOptions = supportsExceptionFilterOptions + self.supportsSingleThreadExecutionRequests = supportsSingleThreadExecutionRequests self.kwargs = kwargs @@ -12284,6 +12792,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un supportsValueFormattingOptions = self.supportsValueFormattingOptions supportsExceptionInfoRequest = self.supportsExceptionInfoRequest supportTerminateDebuggee = self.supportTerminateDebuggee + supportSuspendDebuggee = self.supportSuspendDebuggee supportsDelayedStackTraceLoading = self.supportsDelayedStackTraceLoading supportsLoadedSourcesRequest = self.supportsLoadedSourcesRequest supportsLogPoints = self.supportsLogPoints @@ -12292,6 +12801,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un supportsTerminateRequest = self.supportsTerminateRequest supportsDataBreakpoints = self.supportsDataBreakpoints supportsReadMemoryRequest = self.supportsReadMemoryRequest + supportsWriteMemoryRequest = self.supportsWriteMemoryRequest supportsDisassembleRequest = self.supportsDisassembleRequest supportsCancelRequest = self.supportsCancelRequest supportsBreakpointLocationsRequest = self.supportsBreakpointLocationsRequest @@ -12299,6 +12809,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un supportsSteppingGranularity = self.supportsSteppingGranularity supportsInstructionBreakpoints = self.supportsInstructionBreakpoints supportsExceptionFilterOptions = self.supportsExceptionFilterOptions + supportsSingleThreadExecutionRequests = self.supportsSingleThreadExecutionRequests dct = { } if supportsConfigurationDoneRequest is not None: @@ -12343,6 +12854,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['supportsExceptionInfoRequest'] = supportsExceptionInfoRequest if supportTerminateDebuggee is not None: dct['supportTerminateDebuggee'] = supportTerminateDebuggee + if supportSuspendDebuggee is not None: + dct['supportSuspendDebuggee'] = supportSuspendDebuggee if supportsDelayedStackTraceLoading is not None: 
dct['supportsDelayedStackTraceLoading'] = supportsDelayedStackTraceLoading if supportsLoadedSourcesRequest is not None: @@ -12359,6 +12872,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['supportsDataBreakpoints'] = supportsDataBreakpoints if supportsReadMemoryRequest is not None: dct['supportsReadMemoryRequest'] = supportsReadMemoryRequest + if supportsWriteMemoryRequest is not None: + dct['supportsWriteMemoryRequest'] = supportsWriteMemoryRequest if supportsDisassembleRequest is not None: dct['supportsDisassembleRequest'] = supportsDisassembleRequest if supportsCancelRequest is not None: @@ -12373,6 +12888,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['supportsInstructionBreakpoints'] = supportsInstructionBreakpoints if supportsExceptionFilterOptions is not None: dct['supportsExceptionFilterOptions'] = supportsExceptionFilterOptions + if supportsSingleThreadExecutionRequests is not None: + dct['supportsSingleThreadExecutionRequests'] = supportsSingleThreadExecutionRequests dct.update(self.kwargs) return dct @@ -12395,6 +12912,10 @@ class ExceptionBreakpointsFilter(BaseSchema): "type": "string", "description": "The name of the filter option. This will be shown in the UI." }, + "description": { + "type": "string", + "description": "An optional help text providing additional information about the exception filter. This string is typically shown as a hover and must be translated." + }, "default": { "type": "boolean", "description": "Initial value of the filter option. If not specified a value 'false' is assumed." @@ -12402,39 +12923,53 @@ class ExceptionBreakpointsFilter(BaseSchema): "supportsCondition": { "type": "boolean", "description": "Controls whether a condition can be specified for this filter option. If false or missing, a condition can not be set." + }, + "conditionDescription": { + "type": "string", + "description": "An optional help text providing information about the condition. This string is shown as the placeholder text for a text box and must be translated." } } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, filter, label, default=None, supportsCondition=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, filter, label, description=None, default=None, supportsCondition=None, conditionDescription=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param string filter: The internal ID of the filter option. This value is passed to the 'setExceptionBreakpoints' request. :param string label: The name of the filter option. This will be shown in the UI. + :param string description: An optional help text providing additional information about the exception filter. This string is typically shown as a hover and must be translated. :param boolean default: Initial value of the filter option. If not specified a value 'false' is assumed. :param boolean supportsCondition: Controls whether a condition can be specified for this filter option. If false or missing, a condition can not be set. + :param string conditionDescription: An optional help text providing information about the condition. This string is shown as the placeholder text for a text box and must be translated. 
""" self.filter = filter self.label = label + self.description = description self.default = default self.supportsCondition = supportsCondition + self.conditionDescription = conditionDescription self.kwargs = kwargs def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) filter = self.filter # noqa (assign to builtin) label = self.label + description = self.description default = self.default supportsCondition = self.supportsCondition + conditionDescription = self.conditionDescription dct = { 'filter': filter, 'label': label, } + if description is not None: + dct['description'] = description if default is not None: dct['default'] = default if supportsCondition is not None: dct['supportsCondition'] = supportsCondition + if conditionDescription is not None: + dct['conditionDescription'] = conditionDescription dct.update(self.kwargs) return dct @@ -13032,6 +13567,10 @@ class StackFrame(BaseSchema): "type": "integer", "description": "An optional end column of the range covered by the stack frame." }, + "canRestart": { + "type": "boolean", + "description": "Indicates whether this frame can be restarted with the 'restart' request. Clients should only use this if the debug adapter supports the 'restart' request (capability 'supportsRestartRequest' is true)." + }, "instructionPointerReference": { "type": "string", "description": "Optional memory reference for the current instruction pointer in this frame." @@ -13057,7 +13596,7 @@ class StackFrame(BaseSchema): __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, id, name, line, column, source=None, endLine=None, endColumn=None, instructionPointerReference=None, moduleId=None, presentationHint=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, id, name, line, column, source=None, endLine=None, endColumn=None, canRestart=None, instructionPointerReference=None, moduleId=None, presentationHint=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param integer id: An identifier for the stack frame. It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe. @@ -13067,6 +13606,7 @@ def __init__(self, id, name, line, column, source=None, endLine=None, endColumn= :param Source source: The optional source of the frame. :param integer endLine: An optional end line of the range covered by the stack frame. :param integer endColumn: An optional end column of the range covered by the stack frame. + :param boolean canRestart: Indicates whether this frame can be restarted with the 'restart' request. Clients should only use this if the debug adapter supports the 'restart' request (capability 'supportsRestartRequest' is true). :param string instructionPointerReference: Optional memory reference for the current instruction pointer in this frame. :param ['integer', 'string'] moduleId: The module associated with this frame, if any. :param string presentationHint: An optional hint for how to present this frame in the UI. 
@@ -13082,6 +13622,7 @@ def __init__(self, id, name, line, column, source=None, endLine=None, endColumn= self.source = Source(update_ids_from_dap=update_ids_from_dap, **source) if source.__class__ != Source else source self.endLine = endLine self.endColumn = endColumn + self.canRestart = canRestart self.instructionPointerReference = instructionPointerReference self.moduleId = moduleId self.presentationHint = presentationHint @@ -13104,6 +13645,7 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un source = self.source endLine = self.endLine endColumn = self.endColumn + canRestart = self.canRestart instructionPointerReference = self.instructionPointerReference moduleId = self.moduleId presentationHint = self.presentationHint @@ -13122,6 +13664,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['endLine'] = endLine if endColumn is not None: dct['endColumn'] = endColumn + if canRestart is not None: + dct['canRestart'] = canRestart if instructionPointerReference is not None: dct['instructionPointerReference'] = instructionPointerReference if moduleId is not None: @@ -13475,7 +14019,7 @@ class VariablePresentationHint(BaseSchema): "Indicates that the object is an interface.", "Indicates that the object is the most derived class.", "Indicates that the object is virtual, that means it is a synthetic object introducedby the\nadapter for rendering purposes, e.g. an index range for large arrays.", - "Indicates that a data breakpoint is registered for the object." + "Deprecated: Indicates that a data breakpoint is registered for the object. The 'hasDataBreakpoint' attribute should generally be used instead." ] }, "attributes": { @@ -13490,7 +14034,8 @@ class VariablePresentationHint(BaseSchema): "rawString", "hasObjectId", "canHaveObjectId", - "hasSideEffects" + "hasSideEffects", + "hasDataBreakpoint" ], "enumDescriptions": [ "Indicates that the object is static.", @@ -13499,7 +14044,8 @@ class VariablePresentationHint(BaseSchema): "Indicates that the object is a raw string.", "Indicates that the object can have an Object ID created for it.", "Indicates that the object has an Object ID associated with it.", - "Indicates that the evaluation had side effects." + "Indicates that the evaluation had side effects.", + "Indicates that the object has its value tracked by a data breakpoint." ] } }, @@ -16395,13 +16941,20 @@ class StoppedEventBody(BaseSchema): "allThreadsStopped": { "type": "boolean", "description": "If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped.\n- The client should use this information to enable that all threads can be expanded to access their stacktraces.\n- If the attribute is missing or false, only the thread with the given threadId can be expanded." + }, + "hitBreakpointIds": { + "type": "array", + "items": { + "type": "integer" + }, + "description": "Ids of the breakpoints that triggered the event. In most cases there will be only a single breakpoint but here are some examples for multiple breakpoints:\n- Different types of breakpoints map to the same location.\n- Multiple source breakpoints get collapsed to the same instruction by the compiler/runtime.\n- Multiple function breakpoints with different function names map to the same location." 
} } __refs__ = set() __slots__ = list(__props__.keys()) + ['kwargs'] - def __init__(self, reason, description=None, threadId=None, preserveFocusHint=None, text=None, allThreadsStopped=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + def __init__(self, reason, description=None, threadId=None, preserveFocusHint=None, text=None, allThreadsStopped=None, hitBreakpointIds=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param string reason: The reason for the event. For backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated). @@ -16412,6 +16965,10 @@ def __init__(self, reason, description=None, threadId=None, preserveFocusHint=No :param boolean allThreadsStopped: If 'allThreadsStopped' is true, a debug adapter can announce that all threads have stopped. - The client should use this information to enable that all threads can be expanded to access their stacktraces. - If the attribute is missing or false, only the thread with the given threadId can be expanded. + :param array hitBreakpointIds: Ids of the breakpoints that triggered the event. In most cases there will be only a single breakpoint but here are some examples for multiple breakpoints: + - Different types of breakpoints map to the same location. + - Multiple source breakpoints get collapsed to the same instruction by the compiler/runtime. + - Multiple function breakpoints with different function names map to the same location. """ self.reason = reason self.description = description @@ -16419,6 +16976,7 @@ def __init__(self, reason, description=None, threadId=None, preserveFocusHint=No self.preserveFocusHint = preserveFocusHint self.text = text self.allThreadsStopped = allThreadsStopped + self.hitBreakpointIds = hitBreakpointIds if update_ids_from_dap: self.threadId = self._translate_id_from_dap(self.threadId) self.kwargs = kwargs @@ -16437,6 +16995,9 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un preserveFocusHint = self.preserveFocusHint text = self.text allThreadsStopped = self.allThreadsStopped + hitBreakpointIds = self.hitBreakpointIds + if hitBreakpointIds and hasattr(hitBreakpointIds[0], "to_dict"): + hitBreakpointIds = [x.to_dict() for x in hitBreakpointIds] if update_ids_to_dap: if threadId is not None: threadId = self._translate_id_to_dap(threadId) @@ -16453,6 +17014,8 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un dct['text'] = text if allThreadsStopped is not None: dct['allThreadsStopped'] = allThreadsStopped + if hitBreakpointIds is not None: + dct['hitBreakpointIds'] = hitBreakpointIds dct.update(self.kwargs) return dct @@ -16679,12 +17242,20 @@ class OutputEventBody(BaseSchema): __props__ = { "category": { "type": "string", - "description": "The output category. If not specified, 'console' is assumed.", + "description": "The output category. If not specified or if the category is not understand by the client, 'console' is assumed.", "_enum": [ "console", + "important", "stdout", "stderr", "telemetry" + ], + "enumDescriptions": [ + "Show the output in the client's default message UI, e.g. a 'debug console'. This category should only be used for informational output from the debugger (as opposed to the debuggee).", + "A hint for the client to show the ouput in the client's UI for important and highly visible information, e.g. as a popup notification. 
This category should only be used for important messages from the debugger (as opposed to the debuggee). Since this category value is a hint, clients might ignore the hint and assume the 'console' category.", + "Show the output as normal program output from the debuggee.", + "Show the output as error program output from the debuggee.", + "Send the output to telemetry instead of showing it to the user." ] }, "output": { @@ -16741,7 +17312,7 @@ class OutputEventBody(BaseSchema): def __init__(self, output, category=None, group=None, variablesReference=None, source=None, line=None, column=None, data=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ :param string output: The output to report. - :param string category: The output category. If not specified, 'console' is assumed. + :param string category: The output category. If not specified or if the category is not understand by the client, 'console' is assumed. :param string group: Support for keeping an output log organized by grouping related messages. :param integer variablesReference: If an attribute 'variablesReference' exists and its value is > 0, the output contains objects which can be retrieved by passing 'variablesReference' to the 'variables' request. The value should be less than or equal to 2147483647 (2^31-1). :param Source source: An optional source location where the output was produced. @@ -17342,6 +17913,57 @@ def update_dict_ids_to_dap(cls, dct): return dct +@register +class MemoryEventBody(BaseSchema): + """ + "body" of MemoryEvent + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "memoryReference": { + "type": "string", + "description": "Memory reference of a memory range that has been updated." + }, + "offset": { + "type": "integer", + "description": "Starting offset in bytes where memory has been updated. Can be negative." + }, + "count": { + "type": "integer", + "description": "Number of bytes updated." + } + } + __refs__ = set() + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, memoryReference, offset, count, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param string memoryReference: Memory reference of a memory range that has been updated. + :param integer offset: Starting offset in bytes where memory has been updated. Can be negative. + :param integer count: Number of bytes updated. + """ + self.memoryReference = memoryReference + self.offset = offset + self.count = count + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + memoryReference = self.memoryReference + offset = self.offset + count = self.count + dct = { + 'memoryReference': memoryReference, + 'offset': offset, + 'count': count, + } + dct.update(self.kwargs) + return dct + + @register class RunInTerminalRequestArgumentsEnv(BaseSchema): """ @@ -17545,6 +18167,51 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un return dct +@register +class SetExceptionBreakpointsResponseBody(BaseSchema): + """ + "body" of SetExceptionBreakpointsResponse + + Note: automatically generated code. Do not edit manually. 
+ """ + + __props__ = { + "breakpoints": { + "type": "array", + "items": { + "$ref": "#/definitions/Breakpoint" + }, + "description": "Information about the exception breakpoints or filters.\nThe breakpoints returned are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays in the arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information." + } + } + __refs__ = set() + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, breakpoints=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param array breakpoints: Information about the exception breakpoints or filters. + The breakpoints returned are in the same order as the elements of the 'filters', 'filterOptions', 'exceptionOptions' arrays in the arguments. If both 'filters' and 'filterOptions' are given, the returned array must start with 'filters' information first, followed by 'filterOptions' information. + """ + self.breakpoints = breakpoints + if update_ids_from_dap and self.breakpoints: + for o in self.breakpoints: + Breakpoint.update_dict_ids_from_dap(o) + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + breakpoints = self.breakpoints + if breakpoints and hasattr(breakpoints[0], "to_dict"): + breakpoints = [x.to_dict() for x in breakpoints] + dct = { + } + if breakpoints is not None: + dct['breakpoints'] = [Breakpoint.update_dict_ids_to_dap(o) for o in breakpoints] if (update_ids_to_dap and breakpoints) else breakpoints + dct.update(self.kwargs) + return dct + + @register class DataBreakpointInfoResponseBody(BaseSchema): """ @@ -17714,7 +18381,7 @@ class ContinueResponseBody(BaseSchema): __props__ = { "allThreadsContinued": { "type": "boolean", - "description": "If true, the 'continue' request has ignored the specified thread and continued all threads instead.\nIf this attribute is missing a value of 'true' is assumed for backward compatibility." + "description": "The value true (or a missing property) signals to the client that all threads have been resumed. The value false must be returned if not all threads were resumed." } } __refs__ = set() @@ -17723,8 +18390,7 @@ class ContinueResponseBody(BaseSchema): def __init__(self, allThreadsContinued=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) """ - :param boolean allThreadsContinued: If true, the 'continue' request has ignored the specified thread and continued all threads instead. - If this attribute is missing a value of 'true' is assumed for backward compatibility. + :param boolean allThreadsContinued: The value true (or a missing property) signals to the client that all threads have been resumed. The value false must be returned if not all threads were resumed. """ self.allThreadsContinued = allThreadsContinued self.kwargs = kwargs @@ -18626,6 +19292,51 @@ def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be un return dct +@register +class WriteMemoryResponseBody(BaseSchema): + """ + "body" of WriteMemoryResponse + + Note: automatically generated code. Do not edit manually. + """ + + __props__ = { + "offset": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the offset of the first byte of data successfully written. Can be negative." 
+ }, + "bytesWritten": { + "type": "integer", + "description": "Optional property that should be returned when 'allowPartial' is true to indicate the number of bytes starting from address that were successfully written." + } + } + __refs__ = set() + + __slots__ = list(__props__.keys()) + ['kwargs'] + + def __init__(self, offset=None, bytesWritten=None, update_ids_from_dap=False, **kwargs): # noqa (update_ids_from_dap may be unused) + """ + :param integer offset: Optional property that should be returned when 'allowPartial' is true to indicate the offset of the first byte of data successfully written. Can be negative. + :param integer bytesWritten: Optional property that should be returned when 'allowPartial' is true to indicate the number of bytes starting from address that were successfully written. + """ + self.offset = offset + self.bytesWritten = bytesWritten + self.kwargs = kwargs + + + def to_dict(self, update_ids_to_dap=False): # noqa (update_ids_to_dap may be unused) + offset = self.offset + bytesWritten = self.bytesWritten + dct = { + } + if offset is not None: + dct['offset'] = offset + if bytesWritten is not None: + dct['bytesWritten'] = bytesWritten + dct.update(self.kwargs) + return dct + + @register class DisassembleResponseBody(BaseSchema): """ diff --git a/src/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c b/src/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c index 4e553a6b8..3dad57bd5 100644 --- a/src/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c +++ b/src/debugpy/_vendored/pydevd/_pydevd_bundle/pydevd_cython.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.29.24 */ +/* Generated by Cython 0.29.26 */ /* BEGIN: Cython Metadata { @@ -27,8 +27,8 @@ END: Cython Metadata */ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. 
#else -#define CYTHON_ABI "0_29_24" -#define CYTHON_HEX_VERSION 0x001D18F0 +#define CYTHON_ABI "0_29_26" +#define CYTHON_HEX_VERSION 0x001D1AF0 #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -175,7 +175,7 @@ END: Cython Metadata */ #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif - #if PY_VERSION_HEX < 0x030300F0 + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) @@ -194,7 +194,7 @@ END: Cython Metadata */ #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) @@ -213,7 +213,9 @@ END: Cython Metadata */ #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif #undef SHIFT #undef BASE #undef MASK @@ -330,9 +332,68 @@ END: Cython Metadata */ #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if 
(!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } #else #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) @@ -570,10 +631,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) @@ -731,6 +792,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else @@ -1141,6 +1203,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, #ifndef Py_MEMBER_SIZE #define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) #endif +#if CYTHON_FAST_PYCALL static size_t __pyx_pyframe_localsplus_offset = 0; #include "frameobject.h" #define __Pxy_PyFrame_Initialize_Offsets()\ @@ -1148,6 +1211,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) #define __Pyx_PyFrame_GetLocalsplus(frame)\ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL #endif /* PyObjectCall.proto */ @@ -35198,6 +35262,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalTh #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython__TryExceptContainerObj(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -35335,6 +35402,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython__TryExceptContai #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame; @@ -35469,6 +35539,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame = { #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -35589,6 +35662,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerOnlyUnhandledExceptions(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -35728,6 +35804,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTr #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTracerNoBackFrame(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -35970,6 +36049,9 @@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_TopLevelThreadTr #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -36107,6 +36189,9 
@@ static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer = { #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyMethodDef __pyx_methods[] = { @@ -40764,7 +40849,7 @@ static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int } if (!use_cline) { c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; @@ -40861,30 +40946,31 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif if (!py_srcfile) goto bad; + #endif if (c_line) { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); + if (!py_funcname) goto bad; #endif } - if (!py_funcname) goto bad; + #if PY_MAJOR_VERSION < 3 py_code = __Pyx_PyCode_New( 0, 0, @@ -40903,11 +40989,16 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( __pyx_empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline return py_code; bad: - Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, @@ -41793,6 +41884,23 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? 
__Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } diff --git a/src/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c b/src/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c index 1de9e69e6..d33a77fac 100644 --- a/src/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c +++ b/src/debugpy/_vendored/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c @@ -1,4 +1,4 @@ -/* Generated by Cython 0.29.24 */ +/* Generated by Cython 0.29.26 */ /* BEGIN: Cython Metadata { @@ -32,8 +32,8 @@ END: Cython Metadata */ #elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000) #error Cython requires Python 2.6+ or Python 3.3+. #else -#define CYTHON_ABI "0_29_24" -#define CYTHON_HEX_VERSION 0x001D18F0 +#define CYTHON_ABI "0_29_26" +#define CYTHON_HEX_VERSION 0x001D1AF0 #define CYTHON_FUTURE_DIVISION 0 #include #ifndef offsetof @@ -180,7 +180,7 @@ END: Cython Metadata */ #ifndef CYTHON_USE_UNICODE_INTERNALS #define CYTHON_USE_UNICODE_INTERNALS 1 #endif - #if PY_VERSION_HEX < 0x030300F0 + #if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2 #undef CYTHON_USE_UNICODE_WRITER #define CYTHON_USE_UNICODE_WRITER 0 #elif !defined(CYTHON_USE_UNICODE_WRITER) @@ -199,7 +199,7 @@ END: Cython Metadata */ #define CYTHON_FAST_THREAD_STATE 1 #endif #ifndef CYTHON_FAST_PYCALL - #define CYTHON_FAST_PYCALL 1 + #define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1) #endif #ifndef CYTHON_PEP489_MULTI_PHASE_INIT #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000) @@ -218,7 +218,9 @@ END: Cython Metadata */ #define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) #endif #if CYTHON_USE_PYLONG_INTERNALS - #include "longintrepr.h" + #if PY_MAJOR_VERSION < 3 + #include "longintrepr.h" + #endif #undef SHIFT #undef BASE #undef MASK @@ -335,9 +337,68 @@ END: Cython Metadata */ #define __Pyx_DefaultClassType PyClass_Type #else #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2 - #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ - PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#if PY_VERSION_HEX >= 0x030B00A1 + static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f, + PyObject *code, PyObject *c, PyObject* n, PyObject *v, + PyObject *fv, PyObject *cell, PyObject* fn, + PyObject *name, int fline, PyObject *lnos) { + PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL; + PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL; + const char *fn_cstr=NULL; + const char *name_cstr=NULL; + PyCodeObject* co=NULL; + PyObject *type, *value, *traceback; + PyErr_Fetch(&type, &value, &traceback); + if (!(kwds=PyDict_New())) goto end; + if (!(argcount=PyLong_FromLong(a))) goto end; + if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end; + if (!(posonlyargcount=PyLong_FromLong(0))) goto end; + if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end; + if (!(kwonlyargcount=PyLong_FromLong(k))) goto end; + if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end; + if (!(nlocals=PyLong_FromLong(l))) goto end; + if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end; + if (!(stacksize=PyLong_FromLong(s))) goto end; + if (PyDict_SetItemString(kwds, 
"co_stacksize", stacksize) != 0) goto end; + if (!(flags=PyLong_FromLong(f))) goto end; + if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end; + if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end; + if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end; + if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end; + if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end; + if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too; + if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here + if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too; + Py_XDECREF((PyObject*)co); + co = (PyCodeObject*)call_result; + call_result = NULL; + if (0) { + cleanup_code_too: + Py_XDECREF((PyObject*)co); + co = NULL; + } + end: + Py_XDECREF(kwds); + Py_XDECREF(argcount); + Py_XDECREF(posonlyargcount); + Py_XDECREF(kwonlyargcount); + Py_XDECREF(nlocals); + Py_XDECREF(stacksize); + Py_XDECREF(replace); + Py_XDECREF(call_result); + Py_XDECREF(empty); + if (type) { + PyErr_Restore(type, value, traceback); + } + return co; + } #else #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) @@ -575,10 +636,10 @@ static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) { #if PY_VERSION_HEX < 0x030200A4 typedef long Py_hash_t; #define __Pyx_PyInt_FromHash_t PyInt_FromLong - #define __Pyx_PyInt_AsHash_t PyInt_AsLong + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t #else #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t - #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t + #define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t #endif #if PY_MAJOR_VERSION >= 3 #define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func)) @@ -758,6 +819,7 @@ static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj)) static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*); #if CYTHON_ASSUME_SAFE_MACROS #define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? 
PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) #else @@ -1152,6 +1214,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, #ifndef Py_MEMBER_SIZE #define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member) #endif +#if CYTHON_FAST_PYCALL static size_t __pyx_pyframe_localsplus_offset = 0; #include "frameobject.h" #define __Pxy_PyFrame_Initialize_Offsets()\ @@ -1159,6 +1222,7 @@ static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus))) #define __Pyx_PyFrame_GetLocalsplus(frame)\ (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset)) +#endif // CYTHON_FAST_PYCALL #endif /* PyObjectCall.proto */ @@ -15178,6 +15242,9 @@ static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_Thr #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator_FuncCodeInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -15393,6 +15460,9 @@ static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator_Fun #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyObject *__pyx_tp_new_18_pydevd_frame_eval_22pydevd_frame_evaluator__CodeLineInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { @@ -15560,6 +15630,9 @@ static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__Co #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static struct __pyx_vtabstruct_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue __pyx_vtable_18_pydevd_frame_eval_22pydevd_frame_evaluator__CacheValue; @@ -15766,6 +15839,9 @@ static PyTypeObject __pyx_type_18_pydevd_frame_eval_22pydevd_frame_evaluator__Ca #if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000 0, /*tp_print*/ #endif + #if CYTHON_COMPILING_IN_PYPY && PYPY_VERSION_NUM+0 >= 0x06000000 + 0, /*tp_pypy_flags*/ + #endif }; static PyMethodDef __pyx_methods[] = { @@ -19270,7 +19346,7 @@ static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int } if (!use_cline) { c_line = 0; - PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + (void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); } else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) { c_line = 0; @@ -19367,30 +19443,31 @@ static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( const char *funcname, int c_line, int py_line, const char *filename) { - PyCodeObject *py_code = 0; - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; + PyCodeObject *py_code = NULL; + PyObject *py_funcname = NULL; #if PY_MAJOR_VERSION < 3 + PyObject *py_srcfile = NULL; py_srcfile = PyString_FromString(filename); - #else - py_srcfile = PyUnicode_FromString(filename); - #endif if (!py_srcfile) goto bad; + #endif if (c_line) { #if 
PY_MAJOR_VERSION < 3 py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; #else py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + if (!py_funcname) goto bad; + funcname = PyUnicode_AsUTF8(py_funcname); + if (!funcname) goto bad; #endif } else { #if PY_MAJOR_VERSION < 3 py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); + if (!py_funcname) goto bad; #endif } - if (!py_funcname) goto bad; + #if PY_MAJOR_VERSION < 3 py_code = __Pyx_PyCode_New( 0, 0, @@ -19409,11 +19486,16 @@ static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( __pyx_empty_bytes /*PyObject *lnotab*/ ); Py_DECREF(py_srcfile); - Py_DECREF(py_funcname); + #else + py_code = PyCode_NewEmpty(filename, funcname, py_line); + #endif + Py_XDECREF(py_funcname); // XDECREF since it's only set on Py3 if cline return py_code; bad: - Py_XDECREF(py_srcfile); Py_XDECREF(py_funcname); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(py_srcfile); + #endif return NULL; } static void __Pyx_AddTraceback(const char *funcname, int c_line, @@ -20299,6 +20381,23 @@ static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { Py_DECREF(x); return ival; } +static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) { + if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) { + return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o); +#if PY_MAJOR_VERSION < 3 + } else if (likely(PyInt_CheckExact(o))) { + return PyInt_AS_LONG(o); +#endif + } else { + Py_ssize_t ival; + PyObject *x; + x = PyNumber_Index(o); + if (!x) return -1; + ival = PyInt_AsLong(x); + Py_DECREF(x); + return ival; + } +} static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) { return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False); } diff --git a/src/debugpy/_vendored/pydevd/tests_python/debugger_fixtures.py b/src/debugpy/_vendored/pydevd/tests_python/debugger_fixtures.py index 5e21a620f..d5a3809d5 100644 --- a/src/debugpy/_vendored/pydevd/tests_python/debugger_fixtures.py +++ b/src/debugpy/_vendored/pydevd/tests_python/debugger_fixtures.py @@ -495,10 +495,10 @@ def test_file(self, **kwargs): version = [int(x) for x in django.get_version().split('.')][:2] if version == [1, 7]: django_folder = 'my_django_proj_17' - elif version in ([2, 1], [2, 2], [3, 0], [3, 1], [3, 2]): + elif version in ([2, 1], [2, 2], [3, 0], [3, 1], [3, 2], [4, 0]): django_folder = 'my_django_proj_21' else: - raise AssertionError('Can only check django 1.7, 2.1, 2.2, 3.0, 3.1 and 3.2 right now. Found: %s' % (version,)) + raise AssertionError('Can only check django 1.7, 2.1, 2.2, 3.0, 3.1, 3.2 and 4.0 right now. Found: %s' % (version,)) WriterThread.DJANGO_FOLDER = django_folder for key, value in kwargs.items(): diff --git a/src/debugpy/_vendored/pydevd/tests_python/my_django_proj_21/my_app/urls.py b/src/debugpy/_vendored/pydevd/tests_python/my_django_proj_21/my_app/urls.py index ada7ec44b..4f021744f 100644 --- a/src/debugpy/_vendored/pydevd/tests_python/my_django_proj_21/my_app/urls.py +++ b/src/debugpy/_vendored/pydevd/tests_python/my_django_proj_21/my_app/urls.py @@ -1,4 +1,7 @@ -from django.conf.urls import url +try: + from django.conf.urls import url +except ImportError: + from django.urls import re_path as url from . 
import views

diff --git a/src/debugpy/_vendored/pydevd/tests_python/test_convert_utilities.py b/src/debugpy/_vendored/pydevd/tests_python/test_convert_utilities.py
index 63f14e11b..3fcba4a1f 100644
--- a/src/debugpy/_vendored/pydevd/tests_python/test_convert_utilities.py
+++ b/src/debugpy/_vendored/pydevd/tests_python/test_convert_utilities.py
@@ -8,6 +8,13 @@
 import sys
+@pytest.fixture(autouse=True)
+def _reset_ide_os():
+    yield
+    from pydevd_file_utils import set_ide_os
+    set_ide_os('WINDOWS' if sys.platform == 'win32' else 'UNIX')
+
+
 def test_convert_utilities(tmpdir):
     import pydevd_file_utils
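
The autouse fixture added above matters because pydevd_file_utils.set_ide_os() flips process-global state: a test that switches the simulated client OS and does not restore it would bleed into every later test in the session. A minimal sketch of the failure mode the fixture guards against (the test names and bodies are hypothetical illustrations; only set_ide_os and its 'WINDOWS'/'UNIX' values come from the change above):

    import sys
    from pydevd_file_utils import set_ide_os

    def test_paths_for_a_windows_client():
        # Pretend the connected IDE/client runs on Windows for this test only.
        set_ide_os('WINDOWS')
        # ... assertions about client<->server path translation would go here ...

    def test_unrelated_behaviour():
        # Without the autouse _reset_ide_os fixture, a Linux CI run would still
        # see the leftover 'WINDOWS' setting from the previous test here.
        assert sys.platform is not None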
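
More broadly, the regenerated C in this patch (Cython 0.29.26) sidesteps the CPython 3.11 changes to PyCode_New(): on 3.11 the new __Pyx_PyCode_New shim builds an empty code object with PyCode_NewEmpty() and fills in the remaining fields through the code object's replace() method, and __Pyx_CreateCodeObjectForTraceback now uses PyCode_NewEmpty() directly on Python 3. A rough Python-level sketch of the same idea, using only the documented types.CodeType.replace() API (Python 3.8+); the helper name is invented for illustration:

    import types

    def fake_code_for_traceback(filename, funcname, firstlineno):
        # Start from a trivially valid, empty code object for the right file
        # (roughly what PyCode_NewEmpty gives you at the C level)...
        empty = compile("", filename, "exec")
        # ...then rewrite just the fields we care about instead of depending on
        # the version-specific PyCode_New constructor signature.
        return empty.replace(co_name=funcname, co_firstlineno=firstlineno)

    co = fake_code_for_traceback("my_module.pyx", "my_func", 42)
    assert isinstance(co, types.CodeType)
    assert (co.co_filename, co.co_name, co.co_firstlineno) == ("my_module.pyx", "my_func", 42)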