Mirror of https://github.com/apple/swift.git (synced 2025-12-21 12:14:44 +01:00)
Revert "[syntax-coloring] Rework the syntax map to use offset + length and simplify the delta logic" (#10633)
Resolving rdar://32988175.
@@ -206,15 +206,6 @@ SyntaxModelContext::SyntaxModelContext(SourceFile &SrcFile)
       break;
     }
 
-    case tok::unknown: {
-      if (Tok.getRawText().startswith("\"")) {
-        // This is an invalid string literal
-        Kind = SyntaxNodeKind::String;
-        break;
-      }
-      continue;
-    }
-
     default:
       continue;
     }
@@ -227,15 +227,6 @@ func f(x: Int) -> Int {
 // CHECK: <str>"This is string </str>\<anchor>(</anchor>genFn({(a:<type>Int</type> -> <type>Int</type>) <kw>in</kw> a})<anchor>)</anchor><str> interpolation"</str>
 "This is string \(genFn({(a:Int -> Int) in a})) interpolation"
 
-// CHECK: <str>"This is unterminated</str>
-"This is unterminated
-
-// CHECK: <str>"This is unterminated with ignored \(interpolation) in it</str>
-"This is unterminated with ignored \(interpolation) in it
-
-// CHECK: <str>"This is terminated with invalid \(interpolation" + "in it"</str>
-"This is terminated with invalid \(interpolation" + "in it"
-
 // CHECK: <str>"""
 // CHECK-NEXT: This is a multiline string.
 // CHECK-NEXT: """</str>
@@ -245,19 +236,9 @@ func f(x: Int) -> Int {
 
 // CHECK: <str>"""
 // CHECK-NEXT: This is a multiline</str>\<anchor>(</anchor> <str>"interpolated"</str> <anchor>)</anchor><str>string
-// CHECK-NEXT: </str>\<anchor>(</anchor>
-// CHECK-NEXT: <str>"""
-// CHECK-NEXT: inner
-// CHECK-NEXT: """</str>
-// CHECK-NEXT: <anchor>)</anchor><str>
 // CHECK-NEXT: """</str>
 """
 This is a multiline\( "interpolated" )string
-\(
-"""
-inner
-"""
-)
 """
 }
 
@@ -1,16 +0,0 @@
-// RUN: %target-swift-ide-test -syntax-coloring -source-filename %s | %FileCheck %s
-// RUN: %target-swift-ide-test -syntax-coloring -typecheck -source-filename %s | %FileCheck %s
-
-// CHECK: <kw>let</kw> x = <str>"""
-// CHECK-NEXT: This is an unterminated
-// CHECK-NEXT: \( "multiline" )
-// CHECK-NEXT: string followed by code
-// CHECK-NEXT: ""
-// CHECK-NEXT: func foo() {}
-// CHECK-NEXT: </str>
-let x = """
-This is an unterminated
-\( "multiline" )
-string followed by code
-""
-func foo() {}
@@ -13,6 +13,8 @@
 ]
 }
 {
+key.offset: 0,
+key.length: 3,
 key.diagnostic_stage: source.diagnostic.stage.swift.parse,
 key.substructure: [
 {
@@ -1,5 +0,0 @@
-
-let x = /*
-
-*/ 2
-func foo() {}
@@ -1,5 +0,0 @@
-let y = /*
-
-*// /*
-
-*/ 2
@@ -1,12 +0,0 @@
-let a = "value"
-let x = """
-
-\(
-a
-)
-
-
-
-func foo () -> String {
-return "foo"
-}
@@ -1,19 +0,0 @@
-/// This function does stuff
-///
-/// - parameter first: The first parameter
-///
-/// - returns: The return value
-func foo(first: Int) -> String {
-return ""
-}
-
-let x = "Changing this string should only affect this line"
-
-/// This function does other stuff
-///
-/// - parameter first: The first parameter
-///
-/// - returns: The return value
-func bar(first: Int) -> String {
-return ""
-}
@@ -1,4 +0,0 @@
-let l = 2
-l
-\( 56
-)
@@ -1,14 +0,0 @@
-
-/// A comment
-/// - Note: important things
-let x = 42
-
-struct Point {
-let x: Int = x
-let y: Int = x
-
-func mag2() {
-return x*x + y*y;
-}
-}
-
@@ -1,2 +0,0 @@
-
-let x = 421
@@ -1,118 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-edit-block-comment.swift == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-block-comment.swift -pos=4:2 -replace=" " -length=1 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-block-comment.swift -pos=4:2 -replace="/" -length=1 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-block-comment.swift -pos=1:1 -replace="//" -length=2 | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Initial state
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 34,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 3,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 7,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 11,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 18,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 20,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 25,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After removing the '/' from the comment close
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 3,
-// CHECK-NEXT: key.length: 31,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 3,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 7,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 11,
-// CHECK-NEXT: key.length: 23
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After adding the '/' back to the comment close
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 3,
-// CHECK-NEXT: key.length: 31,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 3,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 7,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 11,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 18,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 20,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 25,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// after adding a single-line comment on the line above
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -1,100 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-edit-chained-comment.swift == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-chained-comment.swift -pos=1:9 -replace=" " -length=1 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-chained-comment.swift -pos=1:9 -replace="/" -length=1 | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Initial state
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 25,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 8,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 23,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-
-// After replacing the '/' from the comment open with ' '
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 25,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 13,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 23,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After adding the '/' back to the comment open
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 25,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 8,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.comment,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 23,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -29,7 +29,7 @@
 }
 {
 key.offset: 1,
-key.length: 34,
+key.length: 36,
 key.diagnostic_stage: source.diagnostic.stage.swift.parse,
 key.syntaxmap: [
 {
@@ -28,6 +28,8 @@
 ]
 }
 {
+key.offset: 36,
+key.length: 0,
 key.diagnostic_stage: source.diagnostic.stage.swift.parse,
 key.syntaxmap: [
 ]
@@ -1,162 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-edit-multiline-string.swift == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-multiline-string.swift -pos=8:1 -replace='"""' -length=3 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-multiline-string.swift -pos=6:2 -replace=')' -length=1 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-multiline-string.swift -pos=2:10 -replace=' ' -length=1 | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Original file contents
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 84,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 8,
-// CHECK-NEXT: key.length: 7
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 20,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 24,
-// CHECK-NEXT: key.length: 60
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After terminating the multiline string
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 68,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 20,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 24,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string_interpolation_anchor,
-// CHECK-NEXT: key.offset: 30,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 32,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string_interpolation_anchor,
-// CHECK-NEXT: key.offset: 34,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 35,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 43,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 48,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 58,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 69,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 76,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After adding a character after the interpolation
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 34,
-// CHECK-NEXT: key.length: 8,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string_interpolation_anchor,
-// CHECK-NEXT: key.offset: 34,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 35,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After replacing the middle opening quote with a space
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 68,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 20,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 24,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 32,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 38,
-// CHECK-NEXT: key.length: 46
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -1,210 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-edit-nested-token.swift == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-nested-token.swift -pos=10:43 -replace='impact' -length=6 | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Original file contents
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 386,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 29
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 29,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 33,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment.field,
-// CHECK-NEXT: key.offset: 39,
-// CHECK-NEXT: key.length: 9
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 48,
-// CHECK-NEXT: key.length: 28
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 76,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 80,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment.field,
-// CHECK-NEXT: key.offset: 86,
-// CHECK-NEXT: key.length: 7
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 93,
-// CHECK-NEXT: key.length: 19
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 112,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 117,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 121,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 128,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 136,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 147,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 154,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 160,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 164,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 168,
-// CHECK-NEXT: key.length: 51
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 221,
-// CHECK-NEXT: key.length: 35
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 256,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 260,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment.field,
-// CHECK-NEXT: key.offset: 266,
-// CHECK-NEXT: key.length: 9
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 275,
-// CHECK-NEXT: key.length: 28
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 303,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 307,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment.field,
-// CHECK-NEXT: key.offset: 313,
-// CHECK-NEXT: key.length: 7
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 320,
-// CHECK-NEXT: key.length: 19
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 339,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 344,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 348,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 355,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 363,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 374,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 381,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After editing a string in between nested comments
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 160,
-// CHECK-NEXT: key.length: 60,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 160,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 164,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.string,
-// CHECK-NEXT: key.offset: 168,
-// CHECK-NEXT: key.length: 51
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -1,78 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-edit-remove.swift == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-remove.swift -pos=3:3 -length=1 -replace='' == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-remove.swift -pos=2:1 -replace='' -length=1 == -req=edit -print-raw-response %S/Inputs/syntaxmap-edit-remove.swift -pos=1:9 -length=1 -replace='' | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Initial state
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 20,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 8,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 10,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 15,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After removing the space before the '56'
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 12,
-// CHECK-NEXT: key.length: 5,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 14,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After deleting the identifier 'l'
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 10,
-// CHECK-NEXT: key.length: 1,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: ],
-
-// After deleting the last token on the first line
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 9,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 4,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -1,283 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-multiple-edits.swift == -req=edit -print-raw-response -pos=6:13 -length=1 -replace=" " %S/Inputs/syntaxmap-multiple-edits.swift == -req="edit" -pos=14:1 -length=0 -replace="let y = 2" -print-raw-response %S/Inputs/syntaxmap-multiple-edits.swift == -req="edit" -pos=8:10 -length=7 -replace='Int64 = 3; let z = 2' -print-raw-response %S/Inputs/syntaxmap-multiple-edits.swift == -req="edit" -pos=4:9 -length=2 -replace='50 * 95 - 100' -print-raw-response %S/Inputs/syntaxmap-multiple-edits.swift == -req="edit" -pos=1:1 -length=0 -replace='func firstFunc(x: Int) {}' -print-raw-response %S/Inputs/syntaxmap-multiple-edits.swift | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// Initial syntax map
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 152,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 2,
-// CHECK-NEXT: key.length: 14
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 16,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment.field,
-// CHECK-NEXT: key.offset: 22,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.doccomment,
-// CHECK-NEXT: key.offset: 26,
-// CHECK-NEXT: key.length: 19
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 45,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 49,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 53,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 57,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 64,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 74,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 78,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 81,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 87,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 91,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 95,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 98,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 104,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 109,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 114,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 127,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 134,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 136,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 140,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 142,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After replacing a space with a space
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 57,
-// CHECK-NEXT: key.length: 15,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 57,
-// CHECK-NEXT: key.length: 6
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 64,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After adding code at the end of the file
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 151,
-// CHECK-NEXT: key.length: 10,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 151,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 155,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 159,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-
-// After inserting more than we removed
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 89,
-// CHECK-NEXT: key.length: 30,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 91,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 95,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 98,
-// CHECK-NEXT: key.length: 5
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 106,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 109,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 113,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 117,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After inserting less than we removed
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 45,
-// CHECK-NEXT: key.length: 22,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 45,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 49,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 53,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 58,
-// CHECK-NEXT: key.length: 2
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 63,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-// After adding code at the start of the file
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 27,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 4
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 5,
-// CHECK-NEXT: key.length: 9
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 15,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.typeidentifier,
-// CHECK-NEXT: key.offset: 18,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
@@ -1,49 +0,0 @@
-// RUN: %sourcekitd-test -req=open -print-raw-response %S/Inputs/syntaxmap-partial-delete.swift == -req=edit -print-raw-response -pos=2:10 -length=2 -replace='' %S/Inputs/syntaxmap-partial-delete.swift | %sed_clean > %t.response
-// RUN: %FileCheck -input-file=%t.response %s
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 0,
-// CHECK-NEXT: key.length: 13,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 1,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 5,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 9,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
-
-
-// After removing 2 chars from number literal
-
-// CHECK: {{^}}{
-// CHECK-NEXT: key.offset: 1,
-// CHECK-NEXT: key.length: 10,
-// CHECK-NEXT: key.diagnostic_stage: source.diagnostic.stage.swift.parse,
-// CHECK-NEXT: key.syntaxmap: [
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.keyword,
-// CHECK-NEXT: key.offset: 1,
-// CHECK-NEXT: key.length: 3
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.identifier,
-// CHECK-NEXT: key.offset: 5,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: },
-// CHECK-NEXT: {
-// CHECK-NEXT: key.kind: source.lang.swift.syntaxtype.number,
-// CHECK-NEXT: key.offset: 9,
-// CHECK-NEXT: key.length: 1
-// CHECK-NEXT: }
-// CHECK-NEXT: ],
@@ -257,223 +257,110 @@ void mergeSplitRanges(unsigned Off1, unsigned Len1, unsigned Off2, unsigned Len2
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
struct SwiftEditorCharRange {
|
|
||||||
size_t Offset;
|
|
||||||
size_t Length;
|
|
||||||
size_t endOffset() const { return Offset + Length; }
|
|
||||||
bool isEmpty() const { return !Length; }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct SwiftSyntaxToken {
|
struct SwiftSyntaxToken {
|
||||||
unsigned Offset;
|
unsigned Column;
|
||||||
unsigned Length:24;
|
unsigned Length:24;
|
||||||
SyntaxNodeKind Kind:8;
|
SyntaxNodeKind Kind:8;
|
||||||
|
|
||||||
SwiftSyntaxToken(unsigned Offset, unsigned Length, SyntaxNodeKind Kind)
|
SwiftSyntaxToken(unsigned Column, unsigned Length,
|
||||||
: Offset(Offset), Length(Length), Kind(Kind) {}
|
SyntaxNodeKind Kind)
|
||||||
|
:Column(Column), Length(Length), Kind(Kind) { }
|
||||||
unsigned endOffset() const {
|
|
||||||
return Offset + Length;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool operator==(const SwiftSyntaxToken &Other) const {
|
|
||||||
return Offset == Other.Offset && Length == Other.Length &&
|
|
||||||
Kind == Other.Kind;
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Represents a the syntax highlighted token ranges in a source file
|
class SwiftSyntaxMap {
|
||||||
struct SwiftSyntaxMap {
|
typedef std::vector<SwiftSyntaxToken> SwiftSyntaxLineMap;
|
||||||
std::vector<SwiftSyntaxToken> Tokens;
|
std::vector<SwiftSyntaxLineMap> Lines;
|
||||||
|
|
||||||
SwiftSyntaxMap(unsigned Capacity = 0) {
|
public:
|
||||||
if (Capacity)
|
bool matchesFirstTokenOnLine(unsigned Line,
|
||||||
Tokens.reserve(Capacity);
|
const SwiftSyntaxToken &Token) const {
|
||||||
}
|
assert(Line > 0);
|
||||||
|
if (Lines.size() < Line)
|
||||||
void addToken(const SwiftSyntaxToken &Token) {
|
|
||||||
assert(Tokens.empty() || Token.Offset >= Tokens.back().Offset);
|
|
||||||
Tokens.push_back(Token);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Merge this nested token into the last token that was added
|
|
||||||
void mergeToken(const SwiftSyntaxToken &Token) {
|
|
||||||
if (Tokens.empty()) {
|
|
||||||
Tokens.push_back(Token);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
auto &LastTok = Tokens.back();
|
|
||||||
mergeSplitRanges(LastTok.Offset, LastTok.Length, Token.Offset, Token.Length,
|
|
||||||
[&](unsigned BeforeOff, unsigned BeforeLen,
|
|
||||||
unsigned AfterOff, unsigned AfterLen) {
|
|
||||||
auto LastKind = LastTok.Kind;
|
|
||||||
Tokens.pop_back();
|
|
||||||
if (BeforeLen)
|
|
||||||
Tokens.emplace_back(BeforeOff, BeforeLen, LastKind);
|
|
||||||
Tokens.push_back(Token);
|
|
||||||
if (AfterLen)
|
|
||||||
Tokens.emplace_back(AfterOff, AfterLen, LastKind);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Adjusts the token offsets and lengths in this syntax map to account for
|
|
||||||
/// replacing Len bytes at the given Offset with NewLen bytes. Tokens before
|
|
||||||
/// the replacement stay the same, tokens after it are shifted, and tokens
|
|
||||||
/// that intersect it have their length set to 0. It also currently expands
|
|
||||||
/// the start and end of the Affected range to at least the line boundaires of
|
|
||||||
/// the replacement range to support an assumption in existing clients.
|
|
||||||
///
|
|
||||||
/// Returns the affected range (the range of the new bytes + any intersected
|
|
||||||
/// tokens) in the new buffer.
|
|
||||||
SwiftEditorCharRange
|
|
||||||
adjustForReplacement(unsigned Offset, unsigned Len, unsigned NewLen, StringRef NewText) {
|
|
||||||
// Extend the affected range to its nearest line boundaries in the old text,
|
|
||||||
// based on the line boundaries of the new text
|
|
||||||
unsigned AffectedStart = getPrevLineBoundary(NewText, Offset);
|
|
||||||
unsigned NewEndLineBoundary =
|
|
||||||
getNextLineBoundary(NewText, Offset + NewLen, /*hasLength=*/NewLen > 0);
|
|
||||||
unsigned AffectedEnd = NewEndLineBoundary - NewLen + Len;
|
|
||||||
|
|
||||||
// Adjust the tokens
|
|
||||||
auto Token = Tokens.begin();
|
|
||||||
while (Token != Tokens.end() && Token->endOffset() <= AffectedStart)
|
|
||||||
++Token; // Completely before the affected range - no change
|
|
||||||
|
|
||||||
while (Token != Tokens.end() && Token->Offset < AffectedEnd) {
|
|
||||||
// This token intersects – extend the affected range if we need to
|
|
||||||
if (Token->Offset < AffectedStart)
|
|
||||||
AffectedStart = Token->Offset;
|
|
||||||
if (Token->endOffset() > AffectedEnd)
|
|
||||||
AffectedEnd = Token->endOffset();
|
|
||||||
|
|
||||||
// Set length to 0 to force a mismatch in forEachChanged below
|
|
||||||
Token->Length = 0;
|
|
||||||
++Token;
|
|
||||||
}
|
|
||||||
|
|
||||||
while (Token != Tokens.end()) {
|
|
||||||
Token->Offset += NewLen - Len; // Completely after - shift
|
|
||||||
++Token;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Adjust the AffectedEnd to its position in the NewText
|
|
||||||
AffectedEnd += NewLen - Len;
|
|
||||||
|
|
||||||
// Return the Affected range in NewText
|
|
||||||
return {AffectedStart, AffectedEnd - AffectedStart};
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Passes each token in this SwiftSyntaxMap to the given EditorConsumer
|
|
||||||
void forEach(EditorConsumer &Consumer) {
|
|
||||||
for (auto &Token: Tokens) {
|
|
||||||
auto Kind = SwiftLangSupport::getUIDForSyntaxNodeKind(Token.Kind);
|
|
||||||
Consumer.handleSyntaxMap(Token.Offset, Token.Length, Kind);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
-  /// Finds the delta between the given SwiftSyntaxMap, Prev, and this one.
-  /// It passes each token not in Prev to the given EditorConsumer and also
-  /// expands the given Affected range (if needed) to include all non-matching
-  /// tokens in the two lists. It also ensures the start and end of the Affected
-  /// range fall on line boundaries to support an assumption in existing clients.
-  ///
-  /// Returns true if this SwiftSyntaxMap is different from Prev.
-  bool forEachChanged(SwiftSyntaxMap &Prev,
-                      SwiftEditorCharRange &Affected,
-                      StringRef BufferText,
-                      EditorConsumer &Consumer) const {
-    unsigned AffectedStart = Affected.Offset, AffectedEnd = Affected.endOffset();
-
-    // Find the first pair of tokens that don't match
-    auto Start = std::make_pair(Tokens.begin(), Prev.Tokens.begin());
-    while (Start.first != Tokens.end() && Start.second != Prev.Tokens.end() &&
-           *Start.first == *Start.second)
-      ++Start.first, ++Start.second;
-
-    if (Start.first == Tokens.end() && Start.second == Prev.Tokens.end()) {
-      // We hit the end of both token lists without a mismatch – no-op.
-      return false;
-    }
-
-    // Adjust the affected bounds to include the mismatched tokens
-    if (Start.first != Tokens.end())
-      AffectedStart = std::min(Start.first->Offset, AffectedStart);
-    if (Start.second != Prev.Tokens.end())
-      AffectedStart = std::min(Start.second->Offset, AffectedStart);
-
-    // Find the last tokens that don't match
-    auto End = std::make_pair(Tokens.rbegin(), Prev.Tokens.rbegin());
-    while (End.first != Tokens.rend() && End.second != Prev.Tokens.rend() &&
-           *End.first == *End.second)
-      ++End.first, ++End.second;
-
-    // Adjust the affected bounds to include the mismatched tokens
-    if (End.first != Tokens.rend())
-      AffectedEnd = std::max(End.first->endOffset(), AffectedEnd);
-    if (End.second != Prev.Tokens.rend())
-      AffectedEnd = std::max(End.second->endOffset(), AffectedEnd);
-
-    assert(AffectedEnd >= AffectedStart);
-
-    auto From = Start.first; // The first mismatched token in this syntax map
-    auto To = End.first; // The last mismatched token in this syntax map
-
-    // Extend the affected range to line boundaries
-    AffectedStart = getPrevLineBoundary(BufferText, AffectedStart);
-    AffectedEnd = getNextLineBoundary(BufferText, AffectedEnd,
-                                      /*hasLength=*/AffectedStart < AffectedEnd);
-
-    // Extend From/To to the outermost tokens in the Affected range
-    while (From != Tokens.begin()) {
-      auto Prev = From - 1;
-      while (Prev != Tokens.begin() && Prev->Offset >= AffectedStart)
-        From = Prev--;
-      if (Prev->endOffset() <= AffectedStart)
-        break;
-      // Multi-line token – extend Affected to this token's start line
-      AffectedStart = getPrevLineBoundary(BufferText, Prev->Offset);
-      From = Prev;
-    };
-
-    while (To != Tokens.rbegin()) {
-      auto Prev = To - 1;
-      while (Prev != Tokens.rbegin() && Prev->endOffset() <= AffectedEnd)
-        To = Prev--;
-      if (Prev->Offset >= AffectedEnd)
-        break;
-      // Multi-line token – extend Affected to this token's end line
-      AffectedEnd = getNextLineBoundary(BufferText, Prev->endOffset(), true);
-      To = Prev;
-    }
-
-    // Report tokens from From -> To to the given EditorConsumer
-    for (; From < To.base(); ++From) {
-      auto Kind = SwiftLangSupport::getUIDForSyntaxNodeKind(From->Kind);
-      Consumer.handleSyntaxMap(From->Offset, From->Length, Kind);
-    }
-
-    // Write back the final Affected range
-    Affected.Offset = AffectedStart;
-    Affected.Length = AffectedEnd - AffectedStart;
-    return true;
-  }
+      return false;
+    unsigned LineOffset = Line - 1;
+    const SwiftSyntaxLineMap &LineMap = Lines[LineOffset];
+    if (LineMap.empty())
+      return false;
+
+    const SwiftSyntaxToken &Tok = LineMap.front();
+    if (Tok.Column == Token.Column && Tok.Length == Token.Length
+        && Tok.Kind == Token.Kind) {
+      return true;
+    }
+    return false;
+  }

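
A rough standalone illustration of the delta strategy the removed forEachChanged relied on: walk the old and new token lists from both ends, skip the common prefix and suffix, and report only the middle run. The token type and the data below are made up for the example; the affected-range and line-boundary handling is left out.

#include <cstdio>
#include <vector>

struct Tok {
  unsigned Offset, Length;
  bool operator==(const Tok &O) const {
    return Offset == O.Offset && Length == O.Length;
  }
};

int main() {
  std::vector<Tok> Prev = {{0, 3}, {4, 2}, {8, 5}, {14, 1}};
  std::vector<Tok> Curr = {{0, 3}, {4, 6}, {11, 2}, {14, 1}};

  // Skip the common prefix and common suffix of the two token lists.
  size_t Lo = 0;
  while (Lo < Prev.size() && Lo < Curr.size() && Prev[Lo] == Curr[Lo])
    ++Lo;
  size_t PrevHi = Prev.size(), CurrHi = Curr.size();
  while (PrevHi > Lo && CurrHi > Lo && Prev[PrevHi - 1] == Curr[CurrHi - 1])
    --PrevHi, --CurrHi;

  // Only the middle run of the new list needs to be re-reported.
  for (size_t I = Lo; I < CurrHi; ++I)
    std::printf("changed token [%u, %u)\n",
                Curr[I].Offset, Curr[I].Offset + Curr[I].Length);
  // Prints: changed token [4, 10) / changed token [11, 13)
}
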
-private:
-  static size_t getPrevLineBoundary(StringRef Text, size_t Offset) {
-    auto Bound = Text.rfind('\n', Offset);
-    if (Bound == StringRef::npos)
-      return 0;
-    return Bound + 1;
-  }
-
-  /// Gets the offset after the next '\n'. If HasLength is true, it will start
-  /// looking one character before the given offset. This is to handle line
-  /// comments and other tokens that include a terminating '\n'.
-  static size_t getNextLineBoundary(StringRef Text, size_t Offset, bool HasLength) {
-    auto Bound = Text.find('\n', HasLength? Offset - 1 : Offset);
-    if (Bound == StringRef::npos)
-      return Text.size();
-    return Bound + 1;
-  }
-};
+  void addTokenForLine(unsigned Line, const SwiftSyntaxToken &Token) {
+    assert(Line > 0);
+    if (Lines.size() < Line) {
+      Lines.resize(Line);
+    }
+    unsigned LineOffset = Line - 1;
+    SwiftSyntaxLineMap &LineMap = Lines[LineOffset];
+    // FIXME: Assert this token is after the last one
+    LineMap.push_back(Token);
+  }
+
+  void mergeTokenForLine(unsigned Line, const SwiftSyntaxToken &Token) {
+    assert(Line > 0);
+    if (Lines.size() < Line) {
+      Lines.resize(Line);
+    }
+    unsigned LineOffset = Line - 1;
+    SwiftSyntaxLineMap &LineMap = Lines[LineOffset];
+    if (!LineMap.empty()) {
+      auto &LastTok = LineMap.back();
+      mergeSplitRanges(LastTok.Column, LastTok.Length,
+                       Token.Column, Token.Length,
+                       [&](unsigned BeforeOff, unsigned BeforeLen,
+                           unsigned AfterOff, unsigned AfterLen) {
+        auto LastKind = LastTok.Kind;
+        LineMap.pop_back();
+        if (BeforeLen)
+          LineMap.emplace_back(BeforeOff, BeforeLen, LastKind);
+        LineMap.push_back(Token);
+        if (AfterLen)
+          LineMap.emplace_back(AfterOff, AfterLen, LastKind);
+      });
+    }
+    else {
+      // Not overlapping, just add the new token to the end
+      LineMap.push_back(Token);
+    }
+  }
+
+  void clearLineRange(unsigned StartLine, unsigned Length) {
+    assert(StartLine > 0);
+    unsigned LineOffset = StartLine - 1;
+    for (unsigned Line = LineOffset; Line < LineOffset + Length
+                                     && Line < Lines.size(); ++Line) {
+      Lines[Line].clear();
+    }
+  }
+
+  void removeLineRange(unsigned StartLine, unsigned Length) {
+    assert(StartLine > 0 && Length > 0);
+
+    if (StartLine < Lines.size()) {
+      unsigned EndLine = StartLine + Length - 1;
+      // Delete all syntax map data from start line through end line
+      Lines.erase(Lines.begin() + StartLine - 1,
+                  EndLine >= Lines.size() ? Lines.end()
+                                          : Lines.begin() + EndLine);
+    }
+  }
+
+  void insertLineRange(unsigned StartLine, unsigned Length) {
+    Lines.insert(StartLine <= Lines.size() ? Lines.begin() + StartLine - 1
+                                           : Lines.end(),
+                 Length, SwiftSyntaxLineMap());
+  }
+
+  void reset() {
+    Lines.clear();
+  }
+};

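
Both the removed mergeToken and the restored mergeTokenForLine lean on mergeSplitRanges to split an enclosing token around a nested one (for example, a field token inside a doc-comment token). The helper below is a hypothetical stand-in written only to show the shape of that split; it is not the sourcekitd implementation and its name is invented.

#include <cstdio>

// Given an enclosing range and a nested range, compute the pieces of the
// enclosing range that remain before and after the nested one.
template <typename Callback>
void splitOuterAroundInner(unsigned OuterOff, unsigned OuterLen,
                           unsigned InnerOff, unsigned InnerLen, Callback CB) {
  unsigned BeforeOff = OuterOff;
  unsigned BeforeLen = InnerOff > OuterOff ? InnerOff - OuterOff : 0;
  unsigned InnerEnd = InnerOff + InnerLen;
  unsigned OuterEnd = OuterOff + OuterLen;
  unsigned AfterOff = InnerEnd;
  unsigned AfterLen = OuterEnd > InnerEnd ? OuterEnd - InnerEnd : 0;
  CB(BeforeOff, BeforeLen, AfterOff, AfterLen);
}

int main() {
  // A doc-comment token at [10, 40) with a nested field token at [18, 24):
  splitOuterAroundInner(10, 30, 18, 6,
                        [](unsigned BOff, unsigned BLen,
                           unsigned AOff, unsigned ALen) {
    std::printf("before [%u, %u), after [%u, %u)\n",
                BOff, BOff + BLen, AOff, AOff + ALen);
    // Prints: before [10, 18), after [24, 40)
  });
}
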
@@ -485,6 +372,8 @@ struct EditorConsumerSyntaxMapEntry {
     :Offset(Offset), Length(Length), Kind(Kind) { }
 };
 
+typedef std::pair<unsigned, unsigned> SwiftEditorCharRange;
+
 struct SwiftSemanticToken {
   unsigned ByteOffset;
   unsigned Length : 24;
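
Note the representation change this hunk restores: SwiftEditorCharRange goes back to a plain std::pair, with .first holding the byte offset and .second the length, whereas the removed code above used named Offset/Length members and an endOffset() helper. A tiny sketch of the pair form follows; the endOffset free function here is illustrative only and not part of the source.

#include <cstdio>
#include <utility>

typedef std::pair<unsigned, unsigned> SwiftEditorCharRange; // {offset, length}

// Illustrative helper; the removed struct exposed this as a member instead.
static unsigned endOffset(const SwiftEditorCharRange &R) {
  return R.first + R.second;
}

int main() {
  SwiftEditorCharRange Affected = {120, 35};
  std::printf("offset=%u length=%u end=%u\n",
              Affected.first, Affected.second, endOffset(Affected));
}
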
@@ -959,12 +848,9 @@ struct SwiftEditorDocument::Implementation {
   const std::string FilePath;
   EditableTextBufferRef EditableBuffer;
 
-  /// The list of syntax highlighted token offsets and ranges in the document
   SwiftSyntaxMap SyntaxMap;
-  /// The minimal range of syntax highlighted tokens affected by the last edit
+  LineRange EditedLineRange;
   SwiftEditorCharRange AffectedRange;
-  /// Whether the last operation was an edit rather than a document open
-  bool Edited;
 
   std::vector<DiagnosticEntryInfo> ParserDiagnostics;
   RefPtr<SwiftDocumentSemanticInfo> SemanticInfo;
@@ -1264,47 +1150,136 @@ public:
   }
 };
 
-/// Walks the syntax model to populate a given SwiftSyntaxMap with the token
-/// ranges to highlight and pass document structure information to the given
-/// EditorConsumer.
 class SwiftEditorSyntaxWalker: public ide::SyntaxModelWalker {
-  /// The syntax map to populate
   SwiftSyntaxMap &SyntaxMap;
+  LineRange EditedLineRange;
+  SwiftEditorCharRange &AffectedRange;
   SourceManager &SrcManager;
+  EditorConsumer &Consumer;
   unsigned BufferID;
   SwiftDocumentStructureWalker DocStructureWalker;
-  /// The current token nesting level (e.g. for a field in a doc comment)
+  std::vector<EditorConsumerSyntaxMapEntry> ConsumerSyntaxMap;
   unsigned NestingLevel = 0;
 public:
   SwiftEditorSyntaxWalker(SwiftSyntaxMap &SyntaxMap,
+                          LineRange EditedLineRange,
+                          SwiftEditorCharRange &AffectedRange,
                           SourceManager &SrcManager, EditorConsumer &Consumer,
                           unsigned BufferID)
-    : SyntaxMap(SyntaxMap), SrcManager(SrcManager), BufferID(BufferID),
+    : SyntaxMap(SyntaxMap), EditedLineRange(EditedLineRange),
+      AffectedRange(AffectedRange), SrcManager(SrcManager), Consumer(Consumer),
+      BufferID(BufferID),
       DocStructureWalker(SrcManager, BufferID, Consumer) { }
 
   bool walkToNodePre(SyntaxNode Node) override {
     if (Node.Kind == SyntaxNodeKind::CommentMarker)
       return DocStructureWalker.walkToNodePre(Node);
 
     ++NestingLevel;
-    auto End = SrcManager.getLocOffsetInBuffer(Node.Range.getEnd(), BufferID),
-         Start = SrcManager.getLocOffsetInBuffer(Node.Range.getStart(), BufferID);
-
-    if (NestingLevel > 1) {
-      // We're nested inside the previously reported token - merge
-      SyntaxMap.mergeToken({Start, End - Start, Node.Kind});
-    } else {
-      // We're a top-level token, add it after the previous one
-      SyntaxMap.addToken({Start, End - Start, Node.Kind});
-    }
+    SourceLoc StartLoc = Node.Range.getStart();
+    auto StartLineAndColumn = SrcManager.getLineAndColumn(StartLoc);
+    auto EndLineAndColumn = SrcManager.getLineAndColumn(Node.Range.getEnd());
+    unsigned StartLine = StartLineAndColumn.first;
+    unsigned EndLine = EndLineAndColumn.second > 1 ? EndLineAndColumn.first
+                                                   : EndLineAndColumn.first - 1;
+    unsigned Offset = SrcManager.getByteDistance(
+        SrcManager.getLocForBufferStart(BufferID), StartLoc);
+    // Note that the length can span multiple lines.
+    unsigned Length = Node.Range.getByteLength();
+
+    SwiftSyntaxToken Token(StartLineAndColumn.second, Length,
+                           Node.Kind);
+    if (EditedLineRange.isValid()) {
+      if (StartLine < EditedLineRange.startLine()) {
+        if (EndLine < EditedLineRange.startLine()) {
+          // We're entirely before the edited range, no update needed.
+          return true;
+        }
+
+        // This token starts before the edited range, but doesn't end before it,
+        // we need to adjust edited line range and clear the affected syntax map
+        // line range.
+        unsigned AdjLineCount = EditedLineRange.startLine() - StartLine;
+        EditedLineRange.setRange(StartLine, AdjLineCount
+                                              + EditedLineRange.lineCount());
+        SyntaxMap.clearLineRange(StartLine, AdjLineCount);
+
+        // Also adjust the affected char range accordingly.
+        unsigned AdjCharCount = AffectedRange.first - Offset;
+        AffectedRange.first -= AdjCharCount;
+        AffectedRange.second += AdjCharCount;
+      }
+      else if (Offset > AffectedRange.first + AffectedRange.second) {
+        // We're past the affected range and already synced up, just return.
+        return true;
+      }
+      else if (StartLine > EditedLineRange.endLine()) {
+        // We're after the edited line range, let's test if we're synced up.
+        if (SyntaxMap.matchesFirstTokenOnLine(StartLine, Token)) {
+          // We're synced up, mark the affected range and return.
+          AffectedRange.second =
+            Offset - (StartLineAndColumn.second - 1) - AffectedRange.first;
+          return true;
+        }
+
+        // We're not synced up, continue replacing syntax map data on this line.
+        SyntaxMap.clearLineRange(StartLine, 1);
+        EditedLineRange.extendToIncludeLine(StartLine);
+      }
+
+      if (EndLine > StartLine) {
+        // The token spans multiple lines, make sure to replace syntax map data
+        // for affected lines.
+        EditedLineRange.extendToIncludeLine(EndLine);
+
+        unsigned LineCount = EndLine - StartLine + 1;
+        SyntaxMap.clearLineRange(StartLine, LineCount);
+      }
+    }
+
+    // Add the syntax map token.
+    if (NestingLevel > 1)
+      SyntaxMap.mergeTokenForLine(StartLine, Token);
+    else
+      SyntaxMap.addTokenForLine(StartLine, Token);
+
+    // Add consumer entry.
+    unsigned ByteOffset = SrcManager.getLocOffsetInBuffer(Node.Range.getStart(),
+                                                          BufferID);
+    UIdent Kind = SwiftLangSupport::getUIDForSyntaxNodeKind(Node.Kind);
+    if (NestingLevel > 1) {
+      assert(!ConsumerSyntaxMap.empty());
+      auto &Last = ConsumerSyntaxMap.back();
+      mergeSplitRanges(Last.Offset, Last.Length, ByteOffset, Length,
+                       [&](unsigned BeforeOff, unsigned BeforeLen,
+                           unsigned AfterOff, unsigned AfterLen) {
+        auto LastKind = Last.Kind;
+        ConsumerSyntaxMap.pop_back();
+        if (BeforeLen)
+          ConsumerSyntaxMap.emplace_back(BeforeOff, BeforeLen, LastKind);
+        ConsumerSyntaxMap.emplace_back(ByteOffset, Length, Kind);
+        if (AfterLen)
+          ConsumerSyntaxMap.emplace_back(AfterOff, AfterLen, LastKind);
+      });
+    }
+    else
+      ConsumerSyntaxMap.emplace_back(ByteOffset, Length, Kind);
+
     return true;
   }

|
bool walkToNodePost(SyntaxNode Node) override {
|
||||||
if (Node.Kind == SyntaxNodeKind::CommentMarker)
|
if (Node.Kind == SyntaxNodeKind::CommentMarker)
|
||||||
return DocStructureWalker.walkToNodePost(Node);
|
return DocStructureWalker.walkToNodePost(Node);
|
||||||
--NestingLevel;
|
|
||||||
|
if (--NestingLevel == 0) {
|
||||||
|
// We've unwound to the top level, so inform the consumer and drain
|
||||||
|
// the consumer syntax map queue.
|
||||||
|
for (auto &Entry: ConsumerSyntaxMap)
|
||||||
|
Consumer.handleSyntaxMap(Entry.Offset, Entry.Length, Entry.Kind);
|
||||||
|
ConsumerSyntaxMap.clear();
|
||||||
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@@ -1580,14 +1555,11 @@ ImmutableTextSnapshotRef SwiftEditorDocument::initializeText(
 
   llvm::sys::ScopedLock L(Impl.AccessMtx);
 
-  Impl.Edited = false;
   Impl.EditableBuffer =
       new EditableTextBuffer(Impl.FilePath, Buf->getBuffer());
 
-  // Reset the syntax map data and affected range
-  Impl.SyntaxMap.Tokens.clear();
-  Impl.AffectedRange = {0, Buf->getBufferSize()};
+  Impl.SyntaxMap.reset();
+  Impl.EditedLineRange.setRange(0,0);
+  Impl.AffectedRange = std::make_pair(0, Buf->getBufferSize());
 
   Impl.SemanticInfo =
       new SwiftDocumentSemanticInfo(Impl.FilePath, Impl.LangSupport);
   Impl.SemanticInfo->setCompilerArgs(Args);
@@ -1600,10 +1572,7 @@ ImmutableTextSnapshotRef SwiftEditorDocument::replaceText(
 
   llvm::sys::ScopedLock L(Impl.AccessMtx);
 
-  Impl.Edited = true;
   llvm::StringRef Str = Buf->getBuffer();
 
-  // Update the buffer itself
   ImmutableTextSnapshotRef Snapshot =
       Impl.EditableBuffer->replace(Offset, Length, Str);
 
@@ -1629,10 +1598,37 @@ ImmutableTextSnapshotRef SwiftEditorDocument::replaceText(
     }
   }
 
-  // Update the old syntax map offsets to account for the replaced range
-  Impl.AffectedRange =
-      Impl.SyntaxMap.adjustForReplacement(Offset, Length, Str.size(),
-                                          Snapshot->getBuffer()->getText());
+  SourceManager &SrcManager = Impl.SyntaxInfo->getSourceManager();
+  unsigned BufID = Impl.SyntaxInfo->getBufferID();
+  SourceLoc StartLoc = SrcManager.getLocForBufferStart(BufID).getAdvancedLoc(
+      Offset);
+  unsigned StartLine = SrcManager.getLineAndColumn(StartLoc).first;
+  unsigned EndLine = SrcManager.getLineAndColumn(
+      StartLoc.getAdvancedLoc(Length)).first;
+
+  // Delete all syntax map data from start line through end line.
+  unsigned OldLineCount = EndLine - StartLine + 1;
+  Impl.SyntaxMap.removeLineRange(StartLine, OldLineCount);
+
+  // Insert empty syntax map data for replaced lines.
+  unsigned NewLineCount = Str.count('\n') + 1;
+  Impl.SyntaxMap.insertLineRange(StartLine, NewLineCount);
+
+  // Update the edited line range.
+  Impl.EditedLineRange.setRange(StartLine, NewLineCount);
+
+  ImmutableTextBufferRef ImmBuf = Snapshot->getBuffer();
+
+  // The affected range starts from the previous newline.
+  if (Offset > 0) {
+    auto AffectedRangeOffset = ImmBuf->getText().rfind('\n', Offset);
+    Impl.AffectedRange.first =
+        AffectedRangeOffset != StringRef::npos ? AffectedRangeOffset + 1 : 0;
+  }
+  else
+    Impl.AffectedRange.first = 0;
+
+  Impl.AffectedRange.second = ImmBuf->getText().size() - Impl.AffectedRange.first;
+
   return Snapshot;
 }
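
The restored replaceText logic above works in whole lines: it drops the line maps the old range covered and inserts one empty line map per line of the replacement text. A short sketch of that counting, with invented line numbers and replacement text:

#include <algorithm>
#include <cstdio>
#include <string>

int main() {
  // Hypothetical edit: the replacement text spans two lines ("foo\nbar"),
  // and the replaced range covered lines 10 through 12 of the old buffer.
  std::string Str = "foo\nbar";
  unsigned StartLine = 10, EndLine = 12;

  unsigned OldLineCount = EndLine - StartLine + 1;             // 3 line maps removed
  unsigned NewLineCount =
      std::count(Str.begin(), Str.end(), '\n') + 1;            // 2 empty line maps inserted

  std::printf("remove %u line maps at line %u, insert %u empty line maps\n",
              OldLineCount, StartLine, NewLineCount);
}
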
@@ -1685,35 +1681,17 @@ void SwiftEditorDocument::readSyntaxInfo(EditorConsumer &Consumer) {
 
   ide::SyntaxModelContext ModelContext(Impl.SyntaxInfo->getSourceFile());
 
-  SwiftSyntaxMap NewMap = SwiftSyntaxMap(Impl.SyntaxMap.Tokens.size() + 16);
-
-  SwiftEditorSyntaxWalker SyntaxWalker(NewMap,
+  SwiftEditorSyntaxWalker SyntaxWalker(Impl.SyntaxMap,
+                                       Impl.EditedLineRange,
+                                       Impl.AffectedRange,
                                        Impl.SyntaxInfo->getSourceManager(),
                                        Consumer,
                                        Impl.SyntaxInfo->getBufferID());
 
   ModelContext.walk(SyntaxWalker);
 
-  bool SawChanges = true;
-  if (Impl.Edited) {
-    // We're answering an edit request. Report all highlighted token ranges not
-    // in the previous syntax map (and any other tokens on the same lines) to
-    // the Consumer and update the affected range to contain them.
-    auto Text = Impl.EditableBuffer->getBuffer()->getText();
-    SawChanges = NewMap.forEachChanged(Impl.SyntaxMap, Impl.AffectedRange, Text,
-                                       Consumer);
-  } else {
-    // This is an open/initialise. Report all highlighted token ranges to the
-    // Consumer.
-    NewMap.forEach(Consumer);
-  }
-  Impl.SyntaxMap = std::move(NewMap);
-
-  // Recording an affected length of 0 still results in the client updating its
-  // copy of the syntax map (by clearing all tokens on the line of the affected
-  // offset). We need to not record it at all to signal a no-op.
-  if (SawChanges)
-    Consumer.recordAffectedRange(Impl.AffectedRange.Offset,
-                                 Impl.AffectedRange.Length);
+  Consumer.recordAffectedRange(Impl.AffectedRange.first,
+                               Impl.AffectedRange.second);
 }
 
 void SwiftEditorDocument::readSemanticInfo(ImmutableTextSnapshotRef Snapshot,