lsp_types/semantic_tokens.rs

use std::borrow::Cow;

use serde::ser::SerializeSeq;
use serde::{Deserialize, Serialize};

use crate::{
    PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
    TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
};

/// A set of predefined token types. This set is not fixed
/// and clients can specify additional token types via the
/// corresponding client capabilities.
///
/// @since 3.16.0
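///
/// Beyond the predefined constants below, additional client- or
/// server-specific token types can be constructed at runtime. A small
/// illustrative sketch (the custom name is hypothetical, and the import
/// assumes the crate's public re-exports):
///
/// ```
/// # use lsp_types::SemanticTokenType;
/// let predefined = SemanticTokenType::FUNCTION;
/// assert_eq!(predefined.as_str(), "function");
///
/// // An extension type agreed upon between a client and a server.
/// let custom = SemanticTokenType::new("customLifetime");
/// assert_eq!(custom.as_str(), "customLifetime");
/// ```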
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);

impl SemanticTokenType {
    pub const NAMESPACE: SemanticTokenType = SemanticTokenType::new("namespace");
    pub const TYPE: SemanticTokenType = SemanticTokenType::new("type");
    pub const CLASS: SemanticTokenType = SemanticTokenType::new("class");
    pub const ENUM: SemanticTokenType = SemanticTokenType::new("enum");
    pub const INTERFACE: SemanticTokenType = SemanticTokenType::new("interface");
    pub const STRUCT: SemanticTokenType = SemanticTokenType::new("struct");
    pub const TYPE_PARAMETER: SemanticTokenType = SemanticTokenType::new("typeParameter");
    pub const PARAMETER: SemanticTokenType = SemanticTokenType::new("parameter");
    pub const VARIABLE: SemanticTokenType = SemanticTokenType::new("variable");
    pub const PROPERTY: SemanticTokenType = SemanticTokenType::new("property");
    pub const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember");
    pub const EVENT: SemanticTokenType = SemanticTokenType::new("event");
    pub const FUNCTION: SemanticTokenType = SemanticTokenType::new("function");
    pub const METHOD: SemanticTokenType = SemanticTokenType::new("method");
    pub const MACRO: SemanticTokenType = SemanticTokenType::new("macro");
    pub const KEYWORD: SemanticTokenType = SemanticTokenType::new("keyword");
    pub const MODIFIER: SemanticTokenType = SemanticTokenType::new("modifier");
    pub const COMMENT: SemanticTokenType = SemanticTokenType::new("comment");
    pub const STRING: SemanticTokenType = SemanticTokenType::new("string");
    pub const NUMBER: SemanticTokenType = SemanticTokenType::new("number");
    pub const REGEXP: SemanticTokenType = SemanticTokenType::new("regexp");
    pub const OPERATOR: SemanticTokenType = SemanticTokenType::new("operator");

    /// @since 3.17.0
    pub const DECORATOR: SemanticTokenType = SemanticTokenType::new("decorator");

    pub const fn new(tag: &'static str) -> Self {
        SemanticTokenType(Cow::Borrowed(tag))
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenType {
    fn from(from: String) -> Self {
        SemanticTokenType(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenType {
    fn from(from: &'static str) -> Self {
        SemanticTokenType::new(from)
    }
}

/// A set of predefined token modifiers. This set is not fixed
/// and clients can specify additional token modifiers via the
/// corresponding client capabilities.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);

impl SemanticTokenModifier {
    pub const DECLARATION: SemanticTokenModifier = SemanticTokenModifier::new("declaration");
    pub const DEFINITION: SemanticTokenModifier = SemanticTokenModifier::new("definition");
    pub const READONLY: SemanticTokenModifier = SemanticTokenModifier::new("readonly");
    pub const STATIC: SemanticTokenModifier = SemanticTokenModifier::new("static");
    pub const DEPRECATED: SemanticTokenModifier = SemanticTokenModifier::new("deprecated");
    pub const ABSTRACT: SemanticTokenModifier = SemanticTokenModifier::new("abstract");
    pub const ASYNC: SemanticTokenModifier = SemanticTokenModifier::new("async");
    pub const MODIFICATION: SemanticTokenModifier = SemanticTokenModifier::new("modification");
    pub const DOCUMENTATION: SemanticTokenModifier = SemanticTokenModifier::new("documentation");
    pub const DEFAULT_LIBRARY: SemanticTokenModifier = SemanticTokenModifier::new("defaultLibrary");

    pub const fn new(tag: &'static str) -> Self {
        SemanticTokenModifier(Cow::Borrowed(tag))
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenModifier {
    fn from(from: String) -> Self {
        SemanticTokenModifier(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenModifier {
    fn from(from: &'static str) -> Self {
        SemanticTokenModifier::new(from)
    }
}

#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);

impl TokenFormat {
    pub const RELATIVE: TokenFormat = TokenFormat::new("relative");

    pub const fn new(tag: &'static str) -> Self {
        TokenFormat(Cow::Borrowed(tag))
    }

    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for TokenFormat {
    fn from(from: String) -> Self {
        TokenFormat(Cow::from(from))
    }
}

impl From<&'static str> for TokenFormat {
    fn from(from: &'static str) -> Self {
        TokenFormat::new(from)
    }
}

/// @since 3.16.0
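///
/// The `token_type` index and the bits of `token_modifiers_bitset` in each
/// [`SemanticToken`] refer to positions in this legend. A minimal sketch of a
/// server-side legend (the concrete selection is illustrative):
///
/// ```
/// # use lsp_types::{SemanticTokenModifier, SemanticTokenType, SemanticTokensLegend};
/// let legend = SemanticTokensLegend {
///     token_types: vec![SemanticTokenType::FUNCTION, SemanticTokenType::VARIABLE],
///     token_modifiers: vec![SemanticTokenModifier::DECLARATION, SemanticTokenModifier::STATIC],
/// };
/// // With this legend, `token_type == 1` means "variable" and bit 0 of the
/// // modifier bitset means "declaration".
/// assert_eq!(legend.token_types[1], SemanticTokenType::VARIABLE);
/// ```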
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    /// The token types a server uses.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers a server uses.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}

/// The actual tokens.
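///
/// Per the LSP specification, positions are encoded relative to the previous
/// token in the stream: `delta_line` is the line offset from the previous
/// token, and `delta_start` is relative to the previous token's start
/// character when both tokens are on the same line (otherwise it is the
/// absolute start character). `token_modifiers_bitset` has bit `i` set when
/// the modifier at index `i` of the legend applies. An illustrative encoding
/// of two tokens at (line 2, char 5, length 3) and (line 2, char 10, length 4):
///
/// ```text
/// // delta_line, delta_start, length, token_type, token_modifiers_bitset
///    2,          5,           3,      0,          3,  // modifiers 0 and 1 apply
///    0,          5,           4,      1,          0,  // same line: 10 - 5 = 5
/// ```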
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    pub delta_line: u32,
    pub delta_start: u32,
    pub length: u32,
    pub token_type: u32,
    pub token_modifiers_bitset: u32,
}

impl SemanticToken {
    /// Deserializes the flat wire representation (five `u32`s per token) into
    /// a list of `SemanticToken`s.
    fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<SemanticToken>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::<u32>::deserialize(deserializer)?;
        let chunks = data.chunks_exact(5);

        if !chunks.remainder().is_empty() {
            return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
        }

        Result::Ok(
            chunks
                .map(|chunk| SemanticToken {
                    delta_line: chunk[0],
                    delta_start: chunk[1],
                    length: chunk[2],
                    token_type: chunk[3],
                    token_modifiers_bitset: chunk[4],
                })
                .collect(),
        )
    }

    /// Serializes tokens as the flat integer sequence expected on the wire
    /// (five `u32`s per token).
    fn serialize_tokens<S>(tokens: &[SemanticToken], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
        for token in tokens.iter() {
            seq.serialize_element(&token.delta_line)?;
            seq.serialize_element(&token.delta_start)?;
            seq.serialize_element(&token.length)?;
            seq.serialize_element(&token.token_type)?;
            seq.serialize_element(&token.token_modifiers_bitset)?;
        }
        seq.end()
    }

    /// Like `deserialize_tokens`, but for an optional token list.
    fn deserialize_tokens_opt<'de, D>(
        deserializer: D,
    ) -> Result<Option<Vec<SemanticToken>>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
    }

    /// Like `serialize_tokens`, but for an optional token list.
    fn serialize_tokens_opt<S>(
        data: &Option<Vec<SemanticToken>>,
        serializer: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        #[derive(Serialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(serialize_with = "SemanticToken::serialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        let opt = data.as_ref().map(|t| Wrapper { tokens: t.to_vec() });

        opt.serialize(serializer)
    }
}

/// @since 3.16.0
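///
/// A minimal round-trip sketch of the wire format, assuming `serde_json` is
/// available as elsewhere in this crate: `data` serializes to a flat array of
/// integers, five per token.
///
/// ```
/// # use lsp_types::{SemanticToken, SemanticTokens};
/// let tokens = SemanticTokens {
///     result_id: None,
///     data: vec![SemanticToken {
///         delta_line: 2,
///         delta_start: 5,
///         length: 3,
///         token_type: 0,
///         token_modifiers_bitset: 3,
///     }],
/// };
/// assert_eq!(
///     serde_json::to_string(&tokens).unwrap(),
///     r#"{"data":[2,5,3,0,3]}"#
/// );
/// ```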
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id. If provided and clients support delta updating,
    /// the client will include the result id in the next semantic token request.
    /// A server can then, instead of computing all semantic tokens again, simply
    /// send a delta.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,

    /// The actual tokens. For a detailed description about how the data is
    /// structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensResult {
    fn from(from: SemanticTokens) -> Self {
        SemanticTokensResult::Tokens(from)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensResult {
    fn from(from: SemanticTokensPartialResult) -> Self {
        SemanticTokensResult::Partial(from)
    }
}

/// @since 3.16.0
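///
/// Per the LSP specification, `start` and `delete_count` are offsets into the
/// flat integer array of the previous response, not token indices. A hedged
/// sketch (values are illustrative, and `serde_json` is assumed to be
/// available): replacing the second token of a previous response, i.e.
/// integers `5..10`, with a single new token looks like this on the wire.
///
/// ```
/// # use lsp_types::{SemanticToken, SemanticTokensEdit};
/// let edit = SemanticTokensEdit {
///     start: 5,
///     delete_count: 5,
///     data: Some(vec![SemanticToken {
///         delta_line: 0,
///         delta_start: 5,
///         length: 4,
///         token_type: 1,
///         token_modifiers_bitset: 0,
///     }]),
/// };
/// assert_eq!(
///     serde_json::to_string(&edit).unwrap(),
///     r#"{"start":5,"deleteCount":5,"data":[0,5,4,1,0]}"#
/// );
/// ```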
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    pub start: u32,
    pub delete_count: u32,

    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}

impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokens) -> Self {
        SemanticTokensFullDeltaResult::Tokens(from)
    }
}

impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokensDelta) -> Self {
        SemanticTokensFullDeltaResult::TokensDelta(from)
    }
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// For a detailed description of how these edits are structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L131>
    pub edits: Vec<SemanticTokensEdit>,
}

/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the implementation supports dynamic registration. If this is set to `true`,
    /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
    /// return value for the corresponding server capability as well.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    /// Which requests the client supports and might send to the server
    /// depending on the server's capability. Please note that clients might not
    /// show semantic tokens or might degrade some of the user experience if a range
    /// or full request is advertised by the client but not provided by the
    /// server. If, for example, the client capabilities `requests.full` and
    /// `requests.range` are both set to `true` but the server only provides a
    /// range provider, the client might not render a minimap correctly or might
    /// even decide to not show any semantic tokens at all.
    pub requests: SemanticTokensClientCapabilitiesRequests,

    /// The token types that the client supports.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers that the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,

    /// The token formats the client supports.
    pub formats: Vec<TokenFormat>,

    /// Whether the client supports tokens that can overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,

    /// Whether the client supports tokens that can span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,

    /// Whether the client allows the server to actively cancel a
    /// semantic token request, e.g. supports returning
    /// `ErrorCodes.ServerCancelled`. If a server does, the client
    /// needs to retrigger the request.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,

    /// Whether the client uses semantic tokens to augment existing
    /// syntax tokens. If set to `true`, client-side created syntax
    /// tokens and semantic tokens are both used for colorization. If
    /// set to `false`, the client only uses the returned semantic tokens
    /// for colorization.
    ///
    /// If the value is `undefined`, then the client behavior is not
    /// specified.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler.
        /// The server supports deltas for full documents.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}

/// @since 3.16.0
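///
/// A minimal sketch of a server capability that advertises range requests and
/// full-document requests with delta support (the legend contents are
/// illustrative):
///
/// ```
/// # use lsp_types::{
/// #     SemanticTokenModifier, SemanticTokenType, SemanticTokensFullOptions,
/// #     SemanticTokensLegend, SemanticTokensOptions,
/// # };
/// let options = SemanticTokensOptions {
///     legend: SemanticTokensLegend {
///         token_types: vec![SemanticTokenType::FUNCTION, SemanticTokenType::VARIABLE],
///         token_modifiers: vec![SemanticTokenModifier::STATIC],
///     },
///     range: Some(true),
///     full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
///     ..Default::default()
/// };
/// assert_eq!(options.range, Some(true));
/// ```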
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,

    /// The legend used by the server
    pub legend: SemanticTokensLegend,

    /// Server supports providing semantic tokens for a specific range
    /// of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// Server supports providing semantic tokens for a full document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,

    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,

    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}

impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensOptions) -> Self {
        SemanticTokensServerCapabilities::SemanticTokensOptions(from)
    }
}

impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensRegistrationOptions) -> Self {
        SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(from)
    }
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensWorkspaceClientCapabilities {
    /// Whether the client implementation supports a refresh request sent from
    /// the server to the client.
    ///
    /// Note that this event is global and will force the client to refresh all
    /// semantic tokens currently shown. It should be used with absolute care
    /// and is useful for situations where a server, for example, detects a
    /// project-wide change that requires such a calculation.
    pub refresh_support: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The result id of a previous response. The result id can either point to a full response
    /// or a delta response, depending on what was received last.
    pub previous_result_id: String,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The range the semantic tokens are requested for.
    pub range: Range,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensRangeResult {
    fn from(tokens: SemanticTokens) -> Self {
        SemanticTokensRangeResult::Tokens(tokens)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
    fn from(partial: SemanticTokensPartialResult) -> Self {
        SemanticTokensRangeResult::Partial(partial)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    #[test]
    fn test_semantic_tokens_support_serialization() {
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    #[test]
    #[should_panic]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(
            r#"{"data":[1]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );
    }

    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );

        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );

        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}