Original | Claude 3.7 Sonnet | LLaMA-3.3-70B | LLaMA-4-Scout | GPT-4.1-mini
Model: Claude 3.7 Sonnet
Time: 21s planning + 15s coding
Prompt tokens: 5.1K
Response tokens: 893
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
using SysprogsDevTools.Tokenizer;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;

namespace Sysprogs.IDE.Framework.Languages.DiffExport
{
    /// <summary>
    /// Demonstrates exporting tokenized source text as HTML, wrapping recognized
    /// token types in spans with CSS classes for syntax highlighting.
    /// </summary>
    public class ExporterDemo
    {
        /// <summary>
        /// Pairs a tokenizer token with its extracted text and a mutable diff state.
        /// </summary>
        public class TokenWrapper
        {
            public enum DiffState
            {
                Normal,
                Added,
                Removed
            }

            private readonly UniversalTokenizer.Token _token;
            private readonly string _text;

            /// <summary>The raw source text covered by this token.</summary>
            public string Value => _text;

            /// <summary>Diff classification of this token; defaults to Normal.</summary>
            public DiffState State { get; set; } = DiffState.Normal;

            public UniversalTokenizer.TokenType Type => _token.Type;

            public TokenWrapper(UniversalTokenizer.Token token, string text)
            {
                _token = token;
                _text = text;
            }
        }

        /// <summary>
        /// A single source line: its 1-based line number and the wrapped tokens on it.
        /// </summary>
        public class LineWrapper
        {
            public int Number { get; set; }
            public List<TokenWrapper> Tokens { get; set; }

            public LineWrapper(int number, List<TokenWrapper> tokens)
            {
                Number = number;
                Tokens = tokens;
            }
        }

        /// <summary>
        /// Tokenizes <paramref name="text"/> line by line and renders it as HTML.
        /// Tokens whose type maps to a CSS class are wrapped in a
        /// <c>&lt;span class="..."&gt;</c>; all other tokens are emitted as plain text.
        /// </summary>
        /// <param name="text">Source text to tokenize; split on '\n'.</param>
        /// <param name="tokenizer">Tokenizer used to split each line into tokens.</param>
        /// <returns>HTML output, one output line per input line.</returns>
        public static string DumpTokens(string text, UniversalTokenizer tokenizer)
        {
            var lines = text.Split('\n');
            var result = new StringBuilder();

            // First build a list of wrapped lines
            var wrappedLines = new List<LineWrapper>();

            // tokenType is passed by ref so multi-line constructs (e.g. block comments)
            // carry their state from one line to the next.
            UniversalTokenizer.TokenType tokenType = 0;
            int contextFlags = 0;

            for (int lineNumber = 0; lineNumber < lines.Length; lineNumber++)
            {
                var line = lines[lineNumber];
                var tokens = tokenizer.TokenizeLine(line, ref tokenType, contextFlags);
                var wrappedTokens = new List<TokenWrapper>();
                foreach (var token in tokens)
                {
                    var wrappedToken = new TokenWrapper(token, token.GetText(line));
                    wrappedTokens.Add(wrappedToken);
                }

                wrappedLines.Add(new LineWrapper(lineNumber + 1, wrappedTokens));
            }

            // Now dump all the wrapped lines
            foreach (var wrappedLine in wrappedLines)
            {
                foreach (var token in wrappedLine.Tokens)
                {
                    // Fix: HTML-encode the token text so source containing '<', '>' or '&'
                    // cannot break (or inject markup into) the generated HTML.
                    string encodedValue = WebUtility.HtmlEncode(token.Value);
                    string cssClass = TranslateTokenType(token.Type);
                    if (cssClass != null)
                        result.Append($"<span class=\"{cssClass}\">{encodedValue}</span>");
                    else
                        result.Append(encodedValue);
                }

                result.AppendLine();
            }

            return result.ToString();
        }

        /// <summary>
        /// Maps a token type to its highlighting CSS class, or null when the token
        /// should be emitted without a surrounding span.
        /// </summary>
        private static string TranslateTokenType(UniversalTokenizer.TokenType tokenType)
        {
            switch (tokenType)
            {
                case UniversalTokenizer.TokenType.Keyword:
                    return "keyword";
                case UniversalTokenizer.TokenType.CharacterLiteral:
                case UniversalTokenizer.TokenType.StringLiteral:
                    return "literal";
                case UniversalTokenizer.TokenType.Comment:
                    return "comment";
                default:
                    return null;
            }
        }
    }
}