|
1 | | -{[ctx; code; stringify] |
2 | | - if [-10h ~ type ctx; |
3 | | - ctx: enlist ctx]; |
4 | | - toString: {[data] |
5 | | - text : .Q.s data; |
6 | | - : $[all text in " \r\n"; |
7 | | - .Q.s1[data] , "\n"; |
8 | | - text]; |
9 | | - }; |
10 | | - removeMultilineComments: {[text] |
11 | | - text: "\n" , text; |
12 | | - lines: (where text = "\n") cut text; |
13 | | - potentialStart: where lines like "\n/*"; |
14 | | - start: potentialStart where all each (2_/:lines potentialStart) in "\t "; |
15 | | - potentialEnd: where lines like "\n\\*"; |
16 | | - end: 1 + potentialEnd where all each (2_/:lines potentialEnd) in "\t "; |
17 | | - lines[0]: 1 _ lines[0]; |
18 | | - boundaries: (`start,' start), (`end,' end); |
19 | | - boundaries: boundaries iasc boundaries[;1]; |
20 | | - if [`end ~ first first boundaries; |
21 | | - : "\n" sv (boundaries[0;1] - 1) # lines]; |
22 | | - filteredList: (); |
23 | | - lastBoundary: `end; |
24 | | - index: 0; |
25 | | - do [count boundaries; |
26 | | - if [lastBoundary <> first boundaries index; |
27 | | - lastBoundary: first boundaries index; |
28 | | - filteredList,: enlist boundaries index]; |
29 | | - index+: 1]; |
30 | | - result: raze first each 2 cut raze each (0, filteredList[;1]) cut lines; |
31 | | - : $[result ~ (); |
32 | | - ""; |
33 | | - result]; |
34 | | - }; |
35 | | - tokenize: {[text] |
36 | | - parsed: -4!text; |
37 | | - cmtInd: where ((1 < count each parsed) & parsed[;0] in "/ \t\n") & not parsed ~\: "/:"; |
38 | | - parsed[cmtInd] : (parsed[cmtInd]?\:"/")#'parsed[cmtInd]; |
39 | | - parsed where (0 <> count each parsed) |
40 | | - }; |
41 | | - stripTrailingSemi: {[tokenize; str] |
42 | | - str: tokenize str; |
43 | | - $[ ("" ~ str) or (() ~ str); |
44 | | - ""; |
45 | | - {(neg sum &\[reverse x in "\r\n; \t"]) _ x} trim raze str] |
46 | | - } tokenize; |
47 | | - splitExpression: {[expr] |
48 | | - tokens: -4!expr; |
49 | | - newlines: where enlist["\n"] ~/: tokens; |
50 | | - : "c"$raze each (0 , 1 + newlines where not tokens[1 + newlines] in enlist each " \t\r\n") _ tokens |
51 | | - }; |
52 | | - fixSpecialSyntax: {[stripTrailingSemi; expr] |
53 | | - escape: {[str] |
54 | | - chars: (`char$til 255)!(string each `char$til 255); |
55 | | - chars[("\\";"\"";"\t";"\n";"\r")]: ("\\\\";"\\\"";"\\t";"\\n";"\\r"); |
56 | | - : raze chars str; |
57 | | - }; |
58 | | - $[ |
59 | | - expr like "[kq])*"; |
60 | | - "value \"",(2#expr), escape[stripTrailingSemi 2_expr], "\";"; |
61 | | - expr like "\\*"; |
62 | | - "system \"", escape[trim 1_expr], "\";"; |
63 | | - {s:rtrim first l:(0,x ss "::")_x; (1<count l) & (all s in .Q.an) and 0 < count s} expr; |
64 | | - "value \"", escape[trim expr], "\";"; |
65 | | - expr like "[a-zA-Z])*"; |
66 | | - "value \"",(2#expr), escape[2_expr], "\";"; |
67 | | - expr] |
68 | | - } stripTrailingSemi; |
69 | | - wrapLines: {[fixSpecialSyntax; expn] |
70 | | - tokenizeAndSplit: {[expn] |
71 | | - : $[.z.K <= 3.4; |
72 | | - "\n" vs expn; |
73 | | - [ |
74 | | - tokens: -4!"\n" , expn; |
75 | | - tokens: raze {[token; isMerged] |
76 | | - $[ isMerged; |
77 | | - (enlist "\n"; 1 _ token); |
78 | | - enlist token] |
79 | | - } ./: flip (tokens; tokens like "\n/*"); |
80 | | - "" sv/: 1_/:(where tokens ~\: enlist "\n") cut tokens]]; |
81 | | - }; |
82 | | - lines: @[tokenizeAndSplit; |
83 | | - expn; |
84 | | - {[expn; err] "\n" vs expn} expn]; |
85 | | - lines:{[acc; line] |
86 | | - $[ 0 = count acc; |
87 | | - acc,: enlist line; |
88 | | - (last[acc] like "[a-zA-Z])*") and line[0] in " \t/"; |
89 | | - [ acc[count[acc] - 1]: last[acc],"\n",line; |
90 | | - acc]; |
91 | | - acc,: enlist line] |
92 | | - }/[();] lines; |
93 | | - : "\n" sv fixSpecialSyntax each lines; |
94 | | - } fixSpecialSyntax; |
95 | | - evalInContext: {[ctx; expressions] |
96 | | - cachedCtx: string system "d"; |
97 | | - system "d ", ctx; |
98 | | - index: 0; |
99 | | - do [count expressions; |
100 | | - expr: expressions index; |
101 | | - isLastLine: (index + 1) = count expressions; |
102 | | - if ["\n" ~ last expr; |
103 | | - expr: -1_expr]; |
104 | | - prefix: ";[::;"; |
105 | | - suffix: $[(not isLastLine) and not ";" ~ last expr; ";]"; "]"]; |
106 | | - expr: prefix , expr , suffix; |
107 | | - result: .Q.trp[{[expr] `result`errored`error`backtrace!({$[x ~ (::); (::); x]} value expr; 0b; ""; ())}; |
108 | | - expr; |
109 | | - {[suffix; prefix; err; backtrace] |
110 | | - if [err ~ enlist " "; |
111 | | - err: "syntax error"]; |
112 | | - userCode: (-1 + last where (.Q.trp ~ first first @) each backtrace) # backtrace; |
113 | | - userCode[;3]: reverse 1 + til count userCode; |
114 | | - userCode[-1 + count userCode; 1; 3]: (neg count suffix) _ (count prefix) _ userCode[-1 + count userCode; 1; 3]; |
115 | | - userCode[-1 + count userCode; 2]-: count prefix; |
116 | | - (!) . flip ( |
117 | | - (`result; ::); |
118 | | - (`errored; 1b); |
119 | | - (`error; err); |
120 | | - (`backtrace; .Q.sbt userCode); |
121 | | - (`base64; 0b)) |
122 | | - }[suffix; prefix]]; |
123 | | - if [isLastLine or result`errored; |
124 | | - system "d ", cachedCtx; |
125 | | - : result]; |
126 | | - index +: 1]; |
127 | | - }; |
128 | | - result: evalInContext[ctx; splitExpression stripTrailingSemi wrapLines removeMultilineComments code]; |
129 | | - if[result `errored; :result]; |
130 | | - if[type[result[`result]] = 99h; |
131 | | - if[`output in key result[`result]; |
132 | | - if[type[result[`result][`output]] = 99h; |
133 | | - if[`bytes in key result[`result][`output]; |
134 | | - result[`base64]:1b; result[`result]: .Q.btoa result[`result][`output][`bytes]; :result]]]]; |
135 | | - if [stringify; result[`result]: toString result `result]; |
136 | | - result |
137 | | - } |
/ Replacement revision (the "+" side of this diff): evaluate q source `code`
/ inside namespace `ctx` and return a dictionary with keys result / errored /
/ error / backtrace (and base64 where applicable). `returnFormat` selects
/ post-processing: "text" renders via .Q.s; "structuredText" emits a JSON
/ column description for tables/dictionaries; any other value returns the
/ raw evaluated result.
| 1 | +{[ctx; code; returnFormat]
/ Accept ctx as a char atom (-10h) or string; normalize to string for "d ",ctx.
| 2 | + if [-10h ~ type ctx;
| 3 | + ctx: enlist ctx];
/ Render a value with .Q.s; if the console-sized rendering is all whitespace
/ (e.g. console too small), fall back to .Q.s1 plus a newline.
| 4 | + toString: {[data]
| 5 | + text : .Q.s data;
| 6 | + : $[all text in " \r\n";
| 7 | + .Q.s1[data] , "\n";
| 8 | + text];
| 9 | + };
/ Remove q multi-line comment blocks: opener is a line whose first character
/ is "/" followed only by tabs/spaces, closer a matching line beginning "\".
/ Text outside the blocks is kept and rejoined with "\n".
| 10 | + removeMultilineComments: {[text]
| 11 | + text: "\n" , text;
| 12 | + lines: (where text = "\n") cut text;
| 13 | + potentialStart: where lines like "\n/*";
| 14 | + start: potentialStart where all each (2_/:lines potentialStart) in "\t ";
| 15 | + potentialEnd: where lines like "\n\\*";
| 16 | + end: 1 + potentialEnd where all each (2_/:lines potentialEnd) in "\t ";
| 17 | + lines[0]: 1 _ lines[0];
| 18 | + boundaries: (`start,' start), (`end,' end);
| 19 | + boundaries: boundaries iasc boundaries[;1];
/ A leading `end (stray "\" terminator) ends the script body: keep only the
/ lines before it.
| 20 | + if [`end ~ first first boundaries;
| 21 | + : "\n" sv (boundaries[0;1] - 1) # lines];
/ Collapse runs of repeated start/start or end/end markers (keep first of
/ each run) so the cut below alternates keep/drop segments.
| 22 | + filteredList: ();
| 23 | + lastBoundary: `end;
| 24 | + index: 0;
| 25 | + do [count boundaries;
| 26 | + if [lastBoundary <> first boundaries index;
| 27 | + lastBoundary: first boundaries index;
| 28 | + filteredList,: enlist boundaries index];
| 29 | + index+: 1];
| 30 | + result: raze first each 2 cut raze each (0, filteredList[;1]) cut lines;
| 31 | + : $[result ~ ();
| 32 | + "";
| 33 | + result];
| 34 | + };
/ Lex with -4! and truncate trailing line comments: multi-char tokens whose
/ first char is "/" or whitespace (the lexer folds a comment into preceding
/ whitespace) are cut at their "/", sparing the "/:" adverb; drop empties.
| 35 | + tokenize: {[text]
| 36 | + parsed: -4!text;
| 37 | + cmtInd: where ((1 < count each parsed) & parsed[;0] in "/ \t\n") & not parsed ~\: "/:";
| 38 | + parsed[cmtInd] : (parsed[cmtInd]?\:"/")#'parsed[cmtInd];
| 39 | + parsed where (0 <> count each parsed)
| 40 | + };
/ Tokenize, rejoin, and strip trailing semicolons/whitespace; "" for empty.
/ (Partial application below binds the tokenize helper as first argument.)
| 41 | + stripTrailingSemi: {[tokenize; str]
| 42 | + str: tokenize str;
| 43 | + $[ ("" ~ str) or (() ~ str);
| 44 | + "";
| 45 | + {(neg sum &\[reverse x in "\r\n; \t"]) _ x} trim raze str]
| 46 | + } tokenize;
/ Split into top-level expressions: cut at newlines whose following token is
/ not whitespace (indented lines continue the previous expression).
| 47 | + splitExpression: {[expr]
| 48 | + tokens: -4!expr;
| 49 | + newlines: where enlist["\n"] ~/: tokens;
| 50 | + : "c"$raze each (0 , 1 + newlines where not tokens[1 + newlines] in enlist each " \t\r\n") _ tokens
| 51 | + };
/ Rewrite expressions that `value` cannot evaluate directly: "q)"/"k)" (and
/ other single-letter "x)" prefixes), "\" system commands, and "name::..."
/ global amends are re-quoted into value/system string calls. escape[]
/ backslash-escapes quotes, backslashes and control whitespace so the text
/ survives embedding in a double-quoted q string.
| 52 | + fixSpecialSyntax: {[stripTrailingSemi; expr]
| 53 | + escape: {[str]
| 54 | + chars: (`char$til 255)!(string each `char$til 255);
| 55 | + chars[("\\";"\"";"\t";"\n";"\r")]: ("\\\\";"\\\"";"\\t";"\\n";"\\r");
| 56 | + : raze chars str;
| 57 | + };
| 58 | + $[
| 59 | + expr like "[kq])*";
| 60 | + "value \"",(2#expr), escape[stripTrailingSemi 2_expr], "\";";
| 61 | + expr like "\\*";
| 62 | + "system \"", escape[trim 1_expr], "\";";
| 63 | + {s:rtrim first l:(0,x ss "::")_x; (1<count l) & (all s in .Q.an) and 0 < count s} expr;
| 64 | + "value \"", escape[trim expr], "\";";
| 65 | + expr like "[a-zA-Z])*";
| 66 | + "value \"",(2#expr), escape[2_expr], "\";";
| 67 | + expr]
| 68 | + } stripTrailingSemi;
/ Split code into lines, fold continuation lines (starting with whitespace or
/ "/") into a preceding "x)"-prefixed line, then fixSpecialSyntax each. On
/ q > 3.4 the lexer is used so newlines inside strings/comments do not split;
/ the protected @[...] call falls back to a plain "\n" vs split on error.
| 69 | + wrapLines: {[fixSpecialSyntax; expn]
| 70 | + tokenizeAndSplit: {[expn]
| 71 | + : $[.z.K <= 3.4;
| 72 | + "\n" vs expn;
| 73 | + [
| 74 | + tokens: -4!"\n" , expn;
| 75 | + tokens: raze {[token; isMerged]
| 76 | + $[ isMerged;
| 77 | + (enlist "\n"; 1 _ token);
| 78 | + enlist token]
| 79 | + } ./: flip (tokens; tokens like "\n/*");
| 80 | + "" sv/: 1_/:(where tokens ~\: enlist "\n") cut tokens]];
| 81 | + };
| 82 | + lines: @[tokenizeAndSplit;
| 83 | + expn;
| 84 | + {[expn; err] "\n" vs expn} expn];
| 85 | + lines:{[acc; line]
| 86 | + $[ 0 = count acc;
| 87 | + acc,: enlist line;
| 88 | + (last[acc] like "[a-zA-Z])*") and line[0] in " \t/";
| 89 | + [ acc[count[acc] - 1]: last[acc],"\n",line;
| 90 | + acc];
| 91 | + acc,: enlist line]
| 92 | + }/[();] lines;
| 93 | + : "\n" sv fixSpecialSyntax each lines;
| 94 | + } fixSpecialSyntax;
/ Evaluate each expression in namespace ctx (\d switched, then restored).
/ Each expression is wrapped as ";[::;expr]" (no-op sequencing wrapper) and
/ run under .Q.trp; on error the wrapper frames are trimmed from the
/ backtrace and the last user frame's text/offset shifted back by the prefix
/ length so reported positions match user source. Stops at first error or
/ after the last expression.
| 95 | + evalInContext: {[ctx; expressions]
| 96 | + cachedCtx: string system "d";
| 97 | + system "d ", ctx;
| 98 | + index: 0;
| 99 | + do [count expressions;
| 100 | + expr: expressions index;
| 101 | + isLastLine: (index + 1) = count expressions;
| 102 | + if ["\n" ~ last expr;
| 103 | + expr: -1_expr];
| 104 | + prefix: ";[::;";
| 105 | + suffix: $[(not isLastLine) and not ";" ~ last expr; ";]"; "]"];
| 106 | + expr: prefix , expr , suffix;
| 107 | + result: .Q.trp[{[expr] `result`errored`error`backtrace!({$[x ~ (::); (::); x]} value expr; 0b; ""; ())};
| 108 | + expr;
| 109 | + {[suffix; prefix; err; backtrace]
| 110 | + if [err ~ enlist " ";
| 111 | + err: "syntax error"];
| 112 | + userCode: (-1 + last where (.Q.trp ~ first first @) each backtrace) # backtrace;
| 113 | + userCode[;3]: reverse 1 + til count userCode;
| 114 | + userCode[-1 + count userCode; 1; 3]: (neg count suffix) _ (count prefix) _ userCode[-1 + count userCode; 1; 3];
| 115 | + userCode[-1 + count userCode; 2]-: count prefix;
| 116 | + (!) . flip (
| 117 | + (`result; ::);
| 118 | + (`errored; 1b);
| 119 | + (`error; err);
| 120 | + (`backtrace; .Q.sbt userCode);
| 121 | + (`base64; 0b))
| 122 | + }[suffix; prefix]];
| 123 | + if [isLastLine or result`errored;
| 124 | + system "d ", cachedCtx;
| 125 | + : result];
| 126 | + index +: 1];
| 127 | + };
/ Global type-name lookup tables for the structured-text output:
/ i_PRIMCODE is indexed by the negated type code of an atom; i_NONPRIMCODE by
/ the positive type code of a list/enum/compound/table/dictionary/function
/ value (the long `enum runs cover the enumeration type-code range).
| 128 | + .axq.i_PRIMCODE: `undefined`boolean`guid`undefined`byte`short`int`long`real`float`char`symbol`timestamp`month`date`datetime`timespan`minute`second`time`enum;
| 129 | + .axq.i_NONPRIMCODE:
| 130 | + `general`booleans`guids`undefined`bytes`shorts`ints`longs`reals`floats`chars`symbols`timestamps`months`dates`datetimes`timespans`minutes`seconds`times,
| 131 | + `enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum,
| 132 | + `enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum,
| 133 | + `enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum`enum,
| 134 | + `compoundGeneral`compoundBoolean`compoundGuid`compoundUndefined`compoundByte`compoundShort`compoundInt`compoundLong`compoundReal`compoundFloat,
| 135 | + `compoundChar`compoundSymbol`compoundTimestamp`compoundMonth`compoundDate`compoundDatetime`compoundTimespan`compoundMinute`compoundSecond,
| 136 | + `compoundTime`compoundEnum`table`dictionary`lambda`unary`binary`ternary`projection`composition,
| 137 | + `$("f'";"f/";"f\\";"f':";"f/:";"f\\:";"dynamicload");
/ Drop a single trailing newline left by toString / .Q.s rendering.
| 138 | + removeTrailingNewline: {[text]
| 139 | + if ["\n" = last text;
| 140 | + text: -1 _ text];
| 141 | + text
| 142 | + };
/ Describe one column for the structured-text payload: name, display type,
/ per-cell stringified values (trailing newline removed), and the ascending
/ sort permutation (`order`); isKey and a non-null attribute are added when
/ present.
/ NOTE(review): the `1 ~ count data` branch enlists before iasc — presumably
/ a guard for single-cell columns; confirm the intent against callers.
| 143 | + generateColumns:{[removeTrailingNewline; toString; originalType; isAtom; isKey; data; name]
| 144 | + types: $[
| 145 | + isAtom;
| 146 | + originalType;
| 147 | + originalType ~ `chars;
| 148 | + `chars;
| 149 | + .axq.i_NONPRIMCODE type data];
| 150 | + values: ('[removeTrailingNewline; toString] each data);
| 151 | + order: $[1 ~ count data; iasc enlist data; iasc data];
| 152 | + returnDictionary: `name`type`values`order!(name;types;values;order);
| 153 | + if[isKey; returnDictionary[`isKey]: isKey];
| 154 | + if[attr[data] <> `; returnDictionary[`attributes]: attr data];
| 155 | + :returnDictionary
| 156 | + }[removeTrailingNewline;toString];
/ Column descriptors for every column of an in-memory table; partitioned
/ (.Q.qp ~ 1b) and splayed (.Q.qp ~ 0b) tables are rejected with an error.
| 157 | + generateTableColumns:{[generateColumns; originalType; isAtom; isKey; data]
| 158 | + if [.Q.qp data;
| 159 | + ' "Partitioned tables cannot be displayed in this view"];
| 160 | + if [0b ~ .Q.qp data;
| 161 | + ' "This view is not supported for splayed tables"];
| 162 | + generateColumns[originalType; isAtom; isKey] ./: flip (value; key) @\: flip data
| 163 | + }[generateColumns];
/ Serialize a table / dictionary / keyed table to JSON `count`columns.
/ Keyed tables emit key columns (isKey set) followed by value columns; plain
/ dictionaries emit a "key" and a "values" column; a char atom is promoted
/ to a string first. The trailing empty $[...] branch yields generic null
/ for any other input.
| 164 | + toStructuredText:{[generateTableColumns; generateColumns; data; quantity; isAtom; originalType]
| 165 | + if[(type data) ~ 10h; data: enlist data];
| 166 | + isTable: .Q.qt data;
| 167 | + isDict: 99h ~ type data;
| 168 | + columns: $[
| 169 | + isTable and isDict;
| 170 | + raze (generateTableColumns[::;0b;1b;key data]; generateTableColumns[::;0b;0b;value data]);
| 171 | + isDict;
| 172 | + (generateColumns[::;0b;1b;key data;"key"]; generateColumns[::;0b;0b;value data;"values"]);
| 173 | + isTable;
| 174 | + generateTableColumns[originalType;isAtom;0b;data];
| 175 | + ];
| 176 | + : .j.j `count`columns!(quantity; columns)
| 177 | + }[generateTableColumns; generateColumns];
/ Display name for any value's type: negative codes via i_PRIMCODE (atoms),
/ non-negative via i_NONPRIMCODE.
| 178 | + typeOf: {$[0>type x; .axq.i_PRIMCODE neg type x; .axq.i_NONPRIMCODE type x]};
/ True for negative type codes (atoms) and for codes above 99h (functions
/ and operators), per `not ... within 0 99h`.
| 179 | + isAtom: {not type[x] within 0 99h};
/ Take up to sampleSize items chosen by sampleFn: "random" (without
/ replacement via neg[n]?, dictionary-aware), "first", or "last"; any other
/ sampleFn signals an error.
| 180 | + sample: {[sampleFn; sampleSize; data]
| 181 | + sampleSize: min (sampleSize; count data);
| 182 | + fn: $[ sampleFn ~ "random";
| 183 | + {[sampleSize; data]
| 184 | + $[ type[data] ~ 99h;
| 185 | + [ ii: neg[sampleSize]?count data;
| 186 | + (key[data] ii)!value[data]ii];
| 187 | + neg[sampleSize]?data]
| 188 | + };
| 189 | + sampleFn ~ "first"; #;
| 190 | + sampleFn ~ "last"; {neg[x]#y};
| 191 | + ' "Unrecognized sample function"];
| 192 | + fn[sampleSize; data]
| 193 | + }
/ NOTE(review): the sample definition above is not terminated with ";", so
/ this next expression's value is juxtaposed onto the lambda and `sample`
/ becomes a projection over the evaluation result. `sample` is unreferenced
/ in this block so behavior appears unaffected, but the missing semicolon
/ looks unintended — confirm upstream.
/ Pipeline: strip block comments -> wrap/fix lines -> strip trailing semis ->
/ split into expressions -> evaluate; propagate errors unchanged.
| 194 | + result: evalInContext[ctx; splitExpression stripTrailingSemi wrapLines removeMultilineComments code];
| 195 | + if[result `errored; :result];
/ Pre-encoded binary output (result`output`bytes) is returned base64-encoded
/ with the base64 flag set, bypassing returnFormat handling.
| 196 | + if[type[result[`result]] = 99h;
| 197 | + if[`output in key result[`result];
| 198 | + if[type[result[`result][`output]] = 99h;
| 199 | + if[`bytes in key result[`result][`output];
| 200 | + result[`base64]:1b; result[`result]: .Q.btoa result[`result][`output][`bytes]; :result]]]];
/ Post-process per returnFormat; any other value returns the raw result.
| 201 | + if [returnFormat ~ "text";
| 202 | + result[`result]: toString result `result];
| 203 | + if [returnFormat ~ "structuredText";
| 204 | + result[`result]: toStructuredText[result `result;count result`result; isAtom result`result; typeOf result`result]];
| 205 | + result
| 206 | + }
0 commit comments