Commit e752c3e

Add function token count comments
1 parent 8d6db10 commit e752c3e

3 files changed: +48 -5 lines

pico_annotate.py (+33)

@@ -0,0 +1,33 @@
+from textwrap import indent
+
+# Calls function for the descendants of node, then for node
+def apply_node_tree(node, func):
+    if hasattr(node, "extra_children"):
+        for child in reversed(node.extra_children):
+            apply_node_tree(child, func)
+    for child in reversed(node.children):
+        apply_node_tree(child, func)
+    func(node)
+
+def annotate_code(ctxt, source, root, fail=True):
+    def comment_before_function(node, source=source):
+        if node.type==NodeType.function:
+            tokens, errors = tokenize(PicoSource("temp", source.text[node.idx:node.endidx]), ctxt)
+            if fail and errors:
+                raise Exception("\n".join(map(str, errors)))
+            token_count = count_tokens(tokens)
+            char_count = len(source.text[node.idx:node.endidx])
+            print("SOURCE:")
+            print(source.text[node.idx:node.endidx])
+            rename_tokens(ctxt, node, True)
+            minified_source = minify_code(source.text[node.idx:node.endidx], ctxt, node, minify=True)
+            min_char_count = len(minified_source)
+            prefix_newline = "\n" if node.idx > 0 and source.text[node.idx-1] != "\n" else ""
+            source.text = f'{source.text[:node.idx]}{prefix_newline}-- T:{token_count} C:{char_count} minC:{min_char_count}\n-- minified source:\n{indent(minified_source,"-- ")}\n{source.text[node.idx:]}'
+    apply_node_tree(root, comment_before_function)
+
+from pico_tokenize import tokenize, count_tokens
+from pico_parse import NodeType
+from pico_process import PicoSource
+from pico_minify import minify_code
+from pico_rename import rename_tokens
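
Aside (illustration, not part of the commit): apply_node_tree does a post-order walk, visiting a node's children before the node itself, handling "extra_children" first when present and taking siblings in reversed order. The sketch below shows that order with a hypothetical FakeNode stand-in that only supplies the attributes apply_node_tree reads; it assumes the pico_annotate.py added here is importable.

# Illustrative sketch only; FakeNode is a made-up stand-in.
from pico_annotate import apply_node_tree

class FakeNode:
    def __init__(self, name, children=()):
        self.name = name
        self.children = list(children)  # no extra_children attribute, so that branch is skipped

root = FakeNode("root", [FakeNode("a", [FakeNode("a1")]), FakeNode("b")])
apply_node_tree(root, lambda node: print(node.name))
# Prints children before parents, siblings in reverse source order: b, a1, a, root

For reference, the comment annotate_code splices in ahead of each function has the shape "-- T:<tokens> C:<chars> minC:<minified chars>", followed by "-- minified source:" and the minified body with each line prefixed "-- ", exactly as built by the f-string above.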

pico_process.py (+10 -4)

@@ -143,16 +143,18 @@ def __str__(m):
 def print_token_count(num_tokens, **kwargs):
     print_size("tokens", num_tokens, 8192, **kwargs)
 
-def process_code(ctxt, source, input_count=False, count=False, lint=False, minify=False, rename=False, unminify=False, fail=True, want_count=True):
+def process_code(ctxt, source, input_count=False, count=False, lint=False, minify=False, rename=False, unminify=False, fail=True, want_count=True, annotate=False):
     need_lint = lint not in (None, False)
     need_minify = minify not in (None, False)
     need_rename = rename not in (None, False)
     need_unminify = unminify not in (None, False)
+    need_annotate = annotate not in (None, False)
 
-    if not need_lint and not need_minify and not need_unminify and not (want_count and (count or input_count)):
+    need_parse = need_lint or need_minify or need_unminify or need_annotate
+
+    if not need_parse and not (want_count and (count or input_count)):
         return True, ()
-
-    need_parse = need_lint or need_minify or need_unminify
+
     need_all_comments = need_unminify or (need_minify and minify_needs_comments(minify))
 
     ok = False
@@ -182,6 +184,9 @@ def process_code(ctxt, source, input_count=False, count=False, lint=False, minif
         new_tokens = root.get_tokens() if need_parse else tokens
         print_token_count(count_tokens(new_tokens), handler=count)
 
+    if annotate:
+        annotate_code(ctxt, source, root)
+
     if fail and errors:
         throw("\n".join(map(str, errors)))
     return ok, errors
@@ -199,6 +204,7 @@ def echo_code(code, echo=True):
 from pico_lint import lint_code
 from pico_minify import minify_code, minify_needs_comments
 from pico_unminify import unminify_code
+from pico_annotate import annotate_code
 from pico_rename import rename_tokens
 
 # re-export some things for examples/etc.
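
Aside (illustration, not from the commit): with the gating reworked as above, a run that only requests annotation still parses the source, while a run that requests nothing still takes the early return. A simplified, self-contained sketch of that logic (ignoring the want_count/count part of the early-return test):

# Hypothetical helper mirroring the need_* gating in process_code after this change.
def should_parse(lint=False, minify=False, unminify=False, annotate=False):
    need = lambda flag: flag not in (None, False)
    return need(lint) or need(minify) or need(unminify) or need(annotate)

assert should_parse(annotate=True)   # annotate alone now forces a parse
assert not should_parse()            # nothing requested: early-return path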

shrinko8.py (+5 -1)

@@ -52,6 +52,9 @@ def ParsableCountHandler(prefix, name, size, limit):
 pgroup.add_argument("--no-count-compress", action="store_true", help="do not compress the cart just to print the compressed size")
 pgroup.add_argument("--no-count-tokenize", action="store_true", help="do not tokenize the cart just to print the token count")
 
+pgroup = parser.add_argument_group("annotate options")
+pgroup.add_argument("-a", "--annotate", action="store_true", help="enable annotating source during output")
+
 pgroup = parser.add_argument_group("script options")
 pgroup.add_argument("-s", "--script", help="manipulate the cart via a custom python script - see README for api details")
 pgroup.add_argument("--script-args", nargs=argparse.REMAINDER, help="send arguments directly to --script", default=())
@@ -195,7 +198,8 @@ def main_inner(raw_args):
     ok, errors = process_code(ctxt, src, input_count=args.input_count, count=args.count,
                               lint=args.lint, minify=args.minify, rename=args.rename,
                               unminify=args.unminify,
-                              fail=False, want_count=not args.no_count_tokenize)
+                              fail=False, want_count=not args.no_count_tokenize,
+                              annotate=args.annotate)
     if errors:
         print("Lint errors:" if ok else "Compilation errors:")
         for error in errors:
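
Aside (illustration, not from the commit): the new -a/--annotate switch is a plain store_true option, so args.annotate defaults to False, becomes True when the flag is given, and is forwarded to process_code as annotate=args.annotate. A throwaway sketch of just that wiring, using a fresh parser rather than the real shrinko8 one:

import argparse

parser = argparse.ArgumentParser()
pgroup = parser.add_argument_group("annotate options")
pgroup.add_argument("-a", "--annotate", action="store_true",
                    help="enable annotating source during output")

assert parser.parse_args([]).annotate is False
assert parser.parse_args(["-a"]).annotate is True   # same as --annotate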
