# For license information see LICENSE.txt

# Meta
-__version__ = '2.2'
-__version_info__ = (2, 2)
-__license__ = "GPLv3" # See LICENSE.txt
+__version__ = '2.2.1'
+__version_info__ = (2, 2, 1)
+__license__ = "GPLv3"  # See LICENSE.txt
__author__ = 'Dan McDougall <[email protected]>'

# TODO: Add the ability to mark variables, functions, classes, and methods for non-obfuscation.
# TODO: Add the ability to selectively obfuscate identifiers inside strings (for metaprogramming stuff).
# TODO: Add the ability to use a config file instead of just command line args.
# TODO: Add the ability to save a file that allows for de-obfuscation later (or at least the ability to debug).
# TODO: Separate out the individual functions of minification so that they can be chosen selectively like the obfuscation functions.
+# TODO: Handle conflicting file entries on the Windows operating system.

__doc__ = """\
**Python Minifier:** Reduces the size of (minifies) Python code for use on
embedded platforms.
"""

# Import built-in modules
-import os, sys, re, io
+import os
+import sys
+import re
+import io
+
from optparse import OptionParser
from collections import Iterable

except ImportError:
    pass

+# os.name identifies the platform: 'nt' means Windows
+os_name = os.name
+
# Regexes
multiline_indicator = re.compile('\\\\(\s*#.*)?\n')
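# (This regex matches a literal backslash at the end of a line, optionally
# followed by whitespace and a trailing comment, through the newline;
# presumably it is used to detect explicit line continuations.)
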
# The test.+() functions below are for testing pyminifier...
+
+
def test_decorator(f):
    """Decorator that does nothing"""
    return f

+
def test_reduce_operators():
    """Test the case where an operator such as an open paren starts a line"""
-    (a, b) = 1, 2 # The indentation level should be preserved
+    (a, b) = 1, 2  # The indentation level should be preserved
    pass

+
def test_empty_functions():
    """
    This is a test function.
    This should be replaced with 'def test_empty_functions(): pass'
    """

+
class test_class(object):
    "Testing indented decorators"

    @test_decorator
    def test_function(self):
        pass

+
def test_function():
    """
    This function encapsulates the edge cases to prevent them from invading the
@@ -120,8 +134,8 @@ def test_function():
    # This tests method obfuscation:
    method_obfuscate = test_class()
    method_obfuscate.test_function()
-    foo = ("The # character in this string should " # This comment
-           "not result in a syntax error") # ...and this one should go away
+    foo = ("The # character in this string should "  # This comment
+           "not result in a syntax error")  # ...and this one should go away
    test_multi_line_list = [
        'item1',
        'item2',
@@ -135,13 +149,14 @@ def test_function():
    # It may seem strange but the code below tests our docstring removal code.
    test_string_inside_operators = imaginary_function(
        "This string was indented but the tokenizer won't see it that way."
-    ) # To understand how this could mess up docstring removal code see the
+    )  # To understand how this could mess up docstring removal code see the
    # minification.minification.remove_comments_and_docstrings() function
    # starting at this line:
    # "elif token_type == tokenize.STRING:"
    # This tests remove_extraneous_spaces():
-    this_line_has_leading_indentation = '''<--That extraneous space should be
-                                            removed''' # But not these spaces
+    this_line_has_leading_indentation = '''<--That extraneous space should be
+                                            removed'''  # But not these spaces
+

def is_iterable(obj):
    """
@@ -152,6 +167,7 @@ def is_iterable(obj):
        return False
    return isinstance(obj, Iterable)

+
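# A usage sketch for is_iterable(); hedged, since the docstring and the guard
# above the 'return False' are elided here (that branch appears to exclude
# string-like objects):
#     is_iterable([1, 2, 3])  # True
#     is_iterable('abc')      # False, assuming the elided string guard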
def pyminify(options, files):
    """
    Given an *options* object (from `optparse.OptionParser` or similar),
@@ -177,7 +193,7 @@ def pyminify(options, files):
            sys.exit(1)
        # Make our .pyz:
        compression.zip_pack(files, options)
-        return None # Make sure we don't do anything else
+        return None  # Make sure we don't do anything else
    # Read in our prepend text (if any)
    prepend = None
    if options.prepend:
@@ -195,8 +211,8 @@ def pyminify(options, files):
    # obfuscation is stated)
    if options.use_nonlatin and not any(obfuscations):
        options.obfuscate = True
-    if len(files) > 1: # We're dealing with more than one file
-        name_generator = None # So we can tell if we need to obfuscate
+    if len(files) > 1:  # We're dealing with more than one file
+        name_generator = None  # So we can tell if we need to obfuscate
        if any(obfuscations):
            # Put together the name generator that will be used for all
            # obfuscation functions:
            identifier_length = int(options.replacement_length)
@@ -212,19 +228,22 @@ def pyminify(options, files):
            else:
                name_generator = obfuscate.obfuscation_machine(
                    identifier_length=identifier_length)
-            table = [{}]
-        cumulative_size = 0 # For size reduction stats
-        cumulative_new = 0 # Ditto
+            table = [{}]
+        cumulative_size = 0  # For size reduction stats
+        cumulative_new = 0  # Ditto
        for sourcefile in files:
            # Record how big the file is so we can compare afterwards
            filesize = os.path.getsize(sourcefile)
            cumulative_size += filesize
            # Get the module name from the path
            module = os.path.split(sourcefile)[1]
            module = ".".join(module.split('.')[:-1])
-            source = open(sourcefile).read()
+            if os_name in ('nt',):
+                source = open(sourcefile, encoding="utf8").read()
+            else:
+                source = open(sourcefile).read()
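            # (Python 3's open() defaults to the locale's preferred encoding,
            # which on Windows is often cp1252, so UTF-8 sources could raise
            # UnicodeDecodeError there without the explicit encoding. The
            # encoding keyword assumes Python 3; Python 2's built-in open()
            # does not accept it.)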
            tokens = token_utils.listified_tokenizer(source)
-            if not options.nominify: # Perform minification
+            if not options.nominify:  # Perform minification
                source = minification.minify(tokens, options)
            # Have to re-tokenize for obfuscation (it is quick):
            tokens = token_utils.listified_tokenizer(source)
@@ -238,7 +257,7 @@ def pyminify(options, files):
                    table=table
                )
            # Convert back to text
-            result = ''
+            result = '# -*- coding: utf-8 -*-\n'
            if prepend:
                result += prepend
            result += token_utils.untokenize(tokens)
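            # (The line prepended to result above is a PEP 263 coding
            # declaration, so the minified output still parses under Python 2
            # even when it contains non-ASCII text, e.g. non-Latin obfuscated
            # identifiers.)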
@@ -257,16 +276,21 @@ def pyminify(options, files):
                os.mkdir(options.destdir)
            # Need the path where the script lives for the next steps:
            filepath = os.path.split(sourcefile)[1]
-            path = options.destdir + '/' + filepath # Put everything in destdir
-            f = open(path, 'w')
+            path = options.destdir + '/' + filepath  # Put everything in destdir
+            if os_name in ('nt',):
+                f = open(path, 'w', encoding='utf-8')
+            else:
+                f = open(path, 'w')
            f.write(result)
            f.close()
            new_filesize = os.path.getsize(path)
            cumulative_new += new_filesize
-            percent_saved = round((float(new_filesize) / float(filesize)) * 100, 2) if float(filesize)!= 0 else 0
-            print((
+            percent_saved = round((float(new_filesize) / float(filesize)) * 100, 2) if float(filesize) != 0 else 0
+            print(((
                "{sourcefile} ({filesize}) reduced to {new_filesize} bytes "
-                "({percent_saved}% of original size)").format(**locals()))
+                "({percent_saved}% of original size)").format(**locals())))
+            #p_saved = round(
+            #    (float(cumulative_new) / float(cumulative_size) * 100), 2)
        if cumulative_size:
            p_saved = round(
                (float(cumulative_new) / float(cumulative_size) * 100), 2)
@@ -279,11 +303,14 @@ def pyminify(options, files):
        module = os.path.split(_file)[1]
        module = ".".join(module.split('.')[:-1])
        filesize = os.path.getsize(_file)
-        source = open(_file).read()
+        if os_name in ('nt',):
+            source = open(_file, encoding='utf-8').read()
+        else:
+            source = open(_file).read()
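        # (Same Windows-specific encoding workaround as in the multi-file
        # branch above.)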
        # Convert the tokens from a tuple of tuples to a list of lists so we can
        # update in-place.
        tokens = token_utils.listified_tokenizer(source)
-        if not options.nominify: # Perform minification
+        if not options.nominify:  # Perform minification
            source = minification.minify(tokens, options)
        # Convert back to tokens in case we're obfuscating
        tokens = token_utils.listified_tokenizer(source)
@@ -316,9 +343,18 @@ def pyminify(options, files):
            f.write(result)
            f.close()
            new_filesize = os.path.getsize(options.outfile)
-            percent_saved = round(float(new_filesize)/float(filesize) * 100, 2)
+            percent_saved = round(float(new_filesize) / float(filesize) * 100, 2)
            print((
                "{_file} ({filesize}) reduced to {new_filesize} bytes "
                "({percent_saved}% of original size)".format(**locals())))
        else:
-            print(result)
+            try:
+                import pprint
+                pprint.pprint(result)
+            except Exception as inst:
+                print(inst)
+                pass
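            # (Behavior note: pprint.pprint() of a str prints its repr, quoted
            # and escaped, unlike the plain print(result) it replaces; and
            # since pprint is in the standard library, the except branch
            # should not normally trigger.)

# A hedged usage sketch of pyminify(). The option names below are the ones
# referenced in the code above; any others carried by the real optparse-built
# options object are omitted, and the values here are illustrative assumptions:
#
#     from types import SimpleNamespace
#     opts = SimpleNamespace(pyz=None, prepend=None, use_nonlatin=False,
#                            obfuscate=False, replacement_length=1,
#                            nominify=False, destdir='./minified', outfile=None)
#     pyminify(opts, ['my_module.py'])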