Mirror of https://github.com/shlomif/PySolFC.git, synced 2025-04-05 00:02:29 -04:00
Maintain compatibility with Python 2
This commit is contained in:
parent 0bd7b44ed5
commit 57485fac95
2 changed files with 28 additions and 23 deletions
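
The diff follows the usual six compatibility pattern: console output goes through six.print_ (which accepts the file= keyword on both Python 2 and 3), builtins is imported via six.moves, and the tokenizer call is selected with six.PY2. Below is a minimal sketch of that pattern, assuming nothing beyond what the diff shows; warn() and iter_tokens() are illustrative helper names, not names from the commit.

# Sketch of the six-based pattern used in this commit; warn() and
# iter_tokens() are illustrative helpers, not names from the diff.
import sys
import tokenize

from six import print_, PY2
from six.moves import builtins  # noqa: F401  (builtins on Py3, __builtin__ on Py2)


def warn(msg):
    # six.print_ mirrors the Python 3 print() signature (sep, end, file),
    # so the same call works on Python 2 without a __future__ import.
    print_(msg, file=sys.stderr)


def iter_tokens(readline):
    # tokenize.tokenize() is the token generator only on Python 3;
    # Python 2 provides the same iteration via tokenize.generate_tokens().
    if PY2:
        return tokenize.generate_tokens(readline)
    return tokenize.tokenize(readline)

Because print_ mirrors the built-in print() signature, the existing calls in the script only change name, which keeps the diff mechanical.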
@@ -6,7 +6,7 @@ import sys
 import os
 import time
 # from pprint import pprint
-import builtins
+from six.moves import builtins
 from pysollib.mygettext import fix_gettext
 import pysollib.games
 import pysollib.games.special
@@ -26,6 +26,7 @@ import tokenize
 import operator
 import sys
 import functools
+from six import print_, PY2
 
 # for selftesting
 try:
@@ -201,9 +202,9 @@ msgstr ""
 
 
 def usage(code, msg=''):
-    print(__doc__ % globals(), file=sys.stderr)
+    print_(__doc__ % globals(), file=sys.stderr)
     if msg:
-        print(msg, file=sys.stderr)
+        print_(msg, file=sys.stderr)
     sys.exit(code)
 
 
@@ -435,7 +436,7 @@ class TokenEater:
             if len(data) == 2 and data[0] and data[1]:
                 self.__addentry(tuple(data))
             elif self.__options.verbose:
-                print(_(
+                print_(_(
                     '*** %(file)s:%(lineno)s: incorrect '
                     'ngettext format'
                     ) % {
@@ -455,7 +456,7 @@ class TokenEater:
         else:
             # warn if we see anything else than STRING or whitespace
            if self.__options.verbose:
-                print(_(
+                print_(_(
                     '*** %(file)s:%(lineno)s: Seen unexpected '
                     'token "%(token)s"'
                     ) % {
@@ -481,8 +482,8 @@ class TokenEater:
         timestamp = time.ctime(time.time())
         # The time stamp in the header doesn't have the same format as that
         # generated by xgettext...
-        print(pot_header % {'time': timestamp, 'version': __version__},
-              file=fp)
+        print_(pot_header % {'time': timestamp, 'version': __version__},
+               file=fp)
         # Sort the entries. First sort each particular entry's keys, then
         # sort all the entries by their first item.
         reverse = {}
@@ -513,8 +514,8 @@ class TokenEater:
                 elif options.locationstyle == options.SOLARIS:
                     for filename, lineno in v:
                         d = {'filename': filename, 'lineno': lineno}
-                        print(_('# File: %(filename)s, line: %(lineno)d') % d,
-                              file=fp)
+                        print_(_('# File: %(filename)s, line: %(lineno)d') % d,
+                               file=fp)
                 elif options.locationstyle == options.GNU:
                     # fit as many locations on one line, as long as the
                     # resulting line length doesn't exceeds 'options.width'
@@ -525,23 +526,23 @@ class TokenEater:
                         if len(locline) + len(s) <= options.width:
                             locline = locline + s
                         else:
-                            print(locline, file=fp)
+                            print_(locline, file=fp)
                             locline = "#:" + s
                     if len(locline) > 2:
-                        print(locline, file=fp)
+                        print_(locline, file=fp)
                 if isdocstring:
-                    print('#, docstring', file=fp)
+                    print_('#, docstring', file=fp)
                 if isinstance(k, str):
-                    print('msgid', normalize(k), file=fp)
-                    print('msgstr ""\n', file=fp)
+                    print_('msgid', normalize(k), file=fp)
+                    print_('msgstr ""\n', file=fp)
                 else:
                     # ngettext
                     assert isinstance(k, tuple)
                     assert len(k) == 2
-                    print('msgid', normalize(k[0]), file=fp)
-                    print('msgid_plural', normalize(k[1]), file=fp)
-                    print('msgstr[0] ""', file=fp)
-                    print('msgstr[1] ""\n', file=fp)
+                    print_('msgid', normalize(k[0]), file=fp)
+                    print_('msgid_plural', normalize(k[1]), file=fp)
+                    print_('msgstr[0] ""', file=fp)
+                    print_('msgstr[1] ""\n', file=fp)
 
 
 def main():
@@ -653,8 +654,8 @@ def main():
             options.toexclude = fp.readlines()
             fp.close()
         except IOError:
-            print(_("Can't read --exclude-file: %s") % options.excludefilename,
-                  file=sys.stderr)
+            print_(_("Can't read --exclude-file: %s") %
+                   options.excludefilename, file=sys.stderr)
             sys.exit(1)
     else:
         options.toexclude = []
@@ -684,10 +685,14 @@ def main():
         try:
             eater.set_filename(filename)
             try:
-                for token_info in tokenize.tokenize(fp.readline):
-                    eater(*token_info)
+                if PY2:
+                    for token_info in tokenize.generate_tokens(fp.readline):
+                        eater(*token_info)
+                else:
+                    for token_info in tokenize.tokenize(fp.readline):
+                        eater(*token_info)
             except tokenize.TokenError as e:
-                print('%s: %s, line %d, column %d' % (
-                    e[0], filename, e[1][0], e[1][1]), file=sys.stderr)
+                print_('%s: %s, line %d, column %d' % (
+                    e[0], filename, e[1][0], e[1][1]), file=sys.stderr)
         except tokenize.StopTokenizing:
             pass
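
One subtlety behind the PY2 branch in the last hunk: Python 3's tokenize.tokenize() expects a readline callable that returns bytes, while Python 2's tokenize.generate_tokens() iterates text lines. The diff does not show how fp is opened, so the open() modes in the sketch below are assumptions, and tokens_for() is an illustrative name rather than code from the commit.

import tokenize

from six import PY2


def tokens_for(filename):
    # A sketch only: the commit does not show how the script opens its
    # input files, so the file modes here are assumptions.
    if PY2:
        fp = open(filename, 'r')    # generate_tokens() iterates text lines
        stream = tokenize.generate_tokens(fp.readline)
    else:
        fp = open(filename, 'rb')   # tokenize() expects a bytes readline
        stream = tokenize.tokenize(fp.readline)
    try:
        for token_info in stream:
            yield token_info
    finally:
        fp.close()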