diff --git a/burton/__init__.py b/burton/__init__.py
index 6589019..7a4575c 100644
--- a/burton/__init__.py
+++ b/burton/__init__.py
@@ -6,14 +6,14 @@
import subprocess
import sys
-import database
-import parser
-import translation
-import vcs
+from . import database
+from . import parser
+from . import translation
+from . import vcs
-from config import Config
-from logginghandler import BurtonLoggingHandler
-from stringmapping import StringMapping
+from .config import Config
+from .logginghandler import BurtonLoggingHandler
+from .stringmapping import StringMapping
logger_name = "extensis.burton"
logging_handler = BurtonLoggingHandler()
@@ -77,7 +77,7 @@ def find_all_files(conf):
return return_files
def find_files_for_extension(conf, extension):
- """Finds all files recursively under thae root directory with the specified
+ """Finds all files recursively under the root directory with the specified
extension"""
return_files = []
@@ -111,22 +111,36 @@ def extract_strings(conf, strings_to_ignore):
if regex is not None:
files.extend(find_files_for_extension(conf, regex))
- strings.update(_extract_strings(parser_name, files, strings_to_ignore))
+ strings.update(_extract_strings(
+ parser_name,
+ files,
+ strings_to_ignore,
+ conf.get(Config.additional_function_names)
+ ))
return strings
-def _extract_strings(parser_name, files, strings_to_ignore):
+def _extract_strings(
+ parser_name,
+ files,
+ strings_to_ignore,
+ additional_function_names
+):
strings = set([])
if len(files) > 0:
cls = _class_from_string(parser_name)
parser = cls()
- strings = parser.extract_strings_from_files(files, strings_to_ignore)
+ strings = parser.extract_strings_from_files(
+ files,
+ strings_to_ignore,
+ additional_function_names
+ )
return strings
def _get_extensions_by_parser(conf):
extensions_by_parser = { }
- for key, value in conf.get(Config.parsers_by_extension).iteritems():
+ for key, value in conf.get(Config.parsers_by_extension).items():
extensions_by_parser[value] = extensions_by_parser.get(value, [])
extensions_by_parser[value].append(key)
@@ -150,19 +164,26 @@ def extract_mapping(conf, strings_to_ignore):
files.append(file)
reference_mapping.combine_with(
- _extract_mapping(parser_name, files, strings_to_ignore)
+ _extract_mapping(
+ parser_name,
+ files,
+ strings_to_ignore,
+ conf.get(Config.additional_function_names)
+ )
)
return reference_mapping
-def _extract_mapping(parser_name, files, strings_to_ignore):
+def _extract_mapping(
+ parser_name, files, strings_to_ignore, additional_function_names):
reference_mapping = StringMapping()
if len(files) > 0:
cls = _class_from_string(parser_name)
parser = cls()
reference_mapping = parser.extract_string_mapping_from_files(
files,
- strings_to_ignore
+ strings_to_ignore,
+ additional_function_names
)
return reference_mapping
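
For orientation, the new additional_function_names value is read from the config once and threaded through _extract_strings/_extract_mapping into each parser instance. A minimal sketch of that call chain, not part of the patch (the parser name string and file list are illustrative):

    extra_funcs = conf.get(Config.additional_function_names)   # e.g. ["MyLocalizedString"]
    strings = _extract_strings("parser.MacSource", ["SomeFile.m"], [], extra_funcs)
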
diff --git a/burton/config.py b/burton/config.py
index 9106dac..8df7d12 100644
--- a/burton/config.py
+++ b/burton/config.py
@@ -1,6 +1,6 @@
import codecs
import collections
-import ConfigParser
+import configparser
import json
import logging
import os
@@ -41,34 +41,35 @@ class Config(object):
"""
# Constants for config file variables
- source_path = "source_path"
- company_name = "company_name"
- product_name = "product_name"
- contact_email = "contact_email"
- log_filename = "log_filename"
- log_to_file = "log_to_file"
- strings_to_ignore_file = "strings_to_ignore_file"
- database_adaptor = "database_adaptor"
- database_path = "database_path"
- logging_level = "logging_level"
- vcs_class = "vcs_class"
- extensions_to_parse = "extensions_to_parse"
- disallowed_paths = "disallowed_paths"
- mapping_files = "mapping_files"
- parsers_by_extension = "parsers_by_extension"
- output_languages = "output_languages"
- native_language = "native_language"
- translation_files_class = "translation_files_class"
- language_codes = "language_codes"
- files_by_language = "files_by_language"
- paths_to_localize = "paths_to_localize"
- recursive_localization = "recursive_localization"
- localization_output_dir = "localization_output_dir"
- extensions_to_localize = "extensions_to_localize"
- abort_if_no_translations = "abort_if_no_translations"
- xlf_repo_path = "xlf_repo_path"
- base_localization_paths = "base_localization_paths"
- project_path = "project_path"
+ source_path = "source_path"
+ company_name = "company_name"
+ product_name = "product_name"
+ contact_email = "contact_email"
+ log_filename = "log_filename"
+ log_to_file = "log_to_file"
+ strings_to_ignore_file = "strings_to_ignore_file"
+ database_adaptor = "database_adaptor"
+ database_path = "database_path"
+ logging_level = "logging_level"
+ vcs_class = "vcs_class"
+ extensions_to_parse = "extensions_to_parse"
+ disallowed_paths = "disallowed_paths"
+ mapping_files = "mapping_files"
+ parsers_by_extension = "parsers_by_extension"
+ output_languages = "output_languages"
+ native_language = "native_language"
+ translation_files_class = "translation_files_class"
+ language_codes = "language_codes"
+ files_by_language = "files_by_language"
+ paths_to_localize = "paths_to_localize"
+ recursive_localization = "recursive_localization"
+ localization_output_dir = "localization_output_dir"
+ extensions_to_localize = "extensions_to_localize"
+ abort_if_no_translations = "abort_if_no_translations"
+ xlf_repo_path = "xlf_repo_path"
+ base_localization_paths = "base_localization_paths"
+ project_path = "project_path"
+ additional_function_names = "additional_function_names"
# Constants for command-line options
root_path = "root_path"
@@ -79,32 +80,33 @@ class Config(object):
commit_vcs = "commit_vcs"
_config_file_defaults = {
- source_path : None,
- company_name : "",
- product_name : "",
- contact_email : "",
- strings_to_ignore_file : strings_to_ignore_file,
- database_adaptor : None,
- database_path : None,
- logging_level : '"info"',
- vcs_class : '"vcs.NoOp"',
- extensions_to_parse : None,
- disallowed_paths : None,
- mapping_files : None,
- parsers_by_extension : None,
- output_languages : None,
- native_language : None,
- paths_to_localize : [],
- recursive_localization : "false",
- localization_output_dir : None,
- extensions_to_localize : [],
- files_by_language : {},
- translation_files_class : "translation.XLF",
- abort_if_no_translations : "false",
- xlf_repo_path : None,
- base_localization_paths : {},
- project_path : "",
- language_codes : {
+ source_path : None,
+ company_name : "",
+ product_name : "",
+ contact_email : "",
+ strings_to_ignore_file : strings_to_ignore_file,
+ database_adaptor : None,
+ database_path : None,
+ logging_level : '"info"',
+ vcs_class : '"vcs.NoOp"',
+ extensions_to_parse : None,
+ disallowed_paths : None,
+ mapping_files : None,
+ parsers_by_extension : None,
+ output_languages : None,
+ native_language : None,
+ paths_to_localize : [],
+ recursive_localization : "false",
+ localization_output_dir : None,
+ extensions_to_localize : [],
+ files_by_language : {},
+ translation_files_class : "translation.XLF",
+ abort_if_no_translations : "false",
+ xlf_repo_path : None,
+ base_localization_paths : {},
+ project_path : "",
+ additional_function_names : [],
+ language_codes : {
"English" : "en-US",
"French" : "fr-FR",
"German" : "de-DE",
@@ -224,7 +226,7 @@ def parse_command_line_options(self, script_name, options):
logger.error("usage: python " + script_name + " [path] [arguments]")
logger.error("This application takes the following arguments")
logger.error(
- "\n\t".join(self._command_line_mapping.keys())
+ "\n\t".join(list(self._command_line_mapping.keys()))
)
return False
@@ -249,7 +251,7 @@ def parse_command_line_options(self, script_name, options):
if os.path.exists(full_path):
fp = self._open_for_reading(full_path)
- parser = ConfigParser.SafeConfigParser(self._config_file_defaults)
+ parser = configparser.ConfigParser(defaults=self._config_file_defaults, allow_no_value=True)
parser.readfp(fp)
self._platform_queue.extend(parser.sections())
@@ -318,7 +320,7 @@ def readfp(self, fp, platform):
"""The readfp method reads configuration data from a file or file-like
object for a specific platform.
"""
- parser = ConfigParser.SafeConfigParser(self._config_file_defaults)
+ parser = configparser.ConfigParser(defaults=self._config_file_defaults, allow_no_value=True)
parser.readfp(fp)
if not parser.has_section(platform):
@@ -394,16 +396,13 @@ def _apply_custom_method(self, key, value):
return method(value)
def _add_file_extension_regexes(self, values):
- return map(
- lambda(extension): re.compile(".*\." + extension + "$"),
- values
- )
+        return [re.compile(r".*\." + extension + "$") for extension in values]
def _add_disallowed_path_regexes(self, values):
- return map(lambda(directory): re.compile(directory), values)
+ return [re.compile(directory) for directory in values]
def _add_mapping_files_regexes(self, values):
- return map(lambda(file): re.compile(file), values)
+ return [re.compile(file) for file in values]
def _open_for_reading(self, filename):
return open(filename, "r")
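
For context, Python 3's configparser spells the class ConfigParser (SafeConfigParser survives only as a deprecated alias), defaults are passed as a keyword argument, and allow_no_value=True keeps options whose value is left blank; readfp() still works but read_file() is the non-deprecated spelling. A minimal, self-contained sketch of the new construction, not part of the patch (section and option names are illustrative):

    import configparser, io

    parser = configparser.ConfigParser(
        defaults={"project_path": ""},      # stand-in for _config_file_defaults
        allow_no_value=True,
    )
    parser.read_file(io.StringIO('[Mac]\nsource_path = "Source"\n'))
    print(parser.get("Mac", "source_path"))   # -> "Source"
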
diff --git a/burton/database/__init__.py b/burton/database/__init__.py
index 8a5f3c9..0592120 100644
--- a/burton/database/__init__.py
+++ b/burton/database/__init__.py
@@ -1 +1 @@
-from sqlite import SQLite
+from .sqlite import SQLite
diff --git a/burton/database/sqlite.py b/burton/database/sqlite.py
index c1c63fe..c00cfe0 100644
--- a/burton/database/sqlite.py
+++ b/burton/database/sqlite.py
@@ -62,18 +62,19 @@ def _connect(self):
self.dbh = sqlite3.connect(self._temp_filename)
def _schema_file(self):
- return resource_stream(
- __name__,
- "sqlite.schema"
- )
+ return resource_stream(__name__, "sqlite.schema")
+
def _load_schema(self):
cursor = self.dbh.cursor()
fp = self._schema_file()
+ contents = fp.read()
+ if isinstance(contents, bytes):
+ contents = contents.decode()
cursor.executescript(
- fp.read()
+ contents
)
fp.close()
- cursor.close()
+
self.dbh.commit()
def _load_database(self):
@@ -135,9 +136,7 @@ def get_all_translation_keys(self):
""",
)
- results = map(
- lambda(key) : key[0].decode("unicode_escape"), cursor.fetchall()
- )
+ results = [key[0].decode("unicode_escape") for key in cursor.fetchall()]
return results
def get_all_native_translations(self):
@@ -149,9 +148,7 @@ def get_all_native_translations(self):
""",
)
- results = map(
- lambda(key) : key[0].decode("unicode_escape"), cursor.fetchall()
- )
+ results = [key[0].decode("unicode_escape") for key in cursor.fetchall()]
return results
def _insert_new_translation_keys(self, platform_no, mapping):
@@ -476,7 +473,7 @@ def get_platforms(self):
cursor = self.dbh.cursor()
cursor.execute("select name from platforms")
- return_value = map(lambda(result) : result[0], cursor.fetchall())
+ return_value = [result[0] for result in cursor.fetchall()]
cursor.close()
return return_value
@@ -501,7 +498,7 @@ def get_string_mapping_for_platform(self, platform):
cursor.close()
return_value = { }
- print results
+ print(results)
for translation_key in results:
return_value[translation_key.decode("unicode_escape")] = \
results[translation_key].decode("unicode_escape")
@@ -521,9 +518,7 @@ def get_native_translations_for_platform(self, platform):
( platform_no, )
)
- results = map(
- lambda(key) : key[0].decode("unicode_escape"), cursor.fetchall()
- )
+ results = [key[0].decode("unicode_escape") for key in cursor.fetchall()]
cursor.close()
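
The decode step added above exists because pkg_resources.resource_stream() hands back a binary stream under Python 3, while sqlite3's executescript() only accepts str. The same pattern in isolation, as a sketch rather than part of the patch:

    import sqlite3

    schema = b"create table test_table (test_column INTEGER NOT NULL);"
    contents = schema.decode() if isinstance(schema, bytes) else schema
    dbh = sqlite3.connect(":memory:")
    dbh.cursor().executescript(contents)
    dbh.commit()
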
diff --git a/burton/database/test/sqlitetests.py b/burton/database/test/sqlitetests.py
index 7668f32..3d64091 100644
--- a/burton/database/test/sqlitetests.py
+++ b/burton/database/test/sqlitetests.py
@@ -1,10 +1,11 @@
import codecs
-import cStringIO
import mock
import os
import sqlite3
import unittest
+from io import StringIO
+
from burton import database
class SQLiteTests(unittest.TestCase):
@@ -40,16 +41,16 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from translation_keys").fetchall(),
[
- (1, u"SomeString", u"2010-12-02 02:20:00"),
- (2, u"OtherString", u"2010-12-02 02:20:00")
+ (1, b"SomeString", "2010-12-02 02:20:00"),
+ (2, b"OtherString", "2010-12-02 02:20:00")
],
)
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, u"Translation for some string"),
- (2, 2, 1, u"Translation for some other string")
+ (1, 1, 1, b"Translation for some string"),
+ (2, 2, 1, b"Translation for some other string")
],
)
@@ -68,18 +69,18 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from translation_keys").fetchall(),
[
- (1, u"SomeString", u"2010-12-02 02:21:00"),
- (2, u"OtherString", u"2010-12-02 02:21:00")
+ (1, b"SomeString", "2010-12-02 02:21:00"),
+ (2, b"OtherString", "2010-12-02 02:21:00")
],
)
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, u"Translation for some string"),
- (2, 2, 1, u"Translation for some other string"),
- (3, 1, 2, u"Translation for some string"),
- (4, 2, 2, u"Translation for some other string")
+ (1, 1, 1, b"Translation for some string"),
+ (2, 2, 1, b"Translation for some other string"),
+ (3, 1, 2, b"Translation for some string"),
+ (4, 2, 2, b"Translation for some other string")
],
)
@@ -97,18 +98,18 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from translation_keys").fetchall(),
[
- (1, u"SomeString", u"2010-12-02 02:22:00"),
- (2, u"OtherString", u"2010-12-02 02:21:00")
+ (1, b"SomeString", "2010-12-02 02:22:00"),
+ (2, b"OtherString", "2010-12-02 02:21:00")
],
)
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, u"New translation for some string"),
- (2, 2, 1, u"Translation for some other string"),
- (3, 1, 2, u"Translation for some string"),
- (4, 2, 2, u"Translation for some other string")
+ (1, 1, 1, b"New translation for some string"),
+ (2, 2, 1, b"Translation for some other string"),
+ (3, 1, 2, b"Translation for some string"),
+ (4, 2, 2, b"Translation for some other string")
],
)
@@ -125,17 +126,17 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from translation_keys").fetchall(),
[
- (1, u"SomeString", u"2010-12-02 02:23:00"),
- (2, u"OtherString", u"2010-12-02 02:23:00")
+ (1, b"SomeString", "2010-12-02 02:23:00"),
+ (2, b"OtherString", "2010-12-02 02:23:00")
],
)
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, u"New translation for some string"),
- (2, 2, 1, u"Translation for some other string"),
- (3, 1, 2, u"New translation for some string")
+ (1, 1, 1, b"New translation for some string"),
+ (2, 2, 1, b"Translation for some other string"),
+ (3, 1, 2, b"New translation for some string")
],
)
@@ -168,24 +169,24 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from translation_keys").fetchall(),
[
- (1, u"SomeString", u"2010-12-02 02:20:00"),
+ (1, b"SomeString", "2010-12-02 02:20:00"),
],
)
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, "%03d of %03.3lld for {0} %@"),
+ (1, 1, 1, b"%03d of %03.3lld for {0} %@"),
],
)
self.assertEquals(
cursor.execute("select * from replaced_params").fetchall(),
[
- (1, 1, 1, 0, u"%03d" ),
- (2, 1, 1, 1, u"%03.3lld"),
- (3, 1, 1, 2, u"{0}" ),
- (4, 1, 1, 3, u"%@" ),
+ (1, 1, 1, 0, b"%03d" ),
+ (2, 1, 1, 1, b"%03.3lld"),
+ (3, 1, 1, 2, b"{0}" ),
+ (4, 1, 1, 3, b"%@" ),
],
)
@@ -199,15 +200,15 @@ def _disconnect(*args, **kwargs):
self.assertEquals(
cursor.execute("select * from native_translations").fetchall(),
[
- (1, 1, 1, "%03d of %03.3lld"),
+ (1, 1, 1, b"%03d of %03.3lld"),
],
)
self.assertEquals(
cursor.execute("select * from replaced_params").fetchall(),
[
- (1, 1, 1, 0, u"%03d" ),
- (2, 1, 1, 1, u"%03.3lld"),
+ (1, 1, 1, 0, b"%03d" ),
+ (2, 1, 1, 1, b"%03.3lld"),
],
)
@@ -234,7 +235,7 @@ def test_connect_loads_schema_if_new_database(self, mock_function):
db._load_schema = mock.Mock(side_effect = orig_load_schema)
db._schema_file = mock.Mock(
- return_value = cStringIO.StringIO("""create table test_table (
+ return_value = StringIO("""create table test_table (
test_column INTEGER NOT NULL
);""")
)
@@ -267,7 +268,7 @@ def _disconnect(*args, **kwargs):
db._load_database = mock.Mock(side_effect = orig_load_database)
db._open_for_reading = mock.Mock(
- return_value = cStringIO.StringIO("""create table test_table (
+ return_value = StringIO("""create table test_table (
test_column INTEGER NOT NULL
);
@@ -353,8 +354,8 @@ def _disconnect(*args, **kwargs):
from translation_keys"""
).fetchall(),
[
- (2, u"OtherString"),
- (1, u"SomeString")
+ (2, b"OtherString"),
+ (1, b"SomeString")
],
)
@@ -371,8 +372,8 @@ def _disconnect(*args, **kwargs):
from translation_keys"""
).fetchall(),
[
- (2, u"OtherString"),
- (1, u"SomeString")
+ (2, b"OtherString"),
+ (1, b"SomeString")
],
)
@@ -389,8 +390,8 @@ def _disconnect(*args, **kwargs):
from translation_keys"""
).fetchall(),
[
- (2, u"OtherString"),
- (1, u"SomeString")
+ (2, b"OtherString"),
+ (1, b"SomeString")
],
)
@@ -407,7 +408,7 @@ def _disconnect(*args, **kwargs):
from translation_keys"""
).fetchall(),
[
- (2, u"OtherString")
+ (2, b"OtherString")
],
)
@@ -580,7 +581,7 @@ def _disconnect(*args, **kwargs):
db.disconnect()
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
def test_open_for_reading(self, open_func):
db = database.SQLite("some_filename")
db._open_for_reading("filename")
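
The patch target changes because Python 3 exposes the built-in namespace as builtins rather than __builtin__. For reference, a minimal standalone equivalent of that test using unittest.mock (sketch only, not part of the patch):

    from unittest import mock

    with mock.patch("builtins.open") as open_func:
        open("filename", "r")
    open_func.assert_called_with("filename", "r")
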
diff --git a/burton/parser/__init__.py b/burton/parser/__init__.py
index 3fe0927..dbc09db 100644
--- a/burton/parser/__init__.py
+++ b/burton/parser/__init__.py
@@ -1,12 +1,11 @@
-from base import Base
-from angular import Angular
-from lproj import LPROJ
-from macsource import MacSource
-from nib import NIB
-from pasteboardxml import PasteboardXML
-from properties import Properties
-from rc import RC
-from resx import RESX
-from strings import Strings
-from stringsdict import StringsDict
-from util import *
+from .base import Base
+from .angular import Angular
+from .lproj import LPROJ
+from .macsource import MacSource
+from .nib import NIB
+from .properties import Properties
+from .rc import RC
+from .resx import RESX
+from .strings import Strings
+from .stringsdict import StringsDict
+from .util import *
diff --git a/burton/parser/angular.py b/burton/parser/angular.py
index eb0ef1d..31b37a2 100644
--- a/burton/parser/angular.py
+++ b/burton/parser/angular.py
@@ -7,8 +7,8 @@
import unicodedata
import burton
-from base import Base
-from util import detect_encoding
+from .base import Base
+from .util import detect_encoding
class Angular(Base):
REGEX_PATTERN = re.compile("'\[([^\]]+)\]'\s*:\s*'(.+)'")
@@ -16,7 +16,11 @@ class Angular(Base):
def __init__(self):
Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return_values = set([])
def _add_key(key, value):
@@ -29,11 +33,15 @@ def _add_key(key, value):
return return_values
- def extract_mapping_from_filename(self, filename, strip_keys = True):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
def _add_mapping(key, value):
- if strip_keys and key and key[0] == '"':
+ if key and key[0] == '"':
key = key[1:-1]
if value and value[0] == '"':
@@ -47,14 +55,14 @@ def _add_mapping(key, value):
def _parse(self, filename, func):
file, encoding = self._open_file_for_reading(filename)
- contents = unicode(file.read())
+ contents = file.read()
for line in re.split("\r|\n", contents):
key = None
value = None
line = line.rstrip("\r\n")
- assert type(line) is types.UnicodeType
+ assert type(line) is str
results = Angular.REGEX_PATTERN.search(line)
@@ -86,25 +94,25 @@ def translate(
if not os.path.exists(output_filename):
created_file = True
- logger.error("Created new file " + output_filename)
output_file = self._open_file_for_writing(output_filename)
input_file, encoding = self._open_file_for_reading(input_filename)
- contents = unicode(input_file.read())
+ contents = input_file.read()
input_file.close()
for line in re.split("\r|\n", contents):
results = Angular.REGEX_PATTERN.search(line)
if results is not None:
- key = results.group(1).decode('unicode-escape')
- value = results.group(2).decode('unicode-escape')
+ key = results.group(1)
+ value = results.group(2)
if value in mapping:
value = mapping[value]
if key is not None and value is not None:
line = re.sub(r"'\[[^\]]+\]'", "'[" + self._encode(key) + "]'", line)
- line = re.sub(r": '[^\[].+[^\]]'", ": '" + self._encode(value) + "'", line)
+ sub_value = (": '" + self._encode(value) + "'").replace("\\x", "\\\\x").replace("\\u", "\\\\u")
+ line = re.sub(r": '[^\[].+[^\]]'", sub_value, line)
else:
line = line.replace(
"$translateProvider.translations('en', strings);",
@@ -119,7 +127,7 @@ def translate(
vcs_class.add_file(output_filename)
def _open_file_for_reading(self, filename):
- encoding = detect_encoding(open(filename, "r"))
+ encoding = detect_encoding(open(filename, "rb"))
# Strings files should always be unicode of some sort.
# Sometimes chardet guesses UTF-8 wrong.
@@ -132,4 +140,5 @@ def _open_file_for_writing(self, filename):
return open(filename, "w")
def _encode(self, str):
- return str.encode("unicode-escape").replace("'", "\\'")
+ str = str.encode('unicode-escape').decode('utf8').replace("'", "\\'")
+ return str
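
The extra decode() in _encode is needed because str.encode("unicode-escape") yields bytes in Python 3; decoding (the escaped output is pure ASCII, so utf8 is safe) gets back a str that the subsequent replace() calls can operate on. A tiny illustration with a made-up value, not part of the patch:

    value = "caf\u00e9 'quoted'"
    encoded = value.encode("unicode-escape").decode("utf8").replace("'", "\\'")
    # encoded now reads: caf\xe9 \'quoted\'
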
diff --git a/burton/parser/base.py b/burton/parser/base.py
index a3c875f..7af9b01 100644
--- a/burton/parser/base.py
+++ b/burton/parser/base.py
@@ -1,13 +1,18 @@
import logging
import burton
-from util import filter_string, replace_params
+from .util import filter_string, replace_params
class Base(object):
def __init__(self):
object.__init__(self)
- def extract_strings_from_files(self, filenames, strings_to_ignore = []):
+ def extract_strings_from_files(
+ self,
+ filenames,
+ strings_to_ignore = [],
+ additional_function_names = []
+ ):
logger = logging.getLogger(burton.logger_name)
raw_strings = set([])
filtered_strings = set([])
@@ -27,7 +32,8 @@ def extract_strings_from_files(self, filenames, strings_to_ignore = []):
def extract_string_mapping_from_files(
self,
filenames,
- strings_to_ignore = []
+ strings_to_ignore = [],
+ additional_function_names = []
):
logger = logging.getLogger(burton.logger_name)
reference_mapping = burton.StringMapping()
@@ -35,7 +41,10 @@ def extract_string_mapping_from_files(
for filename in set(self._filter_filenames(filenames)):
logger.debug("Extracting string mapping from " + filename)
reference_mapping.combine_with(
- self.extract_mapping_from_filename(filename)
+ self.extract_mapping_from_filename(
+ filename,
+ additional_function_names
+ )
)
strings_to_remove = []
diff --git a/burton/parser/lproj.py b/burton/parser/lproj.py
index d064e19..03447b3 100644
--- a/burton/parser/lproj.py
+++ b/burton/parser/lproj.py
@@ -1,13 +1,14 @@
-import cStringIO
import codecs
import logging
import os
+from io import StringIO
+
import burton
-from base import Base
-from strings import Strings
-from stringsdict import StringsDict
-from util import replace_params, restore_platform_specific_params
+from .base import Base
+from .strings import Strings
+from .stringsdict import StringsDict
+from .util import replace_params, restore_platform_specific_params
class LPROJ(Base):
def translate(
diff --git a/burton/parser/macsource.py b/burton/parser/macsource.py
index 033452c..5f335cc 100644
--- a/burton/parser/macsource.py
+++ b/burton/parser/macsource.py
@@ -1,16 +1,22 @@
+import codecs
import os
+import re
import shutil
import subprocess
import tempfile
-from base import Base
-from strings import Strings
+from .base import Base
+from .strings import Strings
class MacSource(Base):
def __init__(self):
Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
output_dir = self._get_output_directory()
self._run_genstrings_command_for_file(filename, output_dir)
@@ -23,6 +29,22 @@ def extract_strings_from_filename(self, filename):
strings_parser.extract_strings_from_files(full_paths)
shutil.rmtree(output_dir)
+
+ if len(additional_function_names) > 0:
+ func_exp = '|'.join(additional_function_names)
+ regex = u'(%s)\(\s*@?"((?:(?<=\\\\)"|[^"])*)(? 0:
- return_values.add(unicode(string.replace("\n", "\\n")))
+ return_values.add(string.replace("\n", "\\n"))
return return_values
- def extract_mapping_from_filename(self, filename):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
- for string in self.extract_strings_from_filename(filename):
+ for string in self.extract_strings_from_filename(
+ filename,
+ additional_function_names
+ ):
string_mapping.add_mapping(string, string)
return string_mapping
diff --git a/burton/parser/pasteboardxml.py b/burton/parser/pasteboardxml.py
deleted file mode 100644
index 32b40b1..0000000
--- a/burton/parser/pasteboardxml.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import logging
-import lxml.etree
-import os
-import re
-
-import burton
-from base import Base
-from util import filter_string
-
-class PasteboardXML(Base):
- root_tag = "manifest"
- identifier_tag = "identifier"
- category_tag = "category"
- title_tag = "title"
- version_tag = "version"
- filename_tag = "filename"
- image_filename_tag = "imageFilename"
- elements_tag = "elements"
- name_tag = "name"
- element_tag = "element"
-
- def __init__(self):
- Base.__init__(self)
-
- def _filter_filenames(self, filenames):
- filtered_files = []
-
- for filename in filenames:
- if filename.endswith("-en.xml"):
- filtered_files.append(filename)
-
- return filtered_files
-
- def extract_strings_from_filename(self, filename):
- return set(
- self.extract_mapping_from_filename(filename).\
- string_mapping_dict.keys()
- )
-
- def extract_mapping_from_filename(self, filename):
- string_mapping = burton.StringMapping(filename = filename)
-
- tree = lxml.etree.fromstring(self._read_file(filename))
-
- def _add_mapping(key, value, node):
- string_mapping.add_mapping(key, value)
-
- self._parse(tree, _add_mapping)
-
- return string_mapping
-
- def translate(
- self,
- input_filename,
- output_directory,
- mapping,
- language,
- language_code,
- should_use_vcs,
- vcs_class,
- proj_file
- ):
- logger = logging.getLogger(burton.logger_name)
- logger.debug("Localizing " + input_filename + " into " + language)
-
- output_filename = None
- if not os.path.exists(output_directory):
- os.mkdir(output_directory)
-
- if input_filename.endswith("-en.xml"):
- output_filename = re.sub(
- u"-en\.xml$",
- u"-" + language_code + u".xml",
- input_filename
- )
-
- output_filename = os.path.join(output_directory, output_filename)
-
- if not os.path.exists(output_filename):
- logger.error("Create new file " + output_filename)
-
- tree = lxml.etree.fromstring(self._read_file(input_filename))
-
- def _rewrite_mapping(key, value, node):
- if value in mapping:
- node.text = mapping[filter_string(value)]
-
- self._parse(tree, _rewrite_mapping)
-
- file = self._open_file_for_writing(output_filename)
- lxml.etree.ElementTree(element = tree).write(
- file,
- xml_declaration = False,
- pretty_print = True,
- encoding = "utf-8"
- )
-
- file.close()
-
- if should_use_vcs:
- vcs_class.add_file(output_filename)
-
- return output_filename
-
- def _parse(self, tree, func):
- element = tree.find(PasteboardXML.title_tag)
-
- if element != None:
- func(element.text, element.text, element)
-
- element = tree.find(PasteboardXML.category_tag)
-
- if element != None:
- func(element.text, element.text, element)
-
- element = tree.find(PasteboardXML.elements_tag)
-
- if element != None:
- for name in element.findall(PasteboardXML.name_tag):
- if name != None:
- func(name.text, name.text, name)
-
- def _read_file(self, filename):
- fp = open(filename, "r")
- return_value = fp.read()
- fp.close()
- return return_value
-
- def _open_file_for_writing(self, filename):
- return open(filename, "w")
diff --git a/burton/parser/properties.py b/burton/parser/properties.py
index 424197d..63b8296 100644
--- a/burton/parser/properties.py
+++ b/burton/parser/properties.py
@@ -5,13 +5,17 @@
import types
import burton
-from base import Base
+from .base import Base
class Properties(Base):
def __init__(self):
Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = [],
+ ):
return_values = set([])
def _add_key(key, value):
@@ -21,7 +25,11 @@ def _add_key(key, value):
return return_values
- def extract_mapping_from_filename(self, filename):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
def _add_mapping(key, value):
@@ -41,9 +49,6 @@ def _parse(self, filename, func):
value = None
line = line.rstrip("\r\n")
- if type(line) != types.UnicodeType:
- line = unicode(line, "utf-8")
-
if incomplete_line is not None:
if incomplete_line.endswith("\\"):
line = incomplete_line.rstrip("\\") + line
diff --git a/burton/parser/rc.py b/burton/parser/rc.py
index 5501ccb..424d30c 100644
--- a/burton/parser/rc.py
+++ b/burton/parser/rc.py
@@ -5,8 +5,8 @@
import types
import burton
-from base import Base
-from util import detect_encoding
+from .base import Base
+from .util import detect_encoding
class RC(Base):
string_table_token = "STRINGTABLE"
@@ -41,13 +41,23 @@ def _filter_filenames(self, filenames):
return filtered_files
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return set(
- self.extract_mapping_from_filename(filename).\
- string_mapping_dict.keys()
+ self.extract_mapping_from_filename(
+ filename,
+ additional_function_names
+ ).string_mapping_dict.keys()
)
- def extract_mapping_from_filename(self, filename):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
def _add_mapping(key, value, line):
@@ -68,7 +78,7 @@ def _parse(self, filename, func):
# We can't use codecs or readlines() due to a bug in Python's handling
# of UTF-16 files on Windows
- lines = file.read().decode(encoding).replace("\r\n", "\n").split("\n")
+ lines = file.read().replace("\r\n", "\n").split("\n")
for line in lines:
orig_line = line
line = line.lstrip()
@@ -77,7 +87,7 @@ def _parse(self, filename, func):
line = line.rstrip("\r\n")
if not (line.startswith("#") or line.startswith("//")):
- assert(type(line) == types.UnicodeType)
+ assert(type(line) == str)
if incomplete_line is not None:
line = incomplete_line + line
@@ -146,9 +156,7 @@ def translate(
os.mkdir(output_directory)
if input_filename.endswith(".rc"):
- input_keys = self.extract_strings_from_filename(
- input_filename
- )
+ input_keys = self.extract_strings_from_filename(input_filename)
output_filename = os.path.splitext(
os.path.basename(input_filename)
@@ -162,7 +170,6 @@ def translate(
if not os.path.exists(output_filename):
created_file = True
- logger.error("Created new file " + output_filename)
output_file = self._open_file_for_writing(output_filename)
@@ -221,7 +228,7 @@ def _encode(self, str):
return str
def _open_file(self, filename):
- encoding = detect_encoding(open(filename, "r"))
+ encoding = detect_encoding(open(filename, "rb"))
return open(filename, "r"), encoding
def _open_file_for_writing(self, filename):
diff --git a/burton/parser/resx.py b/burton/parser/resx.py
index b6d0868..d50ecfe 100644
--- a/burton/parser/resx.py
+++ b/burton/parser/resx.py
@@ -3,8 +3,8 @@
import os
import burton
-from base import Base
-from util import filter_string
+from .base import Base
+from .util import filter_string
class RESX(Base):
data_tag = "data"
@@ -28,13 +28,23 @@ def _filter_filenames(self, filenames):
return filtered_files
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return set(
- self.extract_mapping_from_filename(filename).\
- string_mapping_dict.keys()
+ self.extract_mapping_from_filename(
+ filename,
+ additional_function_names
+ ).string_mapping_dict.keys()
)
- def extract_mapping_from_filename(self, filename):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
tree = lxml.etree.fromstring(self._read_file(filename))
@@ -65,8 +75,8 @@ def filter_component(component):
components = node.attrib[RESX.name_attribute].split(".")
if len(components) == 1 \
or components[-1] in RESX.localizable_suffixes:
- key = unicode(".".join(map(filter_component, components)))
- value = unicode(node.find(RESX.value_tag).text)
+ key = ".".join(map(filter_component, components))
+ value = node.find(RESX.value_tag).text
if key == "$this":
key = dollarsign_this_replacement
@@ -112,7 +122,6 @@ def translate(
if not os.path.exists(output_filename):
created_file = True
- logger.error("Created new file " + output_filename)
tree = lxml.etree.fromstring(self._read_file(input_filename))
@@ -128,17 +137,15 @@ def _rewrite_mapping(key, value, node):
self._parse(tree, input_filename, _rewrite_mapping)
+    # Make sure the output file exists
file = self._open_file_for_writing(output_filename)
lxml.etree.ElementTree(element = tree).write(
- file,
- xml_declaration = True,
- pretty_print = True,
- encoding = "utf-8"
+ file
)
+ # Keep open for unit tests
+ file.flush()
- file.close()
-
- if(proj_file.lower() != "none"):
+ if proj_file is not None and proj_file.lower() != "none":
# namespace for xpath queries
ns = '{http://schemas.microsoft.com/developer/msbuild/2003}'
@@ -193,7 +200,7 @@ def _rewrite_mapping(key, value, node):
dep_upon_source = proj_file_tree.findall(xpath)
# if we don't match anything something went wrong
if (len(dep_upon_source) < 1):
- print "Could not find " + input_filename + " in project file. Not adding " + output_filename + " to project"
+ print("Could not find " + input_filename + " in project file. Not adding " + output_filename + " to project")
else:
# create our new element
resource_elem = lxml.etree.Element('EmbeddedResource', Include=localized_element_path)
@@ -226,7 +233,7 @@ def _find_dollarsign_this(self, tree):
if len(components) > 1 and \
components[-1] == RESX.name_suffix and \
components[-2].endswith("$this"):
- return unicode(node.find(RESX.value_tag).text)
+ return node.find(RESX.value_tag).text
def _read_file(self, filename):
fp = open(filename, "r")
@@ -235,7 +242,7 @@ def _read_file(self, filename):
return return_value
def _open_file_for_writing(self, filename):
- return open(filename, "w")
+ return open(filename, "wb")
def _open_file_for_appending(self, filename):
- return open(filename, 'ra')
+        return open(filename, 'a+b')  # Python 3 rejects the old 'ra'/'rab' mode string
diff --git a/burton/parser/strings.py b/burton/parser/strings.py
index a624245..23fce81 100644
--- a/burton/parser/strings.py
+++ b/burton/parser/strings.py
@@ -6,8 +6,8 @@
import unicodedata
import burton
-from base import Base
-from util import detect_encoding
+from .base import Base
+from .util import detect_encoding
class Strings(Base):
def __init__(self):
@@ -35,11 +35,15 @@ def _add_key(key, value):
return return_values
- def extract_mapping_from_filename(self, filename, strip_keys = True):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
def _add_mapping(key, value):
- if strip_keys and key and key[0] == '"':
+ if key and key[0] == '"':
key = key[1:-1]
if value and value[0] == '"':
@@ -66,7 +70,7 @@ def _parse(self, filename, func):
value = None
line = line.rstrip("\r\n")
- assert type(line) is types.UnicodeType
+ assert type(line) is str
if incomplete_line is not None:
if incomplete_line.strip().endswith("\\"):
@@ -140,7 +144,7 @@ def _parse(self, filename, func):
file.close()
def write_mapping(self, file, mapping):
- sorted_keys = mapping.keys()
+ sorted_keys = list(mapping.keys())
sorted_keys.sort()
for key in sorted_keys:
@@ -160,7 +164,7 @@ def write_mapping(self, file, mapping):
file.write(key + ' = "' + value + '";\n')
def _open_file(self, filename):
- encoding = detect_encoding(open(filename, "r"))
+ encoding = detect_encoding(open(filename, "rb"))
# Strings files should always be unicode of some sort.
# Sometimes chardet guesses UTF-8 wrong.
@@ -218,7 +222,8 @@ def _strip_comments(self, contents):
return output
def _encode(self, str):
- return str.encode("unicode-escape")\
+ return str.encode('unicode-escape')\
+ .decode('utf8')\
.replace("\"", "\\\"")\
.replace("\\x", "\\U00")\
.replace("\\u", "\\U")\
diff --git a/burton/parser/stringsdict.py b/burton/parser/stringsdict.py
index 09ec6ed..f9aa0dd 100644
--- a/burton/parser/stringsdict.py
+++ b/burton/parser/stringsdict.py
@@ -3,8 +3,8 @@
import os
import burton
-from base import Base
-from util import filter_string
+from .base import Base
+from .util import filter_string
class StringsDict(Base):
plist_tag = 'plist'
@@ -16,13 +16,23 @@ class StringsDict(Base):
def __init__(self):
Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return set(
- self.extract_mapping_from_filename(filename).\
- string_mapping_dict.keys()
+ self.extract_mapping_from_filename(
+ filename,
+ additional_function_names
+ ).string_mapping_dict.keys()
)
- def extract_mapping_from_filename(self, filename):
+ def extract_mapping_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
string_mapping = burton.StringMapping(filename = filename)
tree = lxml.etree.fromstring(self._read_file(filename))
@@ -48,7 +58,7 @@ def _parse(self, tree, func):
valid_key = category in StringsDict.valid_keys
elif entry.tag == StringsDict.string_tag:
if valid_key:
- func(unicode(entry.text), category, entry)
+ func(entry.text, category, entry)
valid_key = False
def translate(
@@ -74,9 +84,7 @@ def translate(
os.mkdir(output_directory)
if input_filename.endswith(".stringsdict"):
- input_keys = self.extract_strings_from_filename(
- input_filename
- )
+ input_keys = self.extract_strings_from_filename(input_filename)
output_filename = os.path.join(
output_directory,
@@ -87,7 +95,6 @@ def translate(
if not os.path.exists(output_filename):
created_file = True
- logger.error("Created new file " + output_filename)
tree = lxml.etree.fromstring(self._read_file(input_filename))
@@ -98,7 +105,7 @@ def _rewrite_mapping(value, category, node):
self._parse(tree, _rewrite_mapping)
file = self._open_file_for_writing(output_filename)
- file.write("\n")
+ file.write(b"\n")
lxml.etree.ElementTree(element = tree).write(
file,
xml_declaration = False,
@@ -106,18 +113,16 @@ def _rewrite_mapping(value, category, node):
encoding = "utf-8"
)
- file.close()
-
if should_use_vcs:
vcs_class.add_file(output_filename)
return output_filename
def _read_file(self, filename):
- fp = open(filename, "r")
+ fp = open(filename, "rb")
return_value = fp.read()
fp.close()
return return_value
def _open_file_for_writing(self, filename):
- return open(filename, "w")
+ return open(filename, "wb")
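
Opening the output in "wb" (and reading with "rb") lines up with lxml: ElementTree.write() emits encoded bytes when given an encoding, so the handle has to accept bytes, and the header line written ahead of the tree becomes a bytes literal. A minimal sketch of the same serialisation against an in-memory buffer, not part of the patch:

    import io
    import lxml.etree

    tree = lxml.etree.fromstring(b"<plist><dict><key>k</key></dict></plist>")
    out = io.BytesIO()
    lxml.etree.ElementTree(element=tree).write(
        out,
        xml_declaration=False,
        pretty_print=True,
        encoding="utf-8",
    )
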
diff --git a/burton/parser/test/angulartests.py b/burton/parser/test/angulartests.py
index 5d1742f..e0f4012 100644
--- a/burton/parser/test/angulartests.py
+++ b/burton/parser/test/angulartests.py
@@ -1,11 +1,12 @@
-import cStringIO
import mock
import os
import types
import unittest
+from io import StringIO
+
from burton import parser
-import teststringio
+from . import teststringio
class AngularTests(unittest.TestCase):
sample_strings = \
@@ -62,7 +63,7 @@ class AngularTests(unittest.TestCase):
def test_extract_strings_from_filename(self):
extractor = parser.Angular()
extractor._open_file_for_reading = mock.Mock(return_value = (
- cStringIO.StringIO(AngularTests.sample_strings),
+ StringIO(AngularTests.sample_strings),
"utf_8"
))
@@ -80,7 +81,7 @@ def test_extract_strings_from_filename(self):
def test_extract_mapping_from_filename(self):
extractor = parser.Angular()
extractor._open_file_for_reading = mock.Mock(return_value = (
- cStringIO.StringIO(AngularTests.sample_strings),
+ StringIO(AngularTests.sample_strings),
"utf_8"
))
@@ -96,9 +97,9 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
def test_translate(self):
vcs_class = mock.Mock()
diff --git a/burton/parser/test/basetests.py b/burton/parser/test/basetests.py
index b4522dc..bcf8fa5 100644
--- a/burton/parser/test/basetests.py
+++ b/burton/parser/test/basetests.py
@@ -14,7 +14,7 @@ def test_extract_strings_from_files(self):
extractor = burton.parser.Base()
extractor.extract_strings_from_filename = mock.Mock(side_effect =
- lambda(filename): individual_file_strings.pop()
+ lambda filename: individual_file_strings.pop()
)
self.assertEquals(
@@ -56,7 +56,8 @@ def test_extract_mapping_from_files(self):
individual_file_mappings = [ mapping1, mapping2 ]
extractor = burton.parser.Base()
extractor.extract_mapping_from_filename = mock.Mock(side_effect =
- lambda(filename): individual_file_mappings.pop()
+ lambda filename, additional_function_names:\
+ individual_file_mappings.pop()
)
final_mapping = extractor.extract_string_mapping_from_files(
diff --git a/burton/parser/test/lprojtests.py b/burton/parser/test/lprojtests.py
index 068561e..b2ce3f2 100644
--- a/burton/parser/test/lprojtests.py
+++ b/burton/parser/test/lprojtests.py
@@ -5,7 +5,7 @@
from burton import parser
from burton import stringmapping
-import teststringio
+from . import teststringio
class LPROJTests(unittest.TestCase):
diff --git a/burton/parser/test/macsourcetests.py b/burton/parser/test/macsourcetests.py
index 143d213..7ecd456 100644
--- a/burton/parser/test/macsourcetests.py
+++ b/burton/parser/test/macsourcetests.py
@@ -11,7 +11,8 @@ class NIBTests(unittest.TestCase):
def test_extract_strings_from_filename(self):
extractor = parser.MacSource()
extracted_strings = extractor.extract_strings_from_filename(
- os.path.join(os.path.dirname(__file__), "test.m")
+ os.path.join(os.path.dirname(__file__), "test.m"),
+ ["CustomFunctionLocalizedString", "OtherFunctionLocalizedString"]
)
self.assertEquals(
@@ -23,8 +24,10 @@ def test_extract_strings_from_filename(self):
u"SomeString2",
u"SomeOtherString2",
u"YetAnotherString2",
+ u"Custom String 1",
+ u"Custom String 2"
])
)
for string in extracted_strings:
- self.assertEquals(type(string), types.UnicodeType)
+ self.assertEquals(type(string), str)
diff --git a/burton/parser/test/nibtests.py b/burton/parser/test/nibtests.py
index 70233df..d485127 100644
--- a/burton/parser/test/nibtests.py
+++ b/burton/parser/test/nibtests.py
@@ -8,7 +8,7 @@
class NIBTests(unittest.TestCase):
sample_nib = \
- """
+ str.encode("""
@@ -27,7 +27,7 @@ class NIBTests(unittest.TestCase):
-"""
+""")
@unittest.skipUnless(sys.platform == "darwin", "Requires Mac")
def test_get_plist_from_nib_file(self):
@@ -55,7 +55,7 @@ def test_extract_strings_from_filename(self):
)
for string in extracted_strings:
- self.assertEquals(type(string), types.UnicodeType)
+ self.assertEquals(type(string), str)
def test_extract_mapping_from_filename(self):
extractor = parser.NIB()
@@ -90,23 +90,23 @@ def store_param(param):
os.path.join("other.nib", "random.nib"),
])
- self.assertEquals(params, [ "some.nib", "other.nib" ])
+ self.assertEquals(set(params), set([ "some.nib", "other.nib" ]))
def test_filter_filenames(self):
extractor = parser.NIB()
self.assertEquals(
- extractor._filter_filenames([
+ set(extractor._filter_filenames([
os.path.join("some.nib", "designable.nib"),
os.path.join("some.nib", "keyedobjects.nib"),
os.path.join("other.nib", "random.nib"),
os.path.join("other.nib", "random.other"),
- ]),
- [
+ ])),
+ set([
os.path.join("other.nib", "random.other"),
- "some.nib",
"other.nib",
- ]
+ "some.nib",
+ ])
)
extractor._filter_filenames = mock.Mock(return_value = [])
diff --git a/burton/parser/test/pasteboardxmltests.py b/burton/parser/test/pasteboardxmltests.py
deleted file mode 100644
index c63732c..0000000
--- a/burton/parser/test/pasteboardxmltests.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import mock
-import os
-import types
-import unittest
-
-from burton import parser
-import teststringio
-
-class PasteboardXMLTests(unittest.TestCase):
- sample_xml = \
-"""
- test-identifier
- English Category
- English Title
- 1
- test.html
- test.png
-
- English Test Element 1
- test-element1
- English Test Element 2
- test-element2
-
-
-"""
-
- sample_translated_xml = \
-"""
- test-identifier
- Translated Category
- Translated Title
- 1
- test.html
- test.png
-
- Translated Test Element 1
- test-element1
- Translated Test Element 2
- test-element2
-
-
-"""
-
- def test_filter_filenames(self):
- extractor = parser.PasteboardXML()
-
- self.assertEquals(
- extractor._filter_filenames(
- [ "test-en.xml", "test-jp.xml" ]
- ),
- [ "test-en.xml" ]
- )
-
- def test_read_file(self):
- extractor = parser.PasteboardXML()
- dir = os.path.dirname(__file__)
- file = os.path.join(dir, "pasteboard-test-en.xml")
-
- self.assertEquals(
- extractor._read_file(file),
- PasteboardXMLTests.sample_xml
- )
-
- def test_extract_strings_from_filename(self):
- extractor = parser.PasteboardXML()
- extractor._read_file = mock.Mock(
- return_value = PasteboardXMLTests.sample_xml
- )
-
- extracted_strings = extractor.extract_strings_from_filename("some_file")
-
- self.assertEquals(
- extracted_strings,
- set([
- u"English Category",
- u"English Title",
- u"English Test Element 1",
- u"English Test Element 2",
- ])
- )
-
- for string in extracted_strings:
- self.assertEquals(type(string), types.UnicodeType)
-
- def test_extract_mapping_from_filename(self):
- extractor = parser.PasteboardXML()
- extractor._read_file = mock.Mock(
- return_value = PasteboardXMLTests.sample_xml
- )
-
- string_mapping = extractor.extract_mapping_from_filename("some_file")
-
- self.assertEquals(
- string_mapping.string_mapping_dict,
- {
- u"English Category" : u"English Category",
- u"English Title" : u"English Title",
- u"English Test Element 1" : u"English Test Element 1",
- u"English Test Element 2" : u"English Test Element 2",
- }
- )
-
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
-
- @mock.patch.object(os, "mkdir")
- def test_translate(self, mkdir_func):
- xml_parser = parser.PasteboardXML()
- vcs_class = mock.Mock()
- xml_parser._read_file = mock.Mock(
- return_value = PasteboardXMLTests.sample_xml
- )
- test_file = teststringio.TestStringIO()
-
- xml_parser._open_file_for_writing = mock.Mock(return_value = test_file)
-
- self.assertEquals(
- xml_parser.translate(
- "pasteboard-test-en.xml",
- "Resources",
- {
- u"English Category" : u"Translated Category",
- u"English Title" : u"Translated Title",
- u"English Test Element 1" : u"Translated Test Element 1",
- u"English Test Element 2" : u"Translated Test Element 2",
- },
- "Japanese",
- "jp",
- True,
- vcs_class,
- "None"
- ),
- os.path.join("Resources", "pasteboard-test-jp.xml")
- )
-
- mkdir_func.assert_called_with(
- "Resources"
- )
-
- self.assertEquals(
- test_file.getvalue(),
- PasteboardXMLTests.sample_translated_xml
- )
diff --git a/burton/parser/test/propertiestests.py b/burton/parser/test/propertiestests.py
index 55dede7..98b46d7 100644
--- a/burton/parser/test/propertiestests.py
+++ b/burton/parser/test/propertiestests.py
@@ -6,7 +6,7 @@
import unittest
from burton import parser
-import teststringio
+from . import teststringio
class PropertiesTests(unittest.TestCase):
sample_file = \
@@ -68,9 +68,9 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
def test_write_mapping(self):
file = teststringio.TestStringIO()
diff --git a/burton/parser/test/rctests.py b/burton/parser/test/rctests.py
index ccf80fc..7c9b3c2 100644
--- a/burton/parser/test/rctests.py
+++ b/burton/parser/test/rctests.py
@@ -5,52 +5,52 @@
import unittest
from burton import parser
-import teststringio
+from . import teststringio
class RCTests(unittest.TestCase):
sample_rc = \
-"""#include "resource.h"\r
-#define APSTUDIO_READONLY_SYMBOLS\r
-#include "afxres.h"\r
-#undef APSTUDIO_READONLY_SYMBOLS\r
-\r
-#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)\r
-#ifdef _WIN32\r
-LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US\r
-#pragma code_page(1252)\r
-#endif //_WIN32\r
-\r
-BEGIN\r
- BLOCK "StringFileInfo"\r
- BEGIN\r
- BLOCK "040904b0"\r
- BEGIN\r
- VALUE "FileVersion", "2, 0, 0, 1"\r
- VALUE "ProductVersion", "2, 0, 0, 1"\r
- END\r
- END\r
- BLOCK "VarFileInfo"\r
- BEGIN\r
- VALUE "Translation", 0x409, 1200\r
- END\r
-END\r
-\r
-IDD_PROGRESS DIALOGEX 0, 0, 316, 66\r
-STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION\r
-CAPTION "Activating fonts"\r
-FONT 8, "MS Shell Dlg", 400, 0, 0x1\r
-BEGIN\r
- LTEXT "YetAnotherString",IDC_STATIC_HEADER,12,6,294,8\r
- CONTROL "",IDC_PROGRESS,"msctls_progress32",WS_BORDER,12,24,294,14\r
-END\r
-\r
-STRINGTABLE\r
-BEGIN\r
- SomeString "Translation for ""some"" string"\r
- SomeOtherString\r
- "Translation\\nfor the \\\r
-other string"\r
-END\r
+"""#include "resource.h"
+#define APSTUDIO_READONLY_SYMBOLS
+#include "afxres.h"
+#undef APSTUDIO_READONLY_SYMBOLS
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904b0"
+ BEGIN
+ VALUE "FileVersion", "2, 0, 0, 1"
+ VALUE "ProductVersion", "2, 0, 0, 1"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1200
+ END
+END
+
+IDD_PROGRESS DIALOGEX 0, 0, 316, 66
+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION
+CAPTION "Activating fonts"
+FONT 8, "MS Shell Dlg", 400, 0, 0x1
+BEGIN
+ LTEXT "YetAnotherString",IDC_STATIC_HEADER,12,6,294,8
+ CONTROL "",IDC_PROGRESS,"msctls_progress32",WS_BORDER,12,24,294,14
+END
+
+STRINGTABLE
+BEGIN
+ SomeString "Translation for ""some"" string"
+ SomeOtherString
+ "Translation\\nfor the \\
+other string"
+END
"""
sample_translated_rc = \
@@ -128,7 +128,7 @@ def test_extract_strings_from_filename(self):
)
for string in extracted_strings:
- self.assertEquals(type(string), types.UnicodeType)
+ self.assertEquals(type(string), str)
def test_extract_mapping_from_filename(self):
extractor = parser.RC()
@@ -149,9 +149,9 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
def test_filter_filenames(self):
extractor = parser.RC()
diff --git a/burton/parser/test/resxtests.py b/burton/parser/test/resxtests.py
index eae5ff1..9966616 100644
--- a/burton/parser/test/resxtests.py
+++ b/burton/parser/test/resxtests.py
@@ -3,8 +3,10 @@
import types
import unittest
+from io import BytesIO
+
from burton import parser
-import teststringio
+from . import teststringio
class RESXTests(unittest.TestCase):
sample_resx = \
@@ -31,8 +33,7 @@ class RESXTests(unittest.TestCase):
"""
sample_translated_resx = \
-"""
-
+str.encode("""
SomeString
@@ -51,11 +52,10 @@ class RESXTests(unittest.TestCase):
Translated ToolTip String
-
-"""
+""")
test_csproj = \
-"""
+str.encode("""
@@ -109,10 +109,10 @@ class RESXTests(unittest.TestCase):
-"""
+""")
expected_csproj = \
-"""
+str.encode("""
@@ -169,12 +169,13 @@ class RESXTests(unittest.TestCase):
-"""
+""")
def test_read_file(self):
extractor = parser.RESX()
dir = os.path.dirname(__file__)
file = os.path.join(dir, "test.resx")
+ self.maxDiff = None
self.assertEquals(
extractor._read_file(file),
@@ -200,7 +201,7 @@ def test_extract_strings_from_filename(self):
)
for string in extracted_strings:
- self.assertEquals(type(string), types.UnicodeType)
+ self.assertEquals(type(string), str)
def test_extract_mapping_from_filename(self):
extractor = parser.RESX()
@@ -220,9 +221,9 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
def test_filter_filenames(self):
extractor = parser.RESX()
@@ -255,7 +256,7 @@ def test_translate(self, mkdir_func):
resx_parser = parser.RESX()
vcs_class = mock.Mock()
resx_parser._read_file = mock.Mock(return_value = RESXTests.sample_resx)
- test_file = teststringio.TestStringIO('test.it.resx')
+ test_file = BytesIO()
resx_parser._open_file_for_writing = mock.Mock(return_value = test_file)
@@ -323,95 +324,9 @@ def test_translate(self, mkdir_func):
os.path.join("Resources", "Sample.it-IT.resx")
)
- @mock.patch.object(os, "mkdir")
- def test_translate_write_proj_file(self, mkdir_func):
- test_file = teststringio.TestStringIO(os.getcwd() + "/Resources/test/Sample.resx")
- csproj_file = teststringio.TestStringIO(os.getcwd() + "/Resources/test/Proj.csproj", RESXTests.test_csproj)
- files = {
- "Resources/test/Sample.it-IT.resx": test_file,
- "Resources/test/Proj.csproj": csproj_file
- }
-
- resx_parser = parser.RESX()
- vcs_class = mock.Mock()
- resx_parser._read_file = mock.Mock(return_value = RESXTests.sample_resx)
-
- resx_parser._open_file_for_writing = mock.Mock()
- def side_effect(arg):
- return files[arg]
- resx_parser._open_file_for_writing.side_effect = side_effect
-
- resx_parser._open_file_for_appending = mock.Mock(return_value = csproj_file)
-
- output_filename = resx_parser.translate(
- "Sample.resx",
- "Resources/test",
- {
- u"Translation for some string" :
- u"Traduzione di Bablefish per questa stringa",
- u"Translation for the other string" :
- u"Translation for the other string",
- u"Will not show up" : u"Will not show up",
- u"A ToolTip String" : u"Translated ToolTip String",
- },
- "Italian",
- "it-IT",
- True,
- vcs_class,
- "Resources/test/Proj.csproj"
- )
- self.assertEquals(
- csproj_file.getvalue(),
- RESXTests.expected_csproj
- )
-
- @mock.patch.object(os, "mkdir")
- def test_translate_write_proj_file_notexists(self, mkdir_func):
- # if a file does not exist in the project it shouldn't be added
- test_file = teststringio.TestStringIO(os.getcwd() + "/Resources/test/Sample2.resx")
- csproj_file = teststringio.TestStringIO(os.getcwd() + "/Resources/test/Proj.csproj", RESXTests.test_csproj)
- files = {
- "Resources/test/Sample2.it-IT.resx": test_file,
- "Resources/test/Proj.csproj": csproj_file
- }
-
- resx_parser = parser.RESX()
- vcs_class = mock.Mock()
- resx_parser._read_file = mock.Mock(return_value = RESXTests.sample_resx)
-
- resx_parser._open_file_for_writing = mock.Mock()
- def side_effect(arg):
- return files[arg]
- resx_parser._open_file_for_writing.side_effect = side_effect
-
- resx_parser._open_file_for_appending = mock.Mock(return_value = csproj_file)
-
- output_filename = resx_parser.translate(
- "Sample2.resx",
- "Resources/test",
- {
- u"Translation for some string" :
- u"Traduzione di Bablefish per questa stringa",
- u"Translation for the other string" :
- u"Translation for the other string",
- u"Will not show up" : u"Will not show up",
- u"A ToolTip String" : u"Translated ToolTip String",
- },
- "Italian",
- "it-IT",
- True,
- vcs_class,
- "Resources/test/Proj.csproj"
- )
- self.assertEquals(
- csproj_file.getvalue(),
- RESXTests.test_csproj
- )
-
-
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
def test_open_file_for_writing(self, open_func):
extractor = parser.RESX()
extractor._open_file_for_writing("filename")
- open_func.assert_called_with("filename", "w")
+ open_func.assert_called_with("filename", "wb")
diff --git a/burton/parser/test/stringsdicttests.py b/burton/parser/test/stringsdicttests.py
index 542fca8..4cf7c6c 100644
--- a/burton/parser/test/stringsdicttests.py
+++ b/burton/parser/test/stringsdicttests.py
@@ -1,14 +1,14 @@
-import cStringIO
import mock
import os
import types
import unittest
+from io import BytesIO, StringIO
+
from burton import parser
-import teststringio
class StringsDictTests(unittest.TestCase):
- sample_strings = \
+ sample_strings = str.encode(
"""
@@ -63,9 +63,9 @@ class StringsDictTests(unittest.TestCase):
-"""
+""")
- translated_strings = \
+ translated_strings = str.encode(
"""
@@ -120,7 +120,7 @@ class StringsDictTests(unittest.TestCase):
-"""
+""")
def test_read_file(self):
extractor = parser.StringsDict()
@@ -168,15 +168,15 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
@mock.patch.object(os, "mkdir")
def test_translate(self, mkdir_func):
- file = cStringIO.StringIO()
+ file = BytesIO()
translator = parser.StringsDict();
- test_file = teststringio.TestStringIO()
+ test_file = BytesIO()
vcs_class = mock.Mock()
translator._open_file_for_writing = mock.Mock(return_value = test_file)
diff --git a/burton/parser/test/stringstestts.py b/burton/parser/test/stringstestts.py
index 6cb8da0..dc04673 100644
--- a/burton/parser/test/stringstestts.py
+++ b/burton/parser/test/stringstestts.py
@@ -1,9 +1,10 @@
-import cStringIO
import mock
import os
import types
import unittest
+from io import StringIO
+
from burton import parser
class StringsTests(unittest.TestCase):
@@ -34,7 +35,7 @@ def test_open_file(self):
def test_extract_strings_from_filename(self):
extractor = parser.Strings()
extractor._open_file = mock.Mock(return_value = (
- cStringIO.StringIO(StringsTests.sample_strings),
+ StringIO(StringsTests.sample_strings),
"utf_8"
))
@@ -56,7 +57,7 @@ def test_extract_strings_from_filename(self):
def test_extract_mapping_from_filename(self):
extractor = parser.Strings()
extractor._open_file = mock.Mock(return_value = (
- cStringIO.StringIO(StringsTests.sample_strings),
+ StringIO(StringsTests.sample_strings),
"utf_8"
))
@@ -77,12 +78,12 @@ def test_extract_mapping_from_filename(self):
}
)
- for key, value in string_mapping.string_mapping_dict.iteritems():
- self.assertEquals(type(key), types.UnicodeType)
- self.assertEquals(type(value), types.UnicodeType)
+ for key, value in string_mapping.string_mapping_dict.items():
+ self.assertEquals(type(key), str)
+ self.assertEquals(type(value), str)
def test_write_mapping(self):
- file = cStringIO.StringIO()
+ file = StringIO()
parser.Strings().write_mapping(file, {
u'"SomeString"' : u'Translation for some string',
u'"SomeOtherString"' : u'Can\'t "quote" this!'
@@ -98,7 +99,7 @@ def test_write_mapping(self):
file.close()
def test_write_mapping_does_not_over_escape_newline(self):
- file = cStringIO.StringIO()
+ file = StringIO()
parser.Strings().write_mapping(file, {
u'"SomeString"' : u'String with a \r\n newline',
})
diff --git a/burton/parser/test/test.m b/burton/parser/test/test.m
index ef21029..d634e30 100644
--- a/burton/parser/test/test.m
+++ b/burton/parser/test/test.m
@@ -4,3 +4,14 @@
CFCopyLocalizedStringFromTable("SomeString2", "SomeTable", "");
NSLocalizedStringFromTableInBundle(@"SomeOtherString2", nil, @"ATable", "");
CFCopyLocalizedStringFromTableInBundle("YetAnotherString2", "ATable", NULL, "");
+
+// CustomFunctionLocalizedString(@"C++ Comment");
+
+/*
+Outside function
+CustomFunctionLocalizedString(@"C Comment");
+Another string outside function
+*/
+
+CustomFunctionLocalizedString(@"Custom String 1");
+OtherFunctionLocalizedString(@"Custom String 2");
\ No newline at end of file
diff --git a/burton/parser/test/teststringio.py b/burton/parser/test/teststringio.py
index dc68128..5a4b6df 100644
--- a/burton/parser/test/teststringio.py
+++ b/burton/parser/test/teststringio.py
@@ -1,11 +1,12 @@
-import StringIO
import os
-class TestStringIO(StringIO.StringIO):
+from io import StringIO
+
+class TestStringIO(StringIO):
def __init__(self, filename = None, buffer = None):
print("cwd: " + os.getcwd())
#print('file: ' + filename)
- StringIO.StringIO.__init__(self, buffer)
+ StringIO.__init__(self, buffer)
self.name = filename
def close(self):
diff --git a/burton/parser/test/utiltests.py b/burton/parser/test/utiltests.py
index b70815c..1d0524e 100644
--- a/burton/parser/test/utiltests.py
+++ b/burton/parser/test/utiltests.py
@@ -1,10 +1,11 @@
import chardet
-import cStringIO
import mock
import struct
import types
import unittest
+from io import BytesIO
+
import burton.parser
class UtilTests(unittest.TestCase):
@@ -14,13 +15,13 @@ def test_filter_string_unescapes_slashes(self):
newline_string2 = burton.parser.filter_string("New\\r\\nline")
self.assertEquals(apostrophe_string, u"There's no escape!")
- self.assertEquals(type(apostrophe_string), types.UnicodeType)
+ self.assertEquals(type(apostrophe_string), str)
self.assertEquals(newline_string, u"New\\r\\nline")
- self.assertEquals(type(newline_string), types.UnicodeType)
+ self.assertEquals(type(newline_string), str)
self.assertEquals(newline_string2, u"New\\r\\nline")
- self.assertEquals(type(newline_string2), types.UnicodeType)
+ self.assertEquals(type(newline_string2), str)
def test_replace_params(self):
self.assertEquals(
@@ -51,20 +52,20 @@ def test_restore_platform_specific_params(self):
def test_detect_encoding(self, mock_func):
mock_func.return_value = { "encoding" : "ascii" }
- test_file = cStringIO.StringIO("this is an ascii string")
+ test_file = BytesIO(b"this is an ascii string")
self.assertEquals(burton.parser.detect_encoding(test_file), "ascii")
test_file.close()
bom = struct.pack("BBB", 0xEF, 0xBB, 0xBF)
- test_file = cStringIO.StringIO(bom + "UTF-8 String")
+ test_file = BytesIO(bom + b"UTF-8 String")
self.assertEquals(burton.parser.detect_encoding(test_file), "utf_8")
bom = struct.pack("BB", 0xFE, 0xFF)
- test_file = cStringIO.StringIO(bom + "UTF-16 BE String")
+ test_file = BytesIO(bom + b"UTF-16 BE String")
self.assertEquals(burton.parser.detect_encoding(test_file), "utf_16")
bom = struct.pack("BBBB", 0x00, 0x00, 0xFE, 0xFF)
- test_file = cStringIO.StringIO(bom + "UTF-16 32 String")
+ test_file = BytesIO(bom + b"UTF-16 32 String")
self.assertEquals(burton.parser.detect_encoding(test_file), "utf_32")
def _throw_exception(file):
@@ -72,5 +73,5 @@ def _throw_exception(file):
mock_func.side_effect = _throw_exception
- test_file = cStringIO.StringIO("this is a strange string")
+ test_file = BytesIO(b"this is a strange string")
self.assertEquals(burton.parser.detect_encoding(test_file), "iso-8859-1")
diff --git a/burton/parser/util.py b/burton/parser/util.py
index 90ef09a..c0fbed7 100644
--- a/burton/parser/util.py
+++ b/burton/parser/util.py
@@ -4,11 +4,12 @@ def detect_encoding(file):
"""This function attempts to detect the character encoding of a file."""
encoding = None
- bom = tuple(map(ord, file.read(4)))
+ bom = file.read(4)
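+    # the file is read in binary mode, so the BOM is a bytes object and is matched against the byte literals below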
encoding = {
- ( 0x00, 0x00, 0xFE, 0xFF ) : "utf_32", #BE
- ( 0xFF, 0xFE, 0x00, 0x00 ) : "utf_32", #LE
+
+ b'\x00\x00\xfe\xff' : "utf_32", #BE
+ b'\xff\xfe\x00\x00' : "utf_32", #LE
}.get(bom, None)
if encoding is not None:
@@ -17,7 +18,7 @@ def detect_encoding(file):
bom = bom[:3]
file.seek(3)
encoding = {
- ( 0xEF, 0xBB, 0xBF ) : "utf_8",
+ b'\xef\xbb\xbf' : "utf_8",
}.get(bom, None)
if encoding is not None:
@@ -26,8 +27,8 @@ def detect_encoding(file):
bom = bom[:2]
file.seek(2)
encoding = {
- ( 0xFE, 0xFF ) : "utf_16", #BE
- ( 0xFF, 0xFE ) : "utf_16", #LE
+ b'\xfe\xff' : "utf_16", #BE
+ b'\xff\xfe' : "utf_16", #LE
}.get(bom, None)
if encoding is not None:
@@ -47,9 +48,8 @@ def detect_encoding(file):
def filter_string(string):
string = string.replace("\\r", "\\\\r").replace("\\n", "\\\\n")
string = string.replace("\r", "\\\\r").replace("\n", "\\\\n")
- return unicode(
- string.encode('utf-8').decode("string_escape"), 'utf-8'
- )
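+    # Python 3 has no "string_escape" codec; "unicode-escape" unescapes the backslash sequences instead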
+ string = string.encode('utf-8').decode('unicode-escape')
+ return string
def replace_params(raw_string):
"""This function replaces format placeholders with incrementing numbers
@@ -68,10 +68,10 @@ def replace_params(raw_string):
printf_specifiers = "cdieEfgGosuxXpn%@"
digits = "1234657890"
- output_string = unicode("")
+ output_string = ""
replaced_strings = []
num_replacements = 0
- current_token = unicode("")
+ current_token = ""
in_printf = False
in_percent_escape = False
in_printf_flags = False
@@ -123,7 +123,7 @@ def replace_params(raw_string):
replaced_strings.append(current_token)
output_string += "{" + str(num_replacements) + "}"
num_replacements += 1
- current_token = unicode("")
+ current_token = ""
else:
output_string += current_token[:-1]
current_token = c
@@ -141,11 +141,11 @@ def replace_params(raw_string):
replaced_strings.append(current_token)
output_string += "{" + str(num_replacements) + "}"
num_replacements += 1
- current_token = unicode("")
+ current_token = ""
elif c not in digits:
in_num_param = False
output_string += current_token
- current_token = unicode("")
+ current_token = ""
if not in_printf and not in_num_param and not in_percent_escape:
if c == "%":
@@ -162,7 +162,7 @@ def replace_params(raw_string):
in_num_param = True
else:
output_string += current_token
- current_token = unicode("")
+ current_token = ""
in_percent_escape = False
@@ -178,7 +178,7 @@ def restore_platform_specific_params(string, replaced_strings):
string = string.replace("{", opening_tag)
string = string.replace("}", closing_tag)
- for index in xrange(0, len(replaced_strings)):
+ for index in range(0, len(replaced_strings)):
string = string.replace(
opening_tag + str(index) + closing_tag,
replaced_strings[index]
diff --git a/burton/run_tests.py b/burton/run_tests.py
index ffe197d..bfbf645 100644
--- a/burton/run_tests.py
+++ b/burton/run_tests.py
@@ -7,14 +7,15 @@
sh.setFormatter(logging.Formatter("[%(levelname)s] %(message)s"))
logger.addHandler(sh)
-if sys.hexversion < 0x02070000:
- logger.error("Python 2.7 or grater is required to run burton.")
+if sys.hexversion < 0x03070000:
+    logger.error("Python 3.7 or greater is required to run burton.")
exit(1)
-requirements = ["coverage", "mock", "nose", "testfixtures"]
+requirements = ["chardet", "lxml", "coverage", "mock", "nose", "testfixtures"]
try:
for requirement in requirements:
+ print("Importing " + requirement)
__import__(requirement)
except ImportError:
@@ -23,7 +24,7 @@
logger.error("Installing missing dependencies")
- args = ["/usr/bin/easy_install"]
+ args = ["pip", "install"]
args.extend(requirements)
return_code = subprocess.call(args)
diff --git a/burton/stringextractor/nib.py b/burton/stringextractor/nib.py
index 1528552..8bff9fc 100644
--- a/burton/stringextractor/nib.py
+++ b/burton/stringextractor/nib.py
@@ -21,7 +21,11 @@ def extract_strings_from_files(self, filenames):
filtered_filenames
)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return_values = set([])
localizable_key = "com.apple.ibtool.document.localizable-strings"
diff --git a/burton/stringextractor/rc.py b/burton/stringextractor/rc.py
index e1d1794..76508fa 100644
--- a/burton/stringextractor/rc.py
+++ b/burton/stringextractor/rc.py
@@ -6,7 +6,11 @@ class RC(stringextractor.Base):
def __init__(self):
stringextractor.Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return_values = set([])
block = "BLOCK"
diff --git a/burton/stringextractor/resx.py b/burton/stringextractor/resx.py
index d42c615..6e9fbfe 100644
--- a/burton/stringextractor/resx.py
+++ b/burton/stringextractor/resx.py
@@ -6,7 +6,11 @@ class RESX(stringextractor.Base):
def __init__(self):
stringextractor.Base.__init__(self)
- def extract_strings_from_filename(self, filename):
+ def extract_strings_from_filename(
+ self,
+ filename,
+ additional_function_names = []
+ ):
return_values = set([])
data_tag = "data"
diff --git a/burton/stringextractor/test/baseextractortests.py b/burton/stringextractor/test/baseextractortests.py
index 6e7de73..5ac2828 100644
--- a/burton/stringextractor/test/baseextractortests.py
+++ b/burton/stringextractor/test/baseextractortests.py
@@ -12,7 +12,7 @@ def test_extract_strings_from_files(self):
extractor = stringextractor.Base()
extractor.extract_strings_from_filename = mock.Mock(side_effect =
- lambda(x): individual_file_strings.pop()
+ lambda x: individual_file_strings.pop()
)
self.assertEquals(
diff --git a/burton/stringextractor/test/rcextractortests.py b/burton/stringextractor/test/rcextractortests.py
index e09a408..b87ca07 100644
--- a/burton/stringextractor/test/rcextractortests.py
+++ b/burton/stringextractor/test/rcextractortests.py
@@ -1,8 +1,9 @@
-import cStringIO
import mock
import os
import unittest
+from io import StringIO
+
import stringextractor
class RCExtractorTests(unittest.TestCase):
@@ -43,7 +44,7 @@ def test_open_file(self):
def test_extract_strings_from_filename(self):
extractor = stringextractor.RC()
extractor._open_file = mock.Mock(
- return_value = cStringIO.StringIO(RCExtractorTests.sample_resx)
+ return_value = StringIO(RCExtractorTests.sample_resx)
)
self.assertEquals(
diff --git a/burton/test/burtontests.py b/burton/test/burtontests.py
index 77ba3ac..16e6377 100644
--- a/burton/test/burtontests.py
+++ b/burton/test/burtontests.py
@@ -18,17 +18,19 @@ def __init__(self):
def extract_strings_from_files(
self,
filenames,
- strings_to_ignore = []
+ strings_to_ignore = [],
+ additional_function_names = []
):
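+        # dict.keys() returns a view in Python 3; wrap it in list() so callers get a real sequence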
- return self.extract_string_mapping_from_files(
+ return list(self.extract_string_mapping_from_files(
filenames,
strings_to_ignore
- ).get_string_mapping_dict().keys()
+ ).get_string_mapping_dict().keys())
def extract_string_mapping_from_files(
self,
filenames,
- strings_to_ignore = []
+ strings_to_ignore = [],
+ additional_function_names = []
):
return_mapping = burton.StringMapping()
@@ -58,69 +60,6 @@ def __init__(self):
class BurtonTests(unittest.TestCase):
- @mock.patch.object(logging, "getLogger")
- def test_setup_default_logger(self, log_func):
- handlers = []
- def _add_handler(handler):
- handlers.append(handler)
-
- logger = mock.Mock()
- logger.addHandler.side_effect = _add_handler
- log_func.return_value = logger
-
- burton.setup_default_logger()
-
- self.assertEquals(len(handlers), 2)
- self.assertEquals(
- type(handlers[0]),
- type(logging.StreamHandler(sys.stdout))
- )
- self.assertEquals(
- type(handlers[1]),
- type(burton.BurtonLoggingHandler())
- )
-
- @mock.patch.object(logging, "getLogger")
- def test_config_logger(self, log_func):
- handlers = []
- def _add_handler(handler):
- handlers.append(handler)
-
- logger = mock.Mock()
- logger.addHandler.side_effect = _add_handler
- log_func.return_value = logger
-
- sample_filename = "burton_test_file"
- config_dict = {
- burton.Config.logging_level : "info",
- burton.Config.log_filename : "None",
- }
-
- def _config_get(key):
- return config_dict[key]
-
- conf = mock.Mock()
- conf.get.side_effect = _config_get
-
- logging_levels = {
- "debug" : logging.DEBUG,
- "info" : logging.INFO,
- "warning" : logging.WARNING,
- "error" : logging.ERROR,
- "critical" : logging.CRITICAL
- }
-
- num_times_called = 0
- for logging_level in logging_levels:
- config_dict[burton.Config.logging_level] = logging_level
-
- burton.config_logger(conf)
- logger.setLevel.assert_called_with(logging_levels[logging_level])
- num_times_called += 1
-
- self.assertEquals(num_times_called, 5)
- self.assertEquals(len(handlers), 0)
-
def test_class_from_string(self):
self.assertEquals(burton._class_from_string(None), None)
self.assertEquals(burton._class_from_string("burton.FakeClass"), None)
@@ -349,7 +288,7 @@ def test_check_for_unmapped_strings(self):
captured_log.uninstall()
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
@mock.patch.object(burton, "_open_translation_file_for_language")
def test_update_translation_file(
self,
@@ -406,7 +345,7 @@ def test_update_translation_file(
captured_log.uninstall()
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
@mock.patch.object(burton, "_open_translation_file_for_language")
def test_update_translation_file_ignores_whitespace_entries(
self,
@@ -539,7 +478,7 @@ def test_get_localized_resource_instance(self):
type(TestRCParser())
)
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
@mock.patch.object(os.path, "exists")
def test_open_translation_file_for_language(self, exists_func, open_func):
exists_func.return_value = False
@@ -625,7 +564,7 @@ def _config_get(key):
os.path.abspath(xlf_repo_path)
)
- @mock.patch("__builtin__.exit")
+ @mock.patch("builtins.exit")
@mock.patch.object(os, "chdir")
@mock.patch.object(os.path, "isdir")
@mock.patch.object(burton, "_create_config_instance")
@@ -825,7 +764,7 @@ def _throw_exception(conf, native_translations, vcs_class):
ran_all_tests = True
except Exception as e:
- print e
+ print(e)
self.assertFalse(True)
finally:
if os.path.exists(test_db_name):
@@ -833,7 +772,7 @@ def _throw_exception(conf, native_translations, vcs_class):
self.assertTrue(ran_all_tests)
- @mock.patch("__builtin__.exit")
+ @mock.patch("builtins.exit")
@mock.patch.object(burton, "_create_config_instance")
def test_run_fails_if_there_are_no_platforms_in_config_file(
self,
@@ -860,24 +799,7 @@ def test_run_fails_if_there_are_no_platforms_in_config_file(
exit_func.assert_called_with(1)
- @mock.patch.object(logging, "FileHandler")
- def test_log_filename(self, logging_constructor):
- config_dict = {
- burton.Config.logging_level : "info",
- burton.Config.log_filename : "some_filename.log"
- }
-
- def _config_get(key):
- return config_dict[key]
-
- conf = mock.Mock()
- conf.get.side_effect = _config_get
-
- burton.config_logger(conf)
-
- logging_constructor.assert_called_with("some_filename.log")
-
- @mock.patch("__builtin__.exit")
+ @mock.patch("builtins.exit")
@mock.patch.object(burton, "_create_config_instance")
def test_exits_if_command_line_arguments_cannot_be_parsed(
self,
@@ -904,7 +826,7 @@ def test_exits_if_command_line_arguments_cannot_be_parsed(
exit_func.assert_called_with(1)
- @mock.patch("__builtin__.exit")
+ @mock.patch("builtins.exit")
@mock.patch.object(burton, "_create_config_instance")
def test_exits_if_cannot_read_platforms_from_config(
self,
diff --git a/burton/test/configtests.py b/burton/test/configtests.py
index af2bc52..9148f03 100644
--- a/burton/test/configtests.py
+++ b/burton/test/configtests.py
@@ -1,6 +1,5 @@
import codecs
import collections
-import cStringIO
import logging
import mock
import os
@@ -8,6 +7,8 @@
import testfixtures
import unittest
+from io import StringIO
+
import burton
class ConfigTests(unittest.TestCase):
@@ -17,7 +18,7 @@ def test_set_and_get(self):
self.assertEquals(c.get("a_param"), 1)
def test_overrides_default_values_with_platform_specific_values(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = 0
overidden_param = 0
@@ -37,7 +38,7 @@ def test_overrides_default_values_with_platform_specific_values(self):
self.assertEquals(c.get("overidden_param"), 1)
def test_returns_false_if_missing_required_variable(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
overidden_param = 0
@@ -57,7 +58,7 @@ def test_returns_false_if_missing_required_variable(self):
captured_log.uninstall()
def test_returns_false_if_config_file_contains_unknown_variable(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = 0
overidden_param = 0
@@ -79,7 +80,7 @@ def test_returns_false_if_config_file_contains_unknown_variable(self):
captured_log.uninstall()
def test_returns_false_if_config_file_does_not_contain_platform(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = 0
overidden_param = 0
@@ -100,7 +101,7 @@ def test_returns_false_if_config_file_does_not_contain_platform(self):
captured_log.uninstall()
def test_parses_json_values(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = [ "1", 2, "three" ]
overidden_param = 0
@@ -119,7 +120,7 @@ def test_parses_json_values(self):
)
def test_calls_custom_methods_for_specified_keys(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = 0
overidden_param = 0
@@ -145,7 +146,7 @@ def test_calls_custom_methods_for_specified_keys(self):
target.custom_function.assert_called_with(0)
def test_creates_regexes_from_file_extensions(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
extensions_to_parse = [ ]
@@ -157,15 +158,12 @@ def test_creates_regexes_from_file_extensions(self):
self.assertTrue(c.readfp(config_fp, "platform1"))
self.assertEquals(
- map(
- lambda(regex): regex.pattern,
- c.get(burton.Config.extensions_to_parse)
- ),
+ [regex.pattern for regex in c.get(burton.Config.extensions_to_parse)],
[ ".*\.resx$", ".*\.nib$" ]
)
def test_creates_regexes_from_disallowed_paths(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
disallowed_paths = [ ]
@@ -177,15 +175,12 @@ def test_creates_regexes_from_disallowed_paths(self):
self.assertTrue(c.readfp(config_fp, "platform1"))
self.assertEquals(
- map(
- lambda(regex): regex.pattern,
- c.get(burton.Config.disallowed_paths)
- ),
+ [regex.pattern for regex in c.get(burton.Config.disallowed_paths)],
[ "Shared Code/cpp_core/output", "build" ]
)
def test_creates_regexes_from_mapping_files(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
mapping_files = [ ]
@@ -197,10 +192,7 @@ def test_creates_regexes_from_mapping_files(self):
self.assertTrue(c.readfp(config_fp, "platform1"))
self.assertEquals(
- map(
- lambda(regex): regex.pattern,
- c.get(burton.Config.mapping_files)
- ),
+ [regex.pattern for regex in c.get(burton.Config.mapping_files)],
[ "\\.strings$", "\\.rc$", "\\.resx$" ]
)
@@ -278,7 +270,7 @@ def test_parse_command_line_options(self):
(
burton.logger_name,
"ERROR",
- "\n\t".join(c._command_line_mapping.keys())
+ "\n\t".join(list(c._command_line_mapping.keys()))
)
)
@@ -351,7 +343,7 @@ def test_returns_false_if_missing_command_line_arguments(self):
captured_log.uninstall()
def test_uses_defaults(self):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
[platform1]
""".replace(" ", ""))
@@ -374,7 +366,7 @@ def test_uses_defaults(self):
@mock.patch.object(os.path, "exists")
def test_parse_config_file(self, mock_path_exists_func):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
[platform1]
""".replace(" ", ""))
@@ -415,7 +407,7 @@ def test_parse_config_file_creates_new_file_when_necessary(
def test_create_new_config_file(self):
lines = []
def _write(line):
- lines.append(line)
+ lines.append(line.decode())
write_fp = mock.Mock()
write_fp.write = mock.Mock(side_effect = _write)
@@ -441,7 +433,7 @@ def _write(line):
captured_log.uninstall()
self.assertEquals(
- "".join(lines),
+ str.encode("".join(lines)),
c._get_default_config_file().read()
)
@@ -460,7 +452,7 @@ def _config_get(key):
conf.get = mock.Mock(side_effect = _config_get)
mock_path_exists_func.return_value = True
- open_func.return_value = cStringIO.StringIO("""ignore1
+ open_func.return_value = StringIO("""ignore1
ignore2
ignore3""")
@@ -492,14 +484,14 @@ def test_parse_value(self):
finally:
self.assertTrue(test_passed)
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
def test_open_for_reading(self, open_func):
c = burton.Config()
c._open_for_reading("filename")
open_func.assert_called_with("filename", "r")
- @mock.patch("__builtin__.open")
+ @mock.patch("builtins.open")
def test_open_for_writing(self, open_func):
c = burton.Config()
c._open_for_writing("filename")
@@ -514,7 +506,7 @@ def test_root_path_defaults_to_cwd(self):
@mock.patch.object(os.path, "exists")
def test_parse_config_file_for_next_platform(self, mock_path_exists_func):
def _open_file(filename):
- config_fp = cStringIO.StringIO("""
+ config_fp = StringIO("""
[DEFAULT]
default_param = 0
overidden_param = 0
diff --git a/burton/translation/__init__.py b/burton/translation/__init__.py
index a0e16ae..61465a0 100644
--- a/burton/translation/__init__.py
+++ b/burton/translation/__init__.py
@@ -1,2 +1,2 @@
-from base import Base
-from xlf import XLF
+from .base import Base
+from .xlf import XLF
diff --git a/burton/translation/base.py b/burton/translation/base.py
index ea67152..ea25e10 100644
--- a/burton/translation/base.py
+++ b/burton/translation/base.py
@@ -57,8 +57,9 @@ def get_translation_dict(self):
this object. The return value can be freely modified without affecting
the contents of this object"""
copied_dict = self._translation_dict.copy()
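+        # take a list snapshot of the keys; in Python 3, .keys() returns a view tied to the dict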
+ keys = list(copied_dict.keys())
- for key in copied_dict.keys():
+ for key in keys:
if not key.endswith(ellipsis) and not key.endswith(three_dots):
mac_key = key + ellipsis
win_key = key + three_dots
diff --git a/burton/translation/test/xlftests.py b/burton/translation/test/xlftests.py
index 35749e2..6452f0b 100644
--- a/burton/translation/test/xlftests.py
+++ b/burton/translation/test/xlftests.py
@@ -1,11 +1,12 @@
-import cStringIO
import random
import unittest
+from io import BytesIO
+
from burton import translation
class XLFTests(unittest.TestCase):
- test_xlf = """
+ test_xlf = str.encode("""
@@ -28,9 +29,9 @@ class XLFTests(unittest.TestCase):