Pier Angelo Vendrame pushed to branch base-browser-128.6.0esr-14.5-1 at The Tor Project / Applications / Tor Browser
Commits:
- 5f316b25 by Henry Wilkes at 2025-01-22T12:13:24+01:00
- 9fc48603 by Henry Wilkes at 2025-01-22T12:13:34+01:00
11 changed files:
- .gitlab-ci.yml
- + .gitlab/ci/jobs/update-translations.yml
- + tools/base-browser/l10n/combine-translation-versions.py
- + tools/base-browser/l10n/combine/__init__.py
- + tools/base-browser/l10n/combine/combine.py
- + tools/base-browser/l10n/combine/tests/README
- + tools/base-browser/l10n/combine/tests/__init__.py
- + tools/base-browser/l10n/combine/tests/test_android.py
- + tools/base-browser/l10n/combine/tests/test_dtd.py
- + tools/base-browser/l10n/combine/tests/test_fluent.py
- + tools/base-browser/l10n/combine/tests/test_properties.py
Changes:
1 | 1 | stages:
|
2 | 2 | - lint
|
3 | + - update-translations
|
|
3 | 4 | |
4 | 5 | variables:
|
5 | 6 | IMAGE_PATH: containers.torproject.org/tpo/applications/tor-browser/base:latest
|
1 | +.update-translation-base:
|
|
2 | + stage: update-translations
|
|
3 | + rules:
|
|
4 | + - if: ($TRANSLATION_FILES != "" && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PIPELINE_SOURCE == "push")
|
|
5 | + changes:
|
|
6 | + - "**/*.ftl"
|
|
7 | + - "**/*.properties"
|
|
8 | + - "**/*.dtd"
|
|
9 | + - "**/*strings.xml"
|
|
10 | + - "**/update-translations.yml"
|
|
11 | + - "**/l10n/combine/combine.py"
|
|
12 | + - "**/l10n/combine-translation-versions.py"
|
|
13 | + - if: ($TRANSLATION_FILES != "" && $FORCE_UPDATE_TRANSLATIONS == "true")
|
|
14 | + variables:
|
|
15 | + COMBINED_FILES_JSON: "combined-translation-files.json"
|
|
16 | + TRANSLATION_FILES: ''
|
|
17 | + |
|
18 | + |
|
19 | +combine-en-US-translations:
|
|
20 | + extends: .update-translation-base
|
|
21 | + needs: []
|
|
22 | + image: python
|
|
23 | + variables:
|
|
24 | + PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
|
|
25 | + cache:
|
|
26 | + paths:
|
|
27 | + - .cache/pip
|
|
28 | + # Artifact is for the translation project job.
|
|
29 | + artifacts:
|
|
30 | + paths:
|
|
31 | + - "$COMBINED_FILES_JSON"
|
|
32 | + expire_in: "60 min"
|
|
33 | + reports:
|
|
34 | + dotenv: job_id.env
|
|
35 | + # Don't load artifacts for this job.
|
|
36 | + dependencies: []
|
|
37 | + script:
|
|
38 | + # Save this CI_JOB_ID to the dotenv file to be used in the variables for the
|
|
39 | + # push-en-US-translations job.
|
|
40 | + - echo 'COMBINE_TRANSLATIONS_JOB_ID='"$CI_JOB_ID" >job_id.env
|
|
41 | + - pip install compare_locales
|
|
42 | + - python ./tools/base-browser/l10n/combine-translation-versions.py "$CI_COMMIT_BRANCH" "$TRANSLATION_FILES" "$COMBINED_FILES_JSON"
|
|
43 | + |
|
44 | +push-en-US-translations:
|
|
45 | + extends: .update-translation-base
|
|
46 | + needs:
|
|
47 | + - job: combine-en-US-translations
|
|
48 | + variables:
|
|
49 | + COMBINED_FILES_JSON_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${COMBINE_TRANSLATIONS_JOB_ID}/artifacts/${COMBINED_FILES_JSON}"
|
|
50 | + trigger:
|
|
51 | + strategy: depend
|
|
52 | + project: tor-browser-translation-bot/translation
|
|
53 | + branch: tor-browser-ci |
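For reference, the combine-en-US-translations job above hands $TRANSLATION_FILES straight to combine-translation-versions.py, which expects a JSON array describing each translation file. The sketch below shows that structure using the keys the script actually reads; the file names, directories and branch values are purely illustrative.

    import json

    # Illustrative value for the TRANSLATION_FILES CI variable.
    translation_files = [
        {
            # Basename of the translation file to look up in each branch.
            "name": "example.ftl",
            # Optional: restrict the search to these directories.
            "where": ["example/locales"],
            # Optional: destination directory beneath en-US/ in the translation repository.
            "directory": "",
            # Branch of the translation repository that this file is pushed to.
            "branch": "example-branch",
            # Optional: skip the legacy branch for this file.
            "exclude-legacy": False,
        }
    ]

    print(json.dumps(translation_files))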
1 | +import argparse
|
|
2 | +import json
|
|
3 | +import logging
|
|
4 | +import os
|
|
5 | +import re
|
|
6 | +import subprocess
|
|
7 | + |
|
8 | +from combine import combine_files
|
|
9 | + |
|
10 | +arg_parser = argparse.ArgumentParser(
|
|
11 | + description="Combine a translation file across two different versions"
|
|
12 | +)
|
|
13 | + |
|
14 | +arg_parser.add_argument(
|
|
15 | + "current_branch", metavar="<current-branch>", help="branch for the newest version"
|
|
16 | +)
|
|
17 | +arg_parser.add_argument(
|
|
18 | + "files", metavar="<files>", help="JSON specifying the translation files"
|
|
19 | +)
|
|
20 | +arg_parser.add_argument("outname", metavar="<json>", help="name of the json output")
|
|
21 | + |
|
22 | +args = arg_parser.parse_args()
|
|
23 | + |
|
24 | +logging.basicConfig()
|
|
25 | +logger = logging.getLogger("combine-translation-versions")
|
|
26 | +logger.setLevel(logging.INFO)
|
|
27 | + |
|
28 | + |
|
29 | +def in_pink(msg: str) -> str:
|
|
30 | + """Present a message as pink in the terminal output.
|
|
31 | + |
|
32 | + :param msg: The message to wrap in pink.
|
|
33 | + :returns: The message to print to terminal.
|
|
34 | + """
|
|
35 | + # Pink and bold.
|
|
36 | + return f"\x1b[1;38;5;212m{msg}\x1b[0m"
|
|
37 | + |
|
38 | + |
|
39 | +def git_run(git_args: list[str]) -> None:
|
|
40 | + """Run a git command.
|
|
41 | + |
|
42 | + :param git_args: The arguments that should follow "git".
|
|
43 | + """
|
|
44 | + # Add some text to give context to git's stderr appearing in the log.
|
|
45 | + logger.info("Running: " + in_pink("git " + " ".join(git_args)))
|
|
46 | + subprocess.run(["git", *git_args], check=True)
|
|
47 | + |
|
48 | + |
|
49 | +def git_text(git_args: list[str]) -> str:
|
|
50 | + """Get the text output for a git command.
|
|
51 | + |
|
52 | + :param git_args: The arguments that should follow "git".
|
|
53 | + :returns: The stdout of the command.
|
|
54 | + """
|
|
55 | + logger.info("Running: " + in_pink("git " + " ".join(git_args)))
|
|
56 | + return subprocess.run(
|
|
57 | + ["git", *git_args], text=True, check=True, stdout=subprocess.PIPE
|
|
58 | + ).stdout
|
|
59 | + |
|
60 | + |
|
61 | +def git_lines(git_args: list[str]) -> list[str]:
|
|
62 | + """Get the lines from a git command.
|
|
63 | + |
|
64 | + :param git_args: The arguments that should follow "git".
|
|
65 | + :returns: The non-empty lines from stdout of the command.
|
|
66 | + """
|
|
67 | + return [line for line in git_text(git_args).split("\n") if line]
|
|
68 | + |
|
69 | + |
|
70 | +class TranslationFile:
|
|
71 | + """Represents a translation file."""
|
|
72 | + |
|
73 | + def __init__(self, path: str, content: str) -> None:
|
|
74 | + self.path = path
|
|
75 | + self.content = content
|
|
76 | + |
|
77 | + |
|
78 | +class BrowserBranch:
|
|
79 | + """Represents a browser git branch."""
|
|
80 | + |
|
81 | + def __init__(self, branch_name: str, is_head: bool = False) -> None:
|
|
82 | + """Create a new instance.
|
|
83 | + |
|
84 | + :param branch_name: The branch's git name.
|
|
85 | + :param is_head: Whether the branch matches "HEAD".
|
|
86 | + """
|
|
87 | + version_match = re.match(
|
|
88 | + r"(?P<prefix>[a-z]+\-browser)\-"
|
|
89 | + r"(?P<firefox>[0-9]+(?:\.[0-9]+){1,2})esr\-"
|
|
90 | + r"(?P<browser>[0-9]+\.[05])\-"
|
|
91 | + r"(?P<number>[0-9]+)$",
|
|
92 | + branch_name,
|
|
93 | + )
|
|
94 | + |
|
95 | + if not version_match:
|
|
96 | + raise ValueError(f"Unable to parse the version from the ref {branch_name}")
|
|
97 | + |
|
98 | + self.name = branch_name
|
|
99 | + self.prefix = version_match.group("prefix")
|
|
100 | + self.browser_version = version_match.group("browser")
|
|
101 | + self._is_head = is_head
|
|
102 | + self._ref = "HEAD" if is_head else f"origin/{branch_name}"
|
|
103 | + |
|
104 | + firefox_nums = [int(n) for n in version_match.group("firefox").split(".")]
|
|
105 | + if len(firefox_nums) == 2:
|
|
106 | + firefox_nums.append(0)
|
|
107 | + browser_nums = [int(n) for n in self.browser_version.split(".")]
|
|
108 | + branch_number = int(version_match.group("number"))
|
|
109 | + # Prioritise the Firefox ESR version, then the browser version, then the
|
|
110 | + # branch number.
|
|
111 | + self._ordered = (
|
|
112 | + firefox_nums[0],
|
|
113 | + firefox_nums[1],
|
|
114 | + firefox_nums[2],
|
|
115 | + browser_nums[0],
|
|
116 | + browser_nums[1],
|
|
117 | + branch_number,
|
|
118 | + )
|
|
119 | + |
|
120 | + # The browser minor version is only ever "0" or "5", so doubling the
|
|
121 | + # version always gives an integer.
|
|
122 | + self._browser_int_version = int(2 * float(self.browser_version))
|
|
123 | + |
|
124 | + self._file_paths: list[str] | None = None
|
|
125 | + |
|
126 | + def release_below(self, other: "BrowserBranch", num: int) -> bool:
|
|
127 | + """Determine whether another branch is within range of a previous
|
|
128 | + browser release.
|
|
129 | + |
|
130 | + The browser versions are expected to increment by "0.5", and a previous
|
|
131 | + release branch's version is expected to be `num * 0.5` behind the
|
|
132 | + current one.
|
|
133 | + |
|
134 | + :param other: The branch to compare.
|
|
135 | + :param num: The number of "0.5" releases behind to test with.
|
|
136 | + """
|
|
137 | + return other._browser_int_version == self._browser_int_version - num
|
|
138 | + |
|
139 | + def __lt__(self, other: "BrowserBranch") -> bool:
|
|
140 | + return self._ordered < other._ordered
|
|
141 | + |
|
142 | + def __gt__(self, other: "BrowserBranch") -> bool:
|
|
143 | + return self._ordered > other._ordered
|
|
144 | + |
|
145 | + def _matching_dirs(self, path: str, dir_list: list[str]) -> bool:
|
|
146 | + """Test that a path is contained in the list of dirs.
|
|
147 | + |
|
148 | + :param path: The path to check.
|
|
149 | + :param dir_list: The list of directories to check against.
|
|
150 | + :returns: Whether the path matches.
|
|
151 | + """
|
|
152 | + for dir_path in dir_list:
|
|
153 | + if os.path.commonpath([dir_path, path]) == dir_path:
|
|
154 | + return True
|
|
155 | + return False
|
|
156 | + |
|
157 | + def get_file(
|
|
158 | + self, filename: str, search_dirs: list[str] | None
|
|
159 | + ) -> TranslationFile | None:
|
|
160 | + """Fetch the file content for the named file in this branch.
|
|
161 | + |
|
162 | + :param filename: The name of the file to fetch the content for.
|
|
163 | + :param search_dirs: The directories to restrict the search to, or None
|
|
164 | + to search for the file anywhere.
|
|
165 | + :returns: The file, or `None` if no file could be found.
|
|
166 | + """
|
|
167 | + if self._file_paths is None:
|
|
168 | + if not self._is_head:
|
|
169 | + # Minimal fetch of non-HEAD branch to get the file paths.
|
|
170 | + # Individual file blobs will be downloaded as needed.
|
|
171 | + git_run(
|
|
172 | + ["fetch", "--depth=1", "--filter=blob:none", "origin", self.name]
|
|
173 | + )
|
|
174 | + self._file_paths = git_lines(
|
|
175 | + ["ls-tree", "-r", "--format=%(path)", self._ref]
|
|
176 | + )
|
|
177 | + |
|
178 | + matching = [
|
|
179 | + path
|
|
180 | + for path in self._file_paths
|
|
181 | + if os.path.basename(path) == filename
|
|
182 | + and (search_dirs is None or self._matching_dirs(path, search_dirs))
|
|
183 | + ]
|
|
184 | + if not matching:
|
|
185 | + return None
|
|
186 | + if len(matching) > 1:
|
|
187 | + raise Exception(f"Multiple occurrences of {filename}")
|
|
188 | + |
|
189 | + path = matching[0]
|
|
190 | + |
|
191 | + return TranslationFile(
|
|
192 | + path=path, content=git_text(["cat-file", "blob", f"{self._ref}:{path}"])
|
|
193 | + )
|
|
194 | + |
|
195 | + |
|
196 | +def get_stable_branch(
|
|
197 | + compare_version: BrowserBranch,
|
|
198 | +) -> tuple[BrowserBranch, BrowserBranch | None]:
|
|
199 | + """Find the most recent stable branch in the origin repository.
|
|
200 | + |
|
201 | + :param compare_version: The development branch to compare against.
|
|
202 | + :returns: The stable and legacy branches. If no legacy branch is found,
|
|
203 | + `None` will be returned instead.
|
|
204 | + """
|
|
205 | + # We search for build1 tags. These are added *after* the rebase of browser
|
|
206 | + # commits, so the corresponding branch should contain our strings.
|
|
207 | + # Moreover, we *assume* that the branch with the most recent ESR version
|
|
208 | + # with such a tag will be used in the *next* stable build in
|
|
209 | + # tor-browser-build.
|
|
210 | + tag_glob = f"{compare_version.prefix}-*-build1"
|
|
211 | + |
|
212 | + # To speed this up, fetch only the tags, without blobs.
|
|
213 | + git_run(
|
|
214 | + ["fetch", "--depth=1", "--filter=object:type=tag", "origin", "tag", tag_glob]
|
|
215 | + )
|
|
216 | + stable_branches = []
|
|
217 | + legacy_branches = []
|
|
218 | + stable_annotation_regex = re.compile(r"\bstable\b")
|
|
219 | + legacy_annotation_regex = re.compile(r"\blegacy\b")
|
|
220 | + tag_pattern = re.compile(
|
|
221 | + rf"^{re.escape(compare_version.prefix)}-[^-]+esr-[^-]+-[^-]+-build1$"
|
|
222 | + )
|
|
223 | + |
|
224 | + for build_tag, annotation in (
|
|
225 | + line.split(" ", 1) for line in git_lines(["tag", "-n1", "--list", tag_glob])
|
|
226 | + ):
|
|
227 | + if not tag_pattern.match(build_tag):
|
|
228 | + continue
|
|
229 | + is_stable = bool(stable_annotation_regex.search(annotation))
|
|
230 | + is_legacy = bool(legacy_annotation_regex.search(annotation))
|
|
231 | + if not is_stable and not is_legacy:
|
|
232 | + continue
|
|
233 | + try:
|
|
234 | + # Branch name is the same as the tag, minus "-build1".
|
|
235 | + branch = BrowserBranch(re.sub(r"-build1$", "", build_tag))
|
|
236 | + except ValueError:
|
|
237 | + logger.warning(f"Could not read the version for {build_tag}")
|
|
238 | + continue
|
|
239 | + if branch.prefix != compare_version.prefix:
|
|
240 | + continue
|
|
241 | + if is_stable:
|
|
242 | + # Stable can be one release version behind.
|
|
243 | + # NOTE: In principle, when switching between versions there may be a
|
|
244 | + # window of time where the development branch has not yet progressed
|
|
245 | + # to the next "0.5" release, so has the same browser version as the
|
|
246 | + # stable branch. So we also allow for matching browser versions.
|
|
247 | + # NOTE:
|
|
248 | + # 1. The "Will be unused in" message will not make sense, but we do
|
|
249 | + # not expect string differences in this scenario.
|
|
250 | + # 2. We do not expect this scenario to last for long.
|
|
251 | + if not (
|
|
252 | + compare_version.release_below(branch, 1)
|
|
253 | + or compare_version.release_below(branch, 0)
|
|
254 | + ):
|
|
255 | + continue
|
|
256 | + stable_branches.append(branch)
|
|
257 | + elif is_legacy:
|
|
258 | + # Legacy can be two release versions behind.
|
|
259 | + # We also allow for being just one version behind.
|
|
260 | + if not (
|
|
261 | + compare_version.release_below(branch, 2)
|
|
262 | + or compare_version.release_below(branch, 1)
|
|
263 | + ):
|
|
264 | + continue
|
|
265 | + legacy_branches.append(branch)
|
|
266 | + |
|
267 | + if not stable_branches:
|
|
268 | + raise Exception("No stable build1 branch found")
|
|
269 | + |
|
270 | + return (
|
|
271 | + # Return the stable branch with the highest version.
|
|
272 | + max(stable_branches),
|
|
273 | + max(legacy_branches) if legacy_branches else None,
|
|
274 | + )
|
|
275 | + |
|
276 | + |
|
277 | +current_branch = BrowserBranch(args.current_branch, is_head=True)
|
|
278 | + |
|
279 | +stable_branch, legacy_branch = get_stable_branch(current_branch)
|
|
280 | + |
|
281 | +if os.environ.get("TRANSLATION_INCLUDE_LEGACY", "") != "true":
|
|
282 | + legacy_branch = None
|
|
283 | + |
|
284 | +files_list = []
|
|
285 | + |
|
286 | +for file_dict in json.loads(args.files):
|
|
287 | + name = file_dict["name"]
|
|
288 | + where_dirs = file_dict.get("where", None)
|
|
289 | + current_file = current_branch.get_file(name, where_dirs)
|
|
290 | + stable_file = stable_branch.get_file(name, where_dirs)
|
|
291 | + |
|
292 | + if current_file is None and stable_file is None:
|
|
293 | + # No file in either branch.
|
|
294 | + logger.warning(f"{name} does not exist in either the current or stable branch")
|
|
295 | + elif current_file is None:
|
|
296 | + logger.warning(f"{name} deleted in the current branch")
|
|
297 | + elif stable_file is None:
|
|
298 | + logger.warning(f"{name} does not exist in the stable branch")
|
|
299 | + elif current_file.path != stable_file.path:
|
|
300 | + logger.warning(
|
|
301 | + f"{name} has different paths in the current and stable branch. "
|
|
302 | + f"{current_file.path} : {stable_file.path}"
|
|
303 | + )
|
|
304 | + |
|
305 | + content = combine_files(
|
|
306 | + name,
|
|
307 | + None if current_file is None else current_file.content,
|
|
308 | + None if stable_file is None else stable_file.content,
|
|
309 | + f"Will be unused in Tor Browser {current_branch.browser_version}!",
|
|
310 | + )
|
|
311 | + |
|
312 | + if legacy_branch and not file_dict.get("exclude-legacy", False):
|
|
313 | + legacy_file = legacy_branch.get_file(name, where_dirs)
|
|
314 | + if legacy_file is not None and current_file is None and stable_file is None:
|
|
315 | + logger.warning(f"{name} still exists in the legacy branch")
|
|
316 | + elif legacy_file is None:
|
|
317 | + logger.warning(f"{name} does not exist in the legacy branch")
|
|
318 | + elif stable_file is not None and legacy_file.path != stable_file.path:
|
|
319 | + logger.warning(
|
|
320 | + f"{name} has different paths in the stable and legacy branch. "
|
|
321 | + f"{stable_file.path} : {legacy_file.path}"
|
|
322 | + )
|
|
323 | + elif current_file is not None and legacy_file.path != current_file.path:
|
|
324 | + logger.warning(
|
|
325 | + f"{name} has different paths in the current and legacy branch. "
|
|
326 | + f"{current_file.path} : {legacy_file.path}"
|
|
327 | + )
|
|
328 | + |
|
329 | + content = combine_files(
|
|
330 | + name,
|
|
331 | + content,
|
|
332 | + legacy_file.content,
|
|
333 | + f"Unused in Tor Browser {stable_branch.browser_version}!",
|
|
334 | + )
|
|
335 | + elif legacy_branch:
|
|
336 | + logger.info(f"Excluding legacy branch for {name}")
|
|
337 | + |
|
338 | + files_list.append(
|
|
339 | + {
|
|
340 | + "name": name,
|
|
341 | + # If "directory" is unspecified, we place the file directly beneath
|
|
342 | + # en-US/ in the translation repository. i.e. "".
|
|
343 | + "directory": file_dict.get("directory", ""),
|
|
344 | + "branch": file_dict["branch"],
|
|
345 | + "content": content,
|
|
346 | + }
|
|
347 | + )
|
|
348 | + |
|
349 | + |
|
350 | +ci_commit = os.environ.get("CI_COMMIT_SHA", "")
|
|
351 | +ci_url_base = os.environ.get("CI_PROJECT_URL", "")
|
|
352 | + |
|
353 | +json_data = {
|
|
354 | + "commit": ci_commit,
|
|
355 | + "commit-url": f"{ci_url_base}/-/commit/{ci_commit}"
|
|
356 | + if (ci_commit and ci_url_base)
|
|
357 | + else "",
|
|
358 | + "project-path": os.environ.get("CI_PROJECT_PATH", ""),
|
|
359 | + "current-branch": current_branch.name,
|
|
360 | + "stable-branch": stable_branch.name,
|
|
361 | + "files": files_list,
|
|
362 | +}
|
|
363 | + |
|
364 | +if legacy_branch:
|
|
365 | + json_data["legacy-branch"] = legacy_branch.name
|
|
366 | + |
|
367 | +with open(args.outname, "w") as file:
|
|
368 | + json.dump(json_data, file) |
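The script writes a single JSON document to the <json> output path (the $COMBINED_FILES_JSON artifact consumed by the push-en-US-translations trigger). A rough sketch of its shape, with placeholder values:

    # Placeholder illustration of the artifact written by combine-translation-versions.py.
    example_output = {
        "commit": "<CI_COMMIT_SHA>",
        "commit-url": "<CI_PROJECT_URL>/-/commit/<CI_COMMIT_SHA>",
        "project-path": "<CI_PROJECT_PATH>",
        "current-branch": "base-browser-128.6.0esr-14.5-1",
        "stable-branch": "<most recent stable build1 branch>",
        # "legacy-branch" is added only when a legacy branch is included.
        "files": [
            {
                "name": "example.ftl",
                "directory": "",
                "branch": "example-branch",
                "content": "<combined en-US content>",
            }
        ],
    }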
1 | +# flake8: noqa
|
|
2 | + |
|
3 | +from .combine import combine_files |
1 | +import re
|
|
2 | +from typing import TYPE_CHECKING, Any
|
|
3 | + |
|
4 | +from compare_locales.parser import getParser
|
|
5 | +from compare_locales.parser.android import AndroidEntity, DocumentWrapper
|
|
6 | +from compare_locales.parser.base import Comment, Entity, Junk, Whitespace
|
|
7 | +from compare_locales.parser.dtd import DTDEntity
|
|
8 | +from compare_locales.parser.fluent import FluentComment, FluentEntity
|
|
9 | +from compare_locales.parser.properties import PropertiesEntity
|
|
10 | + |
|
11 | +if TYPE_CHECKING:
|
|
12 | + from collections.abc import Iterable
|
|
13 | + |
|
14 | + |
|
15 | +def combine_files(
|
|
16 | + filename: str,
|
|
17 | + new_content: str | None,
|
|
18 | + old_content: str | None,
|
|
19 | + comment_prefix: str,
|
|
20 | +) -> str | None:
|
|
21 | + """Combine two translation files into one to include all strings from both.
|
|
22 | + The new content is presented first, and any strings only found in the old
|
|
23 | + content are placed at the end with an additional comment.
|
|
24 | + |
|
25 | + :param filename: The filename for the file, determines the format.
|
|
26 | + :param new_content: The new content for the file, or None if it has been
|
|
27 | + deleted.
|
|
28 | + :param old_content: The old content for the file, or None if it did not
|
|
29 | + exist before.
|
|
30 | + :param comment_prefix: A comment to include for any strings that are only found in
|
|
31 | + the old content. This will be placed before any other comments for the
|
|
32 | + string.
|
|
33 | + |
|
34 | + :returns: The combined content, or None if both given contents are None.
|
|
35 | + """
|
|
36 | + if new_content is None and old_content is None:
|
|
37 | + return None
|
|
38 | + |
|
39 | + # getParser from compare_locales returns the same instance for the same file
|
|
40 | + # extension.
|
|
41 | + parser = getParser(filename)
|
|
42 | + |
|
43 | + is_android = filename.endswith(".xml")
|
|
44 | + if new_content is None:
|
|
45 | + if is_android:
|
|
46 | + # File was deleted, add some document parts.
|
|
47 | + content_start = (
|
|
48 | + '<?xml version="1.0" encoding="utf-8" standalone="yes"?>\n<resources>\n'
|
|
49 | + )
|
|
50 | + content_end = "</resources>\n"
|
|
51 | + else:
|
|
52 | + # Treat as an empty file.
|
|
53 | + content_start = ""
|
|
54 | + content_end = ""
|
|
55 | + existing_keys = []
|
|
56 | + else:
|
|
57 | + parser.readUnicode(new_content)
|
|
58 | + |
|
59 | + # Start with the same content as the current file.
|
|
60 | + # For Android strings, we want to keep the final "</resources>" until afterwards.
|
|
61 | + if is_android:
|
|
62 | + closing_match = re.match(
|
|
63 | + r"^(.*)(</resources>\s*)$", parser.ctx.contents, re.DOTALL
|
|
64 | + )
|
|
65 | + if not closing_match:
|
|
66 | + raise ValueError("Missing a final </resources>")
|
|
67 | + content_start = closing_match.group(1)
|
|
68 | + content_end = closing_match.group(2)
|
|
69 | + else:
|
|
70 | + content_start = parser.ctx.contents
|
|
71 | + content_end = ""
|
|
72 | + existing_keys = [entry.key for entry in parser.walk(only_localizable=True)]
|
|
73 | + |
|
74 | + # For Fluent, we want to prefix the strings using GroupComments.
|
|
75 | + # On Weblate this will cause all the strings that fall under the GroupComment's
|
|
76 | + # scope to have the prefix added to their "notes".
|
|
77 | + # We set up an initial GroupComment for the first string we find. This will also
|
|
78 | + # end the scope of the last GroupComment in the new translation file.
|
|
79 | + # This will be replaced with the next GroupComment when it is found.
|
|
80 | + fluent_group_comment_prefix = f"\n## {comment_prefix}\n"
|
|
81 | + fluent_group_comment: str | None = fluent_group_comment_prefix
|
|
82 | + |
|
83 | + # For other formats, we want to keep all the comment lines that come directly
|
|
84 | + # before the string.
|
|
85 | + # In compare_locales.parser, only the comment line directly before an Entity
|
|
86 | + # counts as the pre_comment for that Entity. I.e. only this line will be
|
|
87 | + # included in Entity.all
|
|
88 | + # However, in Weblate every comment line that comes before the Entity is
|
|
89 | + # included as a comment. So we also want to keep these additional comments to
|
|
90 | + # preserve them for weblate.
|
|
91 | + # We gather these extra comments in stacked_comments, and clear them whenever we
|
|
92 | + # reach an Entity or a blank line (Whitespace is more than "\n").
|
|
93 | + stacked_comments: list[str] = []
|
|
94 | + |
|
95 | + additions: list[str] = []
|
|
96 | + |
|
97 | + entry_iter: Iterable[Any] = ()
|
|
98 | + # If the file does not exist in the old branch, don't make any additions.
|
|
99 | + if old_content is not None:
|
|
100 | + parser.readUnicode(old_content)
|
|
101 | + entry_iter = parser.walk(only_localizable=False)
|
|
102 | + for entry in entry_iter:
|
|
103 | + if isinstance(entry, Junk):
|
|
104 | + raise ValueError(f"Unexpected Junk: {entry.all}")
|
|
105 | + if isinstance(entry, Whitespace):
|
|
106 | + # Clear stacked comments when the whitespace is more than a single newline.
|
|
107 | + if entry.all != "\n":
|
|
108 | + stacked_comments.clear()
|
|
109 | + continue
|
|
110 | + if isinstance(entry, Comment):
|
|
111 | + if isinstance(entry, FluentComment):
|
|
112 | + # Don't stack Fluent comments.
|
|
113 | + # Only the comments included in Entity.pre_comment count towards
|
|
114 | + # that Entity's comment.
|
|
115 | + if entry.all.startswith("##"):
|
|
116 | + # A Fluent GroupComment
|
|
117 | + if entry.all == "##":
|
|
118 | + # Empty GroupComment. Used to end the scope of a previous
|
|
119 | + # GroupComment.
|
|
120 | + # Replace this with our prefix comment.
|
|
121 | + fluent_group_comment = fluent_group_comment_prefix
|
|
122 | + else:
|
|
123 | + # Prefix the group comment.
|
|
124 | + fluent_group_comment = (
|
|
125 | + f"{fluent_group_comment_prefix}{entry.all}\n"
|
|
126 | + )
|
|
127 | + else:
|
|
128 | + stacked_comments.append(entry.all)
|
|
129 | + continue
|
|
130 | + if isinstance(entry, DocumentWrapper):
|
|
131 | + # Not needed.
|
|
132 | + continue
|
|
133 | + |
|
134 | + if not isinstance(entry, Entity):
|
|
135 | + raise ValueError(f"Unexpected type: {entry.__class__.__name__}")
|
|
136 | + |
|
137 | + if entry.key in existing_keys:
|
|
138 | + # Already included this string in the new translation file.
|
|
139 | + # Drop the gathered comments for this Entity.
|
|
140 | + stacked_comments.clear()
|
|
141 | + continue
|
|
142 | + |
|
143 | + if isinstance(entry, FluentEntity):
|
|
144 | + if fluent_group_comment is not None:
|
|
145 | + # We have found a GroupComment which has not been included yet.
|
|
146 | + # All following Entities will be under its scope, until the next
|
|
147 | + # GroupComment.
|
|
148 | + additions.append(fluent_group_comment)
|
|
149 | + # Added GroupComment, so don't need to add again.
|
|
150 | + fluent_group_comment = None
|
|
151 | + elif isinstance(entry, DTDEntity):
|
|
152 | + # Include our additional comment before we print the rest for this
|
|
153 | + # Entity.
|
|
154 | + additions.append(f"<!-- LOCALIZATION NOTE: {comment_prefix} -->")
|
|
155 | + elif isinstance(entry, PropertiesEntity):
|
|
156 | + additions.append(f"# {comment_prefix}")
|
|
157 | + elif isinstance(entry, AndroidEntity):
|
|
158 | + additions.append(f"<!-- {comment_prefix} -->")
|
|
159 | + else:
|
|
160 | + raise ValueError(f"Unexpected Entity type: {entry.__class__.__name__}")
|
|
161 | + |
|
162 | + # Add any other comment lines that came directly before this Entity.
|
|
163 | + additions.extend(stacked_comments)
|
|
164 | + stacked_comments.clear()
|
|
165 | + additions.append(entry.all)
|
|
166 | + |
|
167 | + content_middle = ""
|
|
168 | + |
|
169 | + if additions:
|
|
170 | + # New line before and after the additions
|
|
171 | + additions.insert(0, "")
|
|
172 | + additions.append("")
|
|
173 | + if is_android:
|
|
174 | + content_middle = "\n ".join(additions)
|
|
175 | + else:
|
|
176 | + content_middle = "\n".join(additions)
|
|
177 | + |
|
178 | + # Remove " " in otherwise blank lines.
|
|
179 | + content_middle = re.sub("^ +$", "", content_middle, flags=re.MULTILINE)
|
|
180 | + |
|
181 | + return content_start + content_middle + content_end |
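As a quick orientation for the tests that follow, here is a small usage sketch of combine_files (assuming compare-locales is installed and tools/base-browser/l10n is on the Python path): strings that only exist in the old content are appended after the new content, each preceded by the given comment.

    from combine import combine_files

    new = "string.1 = First\n"
    old = "string.1 = First\nremoved = REMOVED\n"

    # The file extension selects the compare_locales parser (.properties here).
    print(combine_files("test.properties", new, old, "REMOVED STRING"))
    # string.1 = First
    #
    # # REMOVED STRING
    # removed = REMOVED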
1 | +Python tests to be run with pytest.
|
|
2 | +Requires the compare-locales package. |
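A minimal sketch for running this suite, assuming pytest and compare-locales are installed and the working directory is tools/base-browser/l10n so that the combine package is importable:

    import sys

    import pytest

    # Equivalent to running "pytest combine/tests" from tools/base-browser/l10n.
    sys.exit(pytest.main(["combine/tests"]))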
1 | +import textwrap
|
|
2 | + |
|
3 | +from combine import combine_files
|
|
4 | + |
|
5 | + |
|
6 | +def wrap_in_xml(content):
|
|
7 | + if content is None:
|
|
8 | + return None
|
|
9 | + # Allow for indents to make the tests more readable.
|
|
10 | + content = textwrap.dedent(content)
|
|
11 | + return f"""\
|
|
12 | +<?xml version="1.0" encoding="utf-8" standalone="yes"?>
|
|
13 | +<resources>
|
|
14 | +{textwrap.indent(content, " ")}</resources>
|
|
15 | +"""
|
|
16 | + |
|
17 | + |
|
18 | +def assert_result(new_content, old_content, expect):
|
|
19 | + new_content = wrap_in_xml(new_content)
|
|
20 | + old_content = wrap_in_xml(old_content)
|
|
21 | + expect = wrap_in_xml(expect)
|
|
22 | + assert expect == combine_files(
|
|
23 | + "test_strings.xml", new_content, old_content, "REMOVED STRING"
|
|
24 | + )
|
|
25 | + |
|
26 | + |
|
27 | +def test_combine_empty():
|
|
28 | + assert_result(None, None, None)
|
|
29 | + |
|
30 | + |
|
31 | +def test_combine_new_file():
|
|
32 | + # New file with no old content.
|
|
33 | + assert_result(
|
|
34 | + """\
|
|
35 | + <string name="string_1">First</string>
|
|
36 | + <string name="string_2">Second</string>
|
|
37 | + """,
|
|
38 | + None,
|
|
39 | + """\
|
|
40 | + <string name="string_1">First</string>
|
|
41 | + <string name="string_2">Second</string>
|
|
42 | + """,
|
|
43 | + )
|
|
44 | + |
|
45 | + |
|
46 | +def test_combine_removed_file():
|
|
47 | + # Entire file was removed.
|
|
48 | + assert_result(
|
|
49 | + None,
|
|
50 | + """\
|
|
51 | + <string name="string_1">First</string>
|
|
52 | + <string name="string_2">Second</string>
|
|
53 | + """,
|
|
54 | + """\
|
|
55 | + |
|
56 | + <!-- REMOVED STRING -->
|
|
57 | + <string name="string_1">First</string>
|
|
58 | + <!-- REMOVED STRING -->
|
|
59 | + <string name="string_2">Second</string>
|
|
60 | + """,
|
|
61 | + )
|
|
62 | + |
|
63 | + |
|
64 | +def test_no_change():
|
|
65 | + content = """\
|
|
66 | + <string name="string_1">First</string>
|
|
67 | + <string name="string_2">Second</string>
|
|
68 | + """
|
|
69 | + assert_result(content, content, content)
|
|
70 | + |
|
71 | + |
|
72 | +def test_added_string():
|
|
73 | + assert_result(
|
|
74 | + """\
|
|
75 | + <string name="string_1">First</string>
|
|
76 | + <string name="string_new">NEW</string>
|
|
77 | + <string name="string_2">Second</string>
|
|
78 | + """,
|
|
79 | + """\
|
|
80 | + <string name="string_1">First</string>
|
|
81 | + <string name="string_2">Second</string>
|
|
82 | + """,
|
|
83 | + """\
|
|
84 | + <string name="string_1">First</string>
|
|
85 | + <string name="string_new">NEW</string>
|
|
86 | + <string name="string_2">Second</string>
|
|
87 | + """,
|
|
88 | + )
|
|
89 | + |
|
90 | + |
|
91 | +def test_removed_string():
|
|
92 | + assert_result(
|
|
93 | + """\
|
|
94 | + <string name="string_1">First</string>
|
|
95 | + <string name="string_2">Second</string>
|
|
96 | + """,
|
|
97 | + """\
|
|
98 | + <string name="string_1">First</string>
|
|
99 | + <string name="removed">REMOVED</string>
|
|
100 | + <string name="string_2">Second</string>
|
|
101 | + """,
|
|
102 | + """\
|
|
103 | + <string name="string_1">First</string>
|
|
104 | + <string name="string_2">Second</string>
|
|
105 | + |
|
106 | + <!-- REMOVED STRING -->
|
|
107 | + <string name="removed">REMOVED</string>
|
|
108 | + """,
|
|
109 | + )
|
|
110 | + |
|
111 | + |
|
112 | +def test_removed_and_added():
|
|
113 | + assert_result(
|
|
114 | + """\
|
|
115 | + <string name="new_1">New string</string>
|
|
116 | + <string name="string_1">First</string>
|
|
117 | + <string name="string_2">Second</string>
|
|
118 | + <string name="new_2">New string 2</string>
|
|
119 | + """,
|
|
120 | + """\
|
|
121 | + <string name="string_1">First</string>
|
|
122 | + <string name="removed_1">First removed</string>
|
|
123 | + <string name="removed_2">Second removed</string>
|
|
124 | + <string name="string_2">Second</string>
|
|
125 | + <string name="removed_3">Third removed</string>
|
|
126 | + """,
|
|
127 | + """\
|
|
128 | + <string name="new_1">New string</string>
|
|
129 | + <string name="string_1">First</string>
|
|
130 | + <string name="string_2">Second</string>
|
|
131 | + <string name="new_2">New string 2</string>
|
|
132 | + |
|
133 | + <!-- REMOVED STRING -->
|
|
134 | + <string name="removed_1">First removed</string>
|
|
135 | + <!-- REMOVED STRING -->
|
|
136 | + <string name="removed_2">Second removed</string>
|
|
137 | + <!-- REMOVED STRING -->
|
|
138 | + <string name="removed_3">Third removed</string>
|
|
139 | + """,
|
|
140 | + )
|
|
141 | + |
|
142 | + |
|
143 | +def test_updated():
|
|
144 | + # String content was updated.
|
|
145 | + assert_result(
|
|
146 | + """\
|
|
147 | + <string name="changed_string">NEW</string>
|
|
148 | + """,
|
|
149 | + """\
|
|
150 | + <string name="changed_string">OLD</string>
|
|
151 | + """,
|
|
152 | + """\
|
|
153 | + <string name="changed_string">NEW</string>
|
|
154 | + """,
|
|
155 | + )
|
|
156 | + |
|
157 | + |
|
158 | +def test_updated_comment():
|
|
159 | + # String comment was updated.
|
|
160 | + assert_result(
|
|
161 | + """\
|
|
162 | + <!-- NEW -->
|
|
163 | + <string name="changed_string">string</string>
|
|
164 | + """,
|
|
165 | + """\
|
|
166 | + <!-- OLD -->
|
|
167 | + <string name="changed_string">string</string>
|
|
168 | + """,
|
|
169 | + """\
|
|
170 | + <!-- NEW -->
|
|
171 | + <string name="changed_string">string</string>
|
|
172 | + """,
|
|
173 | + )
|
|
174 | + # Comment added.
|
|
175 | + assert_result(
|
|
176 | + """\
|
|
177 | + <!-- NEW -->
|
|
178 | + <string name="changed_string">string</string>
|
|
179 | + """,
|
|
180 | + """\
|
|
181 | + <string name="changed_string">string</string>
|
|
182 | + """,
|
|
183 | + """\
|
|
184 | + <!-- NEW -->
|
|
185 | + <string name="changed_string">string</string>
|
|
186 | + """,
|
|
187 | + )
|
|
188 | + # Comment removed.
|
|
189 | + assert_result(
|
|
190 | + """\
|
|
191 | + <string name="changed_string">string</string>
|
|
192 | + """,
|
|
193 | + """\
|
|
194 | + <!-- OLD -->
|
|
195 | + <string name="changed_string">string</string>
|
|
196 | + """,
|
|
197 | + """\
|
|
198 | + <string name="changed_string">string</string>
|
|
199 | + """,
|
|
200 | + )
|
|
201 | + |
|
202 | + # With file comments
|
|
203 | + assert_result(
|
|
204 | + """\
|
|
205 | + <!-- NEW file comment -->
|
|
206 | + |
|
207 | + <!-- NEW -->
|
|
208 | + <string name="changed_string">string</string>
|
|
209 | + """,
|
|
210 | + """\
|
|
211 | + <!-- OLD file comment -->
|
|
212 | + |
|
213 | + <!-- OLD -->
|
|
214 | + <string name="changed_string">string</string>
|
|
215 | + """,
|
|
216 | + """\
|
|
217 | + <!-- NEW file comment -->
|
|
218 | + |
|
219 | + <!-- NEW -->
|
|
220 | + <string name="changed_string">string</string>
|
|
221 | + """,
|
|
222 | + )
|
|
223 | + |
|
224 | + |
|
225 | +def test_reordered():
|
|
226 | + # String was re-ordered.
|
|
227 | + assert_result(
|
|
228 | + """\
|
|
229 | + <string name="string_1">value</string>
|
|
230 | + <string name="moved_string">move</string>
|
|
231 | + """,
|
|
232 | + """\
|
|
233 | + <string name="moved_string">move</string>
|
|
234 | + <string name="string_1">value</string>
|
|
235 | + """,
|
|
236 | + """\
|
|
237 | + <string name="string_1">value</string>
|
|
238 | + <string name="moved_string">move</string>
|
|
239 | + """,
|
|
240 | + )
|
|
241 | + |
|
242 | + |
|
243 | +def test_removed_string_with_comment():
|
|
244 | + assert_result(
|
|
245 | + """\
|
|
246 | + <!-- Comment for first. -->
|
|
247 | + <string name="string_1">First</string>
|
|
248 | + <string name="string_2">Second</string>
|
|
249 | + """,
|
|
250 | + """\
|
|
251 | + <!-- Comment for first. -->
|
|
252 | + <string name="string_1">First</string>
|
|
253 | + <!-- Comment for removed. -->
|
|
254 | + <string name="removed">REMOVED</string>
|
|
255 | + <string name="string_2">Second</string>
|
|
256 | + """,
|
|
257 | + """\
|
|
258 | + <!-- Comment for first. -->
|
|
259 | + <string name="string_1">First</string>
|
|
260 | + <string name="string_2">Second</string>
|
|
261 | + |
|
262 | + <!-- REMOVED STRING -->
|
|
263 | + <!-- Comment for removed. -->
|
|
264 | + <string name="removed">REMOVED</string>
|
|
265 | + """,
|
|
266 | + )
|
|
267 | + |
|
268 | + # With file comments and multi-line.
|
|
269 | + # All comments prior to a removed string are moved with it, until another
|
|
270 | + # entity or blank line is reached.
|
|
271 | + assert_result(
|
|
272 | + """\
|
|
273 | + <!-- First File comment -->
|
|
274 | + |
|
275 | + <!-- Comment for first. -->
|
|
276 | + <!-- Comment 2 for first. -->
|
|
277 | + <string name="string_1">First</string>
|
|
278 | + |
|
279 | + <!-- Second -->
|
|
280 | + <!-- File comment -->
|
|
281 | + |
|
282 | + <string name="string_2">Second</string>
|
|
283 | + """,
|
|
284 | + """\
|
|
285 | + <!-- First File comment -->
|
|
286 | + |
|
287 | + <!-- Comment for first. -->
|
|
288 | + <!-- Comment 2 for first. -->
|
|
289 | + <string name="string_1">First</string>
|
|
290 | + <string name="removed_1">First removed</string>
|
|
291 | + <!-- Comment for second removed. -->
|
|
292 | + <string name="removed_2">Second removed</string>
|
|
293 | + |
|
294 | + <!-- Removed file comment -->
|
|
295 | + |
|
296 | + <!-- Comment 1 for third removed -->
|
|
297 | + <!-- Comment 2 for third removed -->
|
|
298 | + <string name="removed_3">Third removed</string>
|
|
299 | + |
|
300 | + <!-- Second -->
|
|
301 | + <!-- File comment -->
|
|
302 | + |
|
303 | + <string name="removed_4">Fourth removed</string>
|
|
304 | + <string name="string_2">Second</string>
|
|
305 | + """,
|
|
306 | + """\
|
|
307 | + <!-- First File comment -->
|
|
308 | + |
|
309 | + <!-- Comment for first. -->
|
|
310 | + <!-- Comment 2 for first. -->
|
|
311 | + <string name="string_1">First</string>
|
|
312 | + |
|
313 | + <!-- Second -->
|
|
314 | + <!-- File comment -->
|
|
315 | + |
|
316 | + <string name="string_2">Second</string>
|
|
317 | + |
|
318 | + <!-- REMOVED STRING -->
|
|
319 | + <string name="removed_1">First removed</string>
|
|
320 | + <!-- REMOVED STRING -->
|
|
321 | + <!-- Comment for second removed. -->
|
|
322 | + <string name="removed_2">Second removed</string>
|
|
323 | + <!-- REMOVED STRING -->
|
|
324 | + <!-- Comment 1 for third removed -->
|
|
325 | + <!-- Comment 2 for third removed -->
|
|
326 | + <string name="removed_3">Third removed</string>
|
|
327 | + <!-- REMOVED STRING -->
|
|
328 | + <string name="removed_4">Fourth removed</string>
|
|
329 | + """,
|
|
330 | + ) |
1 | +import textwrap
|
|
2 | + |
|
3 | +from combine import combine_files
|
|
4 | + |
|
5 | + |
|
6 | +def assert_result(new_content, old_content, expect):
|
|
7 | + # Allow for indents to make the tests more readable.
|
|
8 | + if new_content is not None:
|
|
9 | + new_content = textwrap.dedent(new_content)
|
|
10 | + if old_content is not None:
|
|
11 | + old_content = textwrap.dedent(old_content)
|
|
12 | + if expect is not None:
|
|
13 | + expect = textwrap.dedent(expect)
|
|
14 | + assert expect == combine_files(
|
|
15 | + "test.dtd", new_content, old_content, "REMOVED STRING"
|
|
16 | + )
|
|
17 | + |
|
18 | + |
|
19 | +def test_combine_empty():
|
|
20 | + assert_result(None, None, None)
|
|
21 | + |
|
22 | + |
|
23 | +def test_combine_new_file():
|
|
24 | + # New file with no old content.
|
|
25 | + assert_result(
|
|
26 | + """\
|
|
27 | + <!ENTITY string.1 "First">
|
|
28 | + <!ENTITY string.2 "Second">
|
|
29 | + """,
|
|
30 | + None,
|
|
31 | + """\
|
|
32 | + <!ENTITY string.1 "First">
|
|
33 | + <!ENTITY string.2 "Second">
|
|
34 | + """,
|
|
35 | + )
|
|
36 | + |
|
37 | + |
|
38 | +def test_combine_removed_file():
|
|
39 | + # Entire file was removed.
|
|
40 | + assert_result(
|
|
41 | + None,
|
|
42 | + """\
|
|
43 | + <!ENTITY string.1 "First">
|
|
44 | + <!ENTITY string.2 "Second">
|
|
45 | + """,
|
|
46 | + """\
|
|
47 | + |
|
48 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
49 | + <!ENTITY string.1 "First">
|
|
50 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
51 | + <!ENTITY string.2 "Second">
|
|
52 | + """,
|
|
53 | + )
|
|
54 | + |
|
55 | + |
|
56 | +def test_no_change():
|
|
57 | + content = """\
|
|
58 | + <!ENTITY string.1 "First">
|
|
59 | + <!ENTITY string.2 "Second">
|
|
60 | + """
|
|
61 | + assert_result(content, content, content)
|
|
62 | + |
|
63 | + |
|
64 | +def test_added_string():
|
|
65 | + assert_result(
|
|
66 | + """\
|
|
67 | + <!ENTITY string.1 "First">
|
|
68 | + <!ENTITY string.new "NEW">
|
|
69 | + <!ENTITY string.2 "Second">
|
|
70 | + """,
|
|
71 | + """\
|
|
72 | + <!ENTITY string.1 "First">
|
|
73 | + <!ENTITY string.2 "Second">
|
|
74 | + """,
|
|
75 | + """\
|
|
76 | + <!ENTITY string.1 "First">
|
|
77 | + <!ENTITY string.new "NEW">
|
|
78 | + <!ENTITY string.2 "Second">
|
|
79 | + """,
|
|
80 | + )
|
|
81 | + |
|
82 | + |
|
83 | +def test_removed_string():
|
|
84 | + assert_result(
|
|
85 | + """\
|
|
86 | + <!ENTITY string.1 "First">
|
|
87 | + <!ENTITY string.2 "Second">
|
|
88 | + """,
|
|
89 | + """\
|
|
90 | + <!ENTITY string.1 "First">
|
|
91 | + <!ENTITY removed "REMOVED">
|
|
92 | + <!ENTITY string.2 "Second">
|
|
93 | + """,
|
|
94 | + """\
|
|
95 | + <!ENTITY string.1 "First">
|
|
96 | + <!ENTITY string.2 "Second">
|
|
97 | + |
|
98 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
99 | + <!ENTITY removed "REMOVED">
|
|
100 | + """,
|
|
101 | + )
|
|
102 | + |
|
103 | + |
|
104 | +def test_removed_and_added():
|
|
105 | + assert_result(
|
|
106 | + """\
|
|
107 | + <!ENTITY new.1 "New string">
|
|
108 | + <!ENTITY string.1 "First">
|
|
109 | + <!ENTITY string.2 "Second">
|
|
110 | + <!ENTITY new.2 "New string 2">
|
|
111 | + """,
|
|
112 | + """\
|
|
113 | + <!ENTITY string.1 "First">
|
|
114 | + <!ENTITY removed.1 "First removed">
|
|
115 | + <!ENTITY removed.2 "Second removed">
|
|
116 | + <!ENTITY string.2 "Second">
|
|
117 | + <!ENTITY removed.3 "Third removed">
|
|
118 | + """,
|
|
119 | + """\
|
|
120 | + <!ENTITY new.1 "New string">
|
|
121 | + <!ENTITY string.1 "First">
|
|
122 | + <!ENTITY string.2 "Second">
|
|
123 | + <!ENTITY new.2 "New string 2">
|
|
124 | + |
|
125 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
126 | + <!ENTITY removed.1 "First removed">
|
|
127 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
128 | + <!ENTITY removed.2 "Second removed">
|
|
129 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
130 | + <!ENTITY removed.3 "Third removed">
|
|
131 | + """,
|
|
132 | + )
|
|
133 | + |
|
134 | + |
|
135 | +def test_updated():
|
|
136 | + # String content was updated.
|
|
137 | + assert_result(
|
|
138 | + """\
|
|
139 | + <!ENTITY changed.string "NEW">
|
|
140 | + """,
|
|
141 | + """\
|
|
142 | + <!ENTITY changed.string "OLD">
|
|
143 | + """,
|
|
144 | + """\
|
|
145 | + <!ENTITY changed.string "NEW">
|
|
146 | + """,
|
|
147 | + )
|
|
148 | + |
|
149 | + |
|
150 | +def test_updated_comment():
|
|
151 | + # String comment was updated.
|
|
152 | + assert_result(
|
|
153 | + """\
|
|
154 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
155 | + <!ENTITY changed.string "string">
|
|
156 | + """,
|
|
157 | + """\
|
|
158 | + <!-- LOCALIZATION NOTE: OLD -->
|
|
159 | + <!ENTITY changed.string "string">
|
|
160 | + """,
|
|
161 | + """\
|
|
162 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
163 | + <!ENTITY changed.string "string">
|
|
164 | + """,
|
|
165 | + )
|
|
166 | + # Comment added.
|
|
167 | + assert_result(
|
|
168 | + """\
|
|
169 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
170 | + <!ENTITY changed.string "string">
|
|
171 | + """,
|
|
172 | + """\
|
|
173 | + <!ENTITY changed.string "string">
|
|
174 | + """,
|
|
175 | + """\
|
|
176 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
177 | + <!ENTITY changed.string "string">
|
|
178 | + """,
|
|
179 | + )
|
|
180 | + # Comment removed.
|
|
181 | + assert_result(
|
|
182 | + """\
|
|
183 | + <!ENTITY changed.string "string">
|
|
184 | + """,
|
|
185 | + """\
|
|
186 | + <!-- LOCALIZATION NOTE: OLD -->
|
|
187 | + <!ENTITY changed.string "string">
|
|
188 | + """,
|
|
189 | + """\
|
|
190 | + <!ENTITY changed.string "string">
|
|
191 | + """,
|
|
192 | + )
|
|
193 | + |
|
194 | + # With multiple comments
|
|
195 | + assert_result(
|
|
196 | + """\
|
|
197 | + <!-- NEW FILE COMMENT -->
|
|
198 | + |
|
199 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
200 | + <!ENTITY changed.string "string">
|
|
201 | + """,
|
|
202 | + """\
|
|
203 | + <!-- OLD -->
|
|
204 | + |
|
205 | + <!-- LOCALIZATION NOTE: OLD -->
|
|
206 | + <!ENTITY changed.string "string">
|
|
207 | + """,
|
|
208 | + """\
|
|
209 | + <!-- NEW FILE COMMENT -->
|
|
210 | + |
|
211 | + <!-- LOCALIZATION NOTE: NEW -->
|
|
212 | + <!ENTITY changed.string "string">
|
|
213 | + """,
|
|
214 | + )
|
|
215 | + |
|
216 | + |
|
217 | +def test_reordered():
|
|
218 | + # String was re-ordered.
|
|
219 | + assert_result(
|
|
220 | + """\
|
|
221 | + <!ENTITY string.1 "value">
|
|
222 | + <!ENTITY moved.string "move">
|
|
223 | + """,
|
|
224 | + """\
|
|
225 | + <!ENTITY moved.string "move">
|
|
226 | + <!ENTITY string.1 "value">
|
|
227 | + """,
|
|
228 | + """\
|
|
229 | + <!ENTITY string.1 "value">
|
|
230 | + <!ENTITY moved.string "move">
|
|
231 | + """,
|
|
232 | + )
|
|
233 | + |
|
234 | + |
|
235 | +def test_removed_string_with_comment():
|
|
236 | + assert_result(
|
|
237 | + """\
|
|
238 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
239 | + <!ENTITY string.1 "First">
|
|
240 | + <!ENTITY string.2 "Second">
|
|
241 | + """,
|
|
242 | + """\
|
|
243 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
244 | + <!ENTITY string.1 "First">
|
|
245 | + <!-- LOCALIZATION NOTE: Comment for removed. -->
|
|
246 | + <!ENTITY removed "REMOVED">
|
|
247 | + <!ENTITY string.2 "Second">
|
|
248 | + """,
|
|
249 | + """\
|
|
250 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
251 | + <!ENTITY string.1 "First">
|
|
252 | + <!ENTITY string.2 "Second">
|
|
253 | + |
|
254 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
255 | + <!-- LOCALIZATION NOTE: Comment for removed. -->
|
|
256 | + <!ENTITY removed "REMOVED">
|
|
257 | + """,
|
|
258 | + )
|
|
259 | + |
|
260 | + # With multiple lines of comments.
|
|
261 | + |
|
262 | + assert_result(
|
|
263 | + """\
|
|
264 | + <!-- First file comment -->
|
|
265 | + |
|
266 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
267 | + <!-- LOCALIZATION NOTE: Comment 2 for first. -->
|
|
268 | + <!ENTITY string.1 "First">
|
|
269 | + |
|
270 | + <!-- Second
|
|
271 | + - file
|
|
272 | + - comment -->
|
|
273 | + |
|
274 | + <!ENTITY string.2 "Second">
|
|
275 | + """,
|
|
276 | + """\
|
|
277 | + <!-- First file comment -->
|
|
278 | + |
|
279 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
280 | + <!ENTITY string.1 "First">
|
|
281 | + <!ENTITY removed.1 "First removed">
|
|
282 | + <!-- LOCALIZATION NOTE: Comment for second removed. -->
|
|
283 | + <!ENTITY removed.2 "Second removed">
|
|
284 | + |
|
285 | + <!-- Removed file comment -->
|
|
286 | + |
|
287 | + <!-- LOCALIZATION NOTE: Comment for third removed. -->
|
|
288 | + <!-- LOCALIZATION NOTE: Comment 2 for
|
|
289 | + third removed. -->
|
|
290 | + <!ENTITY removed.3 "Third removed">
|
|
291 | + |
|
292 | + <!-- Second
|
|
293 | + - file
|
|
294 | + - comment -->
|
|
295 | + |
|
296 | + <!ENTITY removed.4 "Fourth removed">
|
|
297 | + <!ENTITY string.2 "Second">
|
|
298 | + """,
|
|
299 | + """\
|
|
300 | + <!-- First file comment -->
|
|
301 | + |
|
302 | + <!-- LOCALIZATION NOTE: Comment for first. -->
|
|
303 | + <!-- LOCALIZATION NOTE: Comment 2 for first. -->
|
|
304 | + <!ENTITY string.1 "First">
|
|
305 | + |
|
306 | + <!-- Second
|
|
307 | + - file
|
|
308 | + - comment -->
|
|
309 | + |
|
310 | + <!ENTITY string.2 "Second">
|
|
311 | + |
|
312 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
313 | + <!ENTITY removed.1 "First removed">
|
|
314 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
315 | + <!-- LOCALIZATION NOTE: Comment for second removed. -->
|
|
316 | + <!ENTITY removed.2 "Second removed">
|
|
317 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
318 | + <!-- LOCALIZATION NOTE: Comment for third removed. -->
|
|
319 | + <!-- LOCALIZATION NOTE: Comment 2 for
|
|
320 | + third removed. -->
|
|
321 | + <!ENTITY removed.3 "Third removed">
|
|
322 | + <!-- LOCALIZATION NOTE: REMOVED STRING -->
|
|
323 | + <!ENTITY removed.4 "Fourth removed">
|
|
324 | + """,
|
|
325 | + ) |
1 | +import textwrap
|
|
2 | + |
|
3 | +from combine import combine_files
|
|
4 | + |
|
5 | + |
|
6 | +def assert_result(new_content, old_content, expect):
|
|
7 | + # Allow for indents to make the tests more readable.
|
|
8 | + if new_content is not None:
|
|
9 | + new_content = textwrap.dedent(new_content)
|
|
10 | + if old_content is not None:
|
|
11 | + old_content = textwrap.dedent(old_content)
|
|
12 | + if expect is not None:
|
|
13 | + expect = textwrap.dedent(expect)
|
|
14 | + assert expect == combine_files(
|
|
15 | + "test.ftl", new_content, old_content, "REMOVED STRING"
|
|
16 | + )
|
|
17 | + |
|
18 | + |
|
19 | +def test_combine_empty():
|
|
20 | + assert_result(None, None, None)
|
|
21 | + |
|
22 | + |
|
23 | +def test_combine_new_file():
|
|
24 | + # New file with no old content.
|
|
25 | + assert_result(
|
|
26 | + """\
|
|
27 | + string-1 = First
|
|
28 | + string-2 = Second
|
|
29 | + """,
|
|
30 | + None,
|
|
31 | + """\
|
|
32 | + string-1 = First
|
|
33 | + string-2 = Second
|
|
34 | + """,
|
|
35 | + )
|
|
36 | + |
|
37 | + |
|
38 | +def test_combine_removed_file():
|
|
39 | + # Entire file was removed.
|
|
40 | + assert_result(
|
|
41 | + None,
|
|
42 | + """\
|
|
43 | + string-1 = First
|
|
44 | + string-2 = Second
|
|
45 | + """,
|
|
46 | + """\
|
|
47 | + |
|
48 | + |
|
49 | + ## REMOVED STRING
|
|
50 | + |
|
51 | + string-1 = First
|
|
52 | + string-2 = Second
|
|
53 | + """,
|
|
54 | + )
|
|
55 | + |
|
56 | + |
|
57 | +def test_no_change():
|
|
58 | + content = """\
|
|
59 | + string-1 = First
|
|
60 | + string-2 = Second
|
|
61 | + """
|
|
62 | + assert_result(content, content, content)
|
|
63 | + |
|
64 | + |
|
65 | +def test_added_string():
|
|
66 | + assert_result(
|
|
67 | + """\
|
|
68 | + string-1 = First
|
|
69 | + string-new = NEW
|
|
70 | + string-2 = Second
|
|
71 | + """,
|
|
72 | + """\
|
|
73 | + string-1 = First
|
|
74 | + string-2 = Second
|
|
75 | + """,
|
|
76 | + """\
|
|
77 | + string-1 = First
|
|
78 | + string-new = NEW
|
|
79 | + string-2 = Second
|
|
80 | + """,
|
|
81 | + )
|
|
82 | + |
|
83 | + |
|
84 | +def test_removed_string():
|
|
85 | + assert_result(
|
|
86 | + """\
|
|
87 | + string-1 = First
|
|
88 | + string-2 = Second
|
|
89 | + """,
|
|
90 | + """\
|
|
91 | + string-1 = First
|
|
92 | + removed = REMOVED
|
|
93 | + string-2 = Second
|
|
94 | + """,
|
|
95 | + """\
|
|
96 | + string-1 = First
|
|
97 | + string-2 = Second
|
|
98 | + |
|
99 | + |
|
100 | + ## REMOVED STRING
|
|
101 | + |
|
102 | + removed = REMOVED
|
|
103 | + """,
|
|
104 | + )
|
|
105 | + |
|
106 | + |
|
107 | +def test_removed_and_added():
|
|
108 | + assert_result(
|
|
109 | + """\
|
|
110 | + new-1 = New string
|
|
111 | + string-1 =
|
|
112 | + .attr = First
|
|
113 | + string-2 = Second
|
|
114 | + new-2 =
|
|
115 | + .title = New string 2
|
|
116 | + """,
|
|
117 | + """\
|
|
118 | + string-1 =
|
|
119 | + .attr = First
|
|
120 | + removed-1 = First removed
|
|
121 | + removed-2 =
|
|
122 | + .attr = Second removed
|
|
123 | + string-2 = Second
|
|
124 | + removed-3 = Third removed
|
|
125 | + """,
|
|
126 | + """\
|
|
127 | + new-1 = New string
|
|
128 | + string-1 =
|
|
129 | + .attr = First
|
|
130 | + string-2 = Second
|
|
131 | + new-2 =
|
|
132 | + .title = New string 2
|
|
133 | + |
|
134 | + |
|
135 | + ## REMOVED STRING
|
|
136 | + |
|
137 | + removed-1 = First removed
|
|
138 | + removed-2 =
|
|
139 | + .attr = Second removed
|
|
140 | + removed-3 = Third removed
|
|
141 | + """,
|
|
142 | + )
|
|
143 | + |
|
144 | + |
|
145 | +def test_updated():
|
|
146 | + # String content was updated.
|
|
147 | + assert_result(
|
|
148 | + """\
|
|
149 | + changed-string = NEW
|
|
150 | + """,
|
|
151 | + """\
|
|
152 | + changed-string = OLD
|
|
153 | + """,
|
|
154 | + """\
|
|
155 | + changed-string = NEW
|
|
156 | + """,
|
|
157 | + )
|
|
158 | + |
|
159 | + |
|
160 | +def test_updated_comment():
|
|
161 | + # String comment was updated.
|
|
162 | + assert_result(
|
|
163 | + """\
|
|
164 | + # NEW
|
|
165 | + changed-string = string
|
|
166 | + """,
|
|
167 | + """\
|
|
168 | + # OLD
|
|
169 | + changed-string = string
|
|
170 | + """,
|
|
171 | + """\
|
|
172 | + # NEW
|
|
173 | + changed-string = string
|
|
174 | + """,
|
|
175 | + )
|
|
176 | + # Comment added.
|
|
177 | + assert_result(
|
|
178 | + """\
|
|
179 | + # NEW
|
|
180 | + changed-string = string
|
|
181 | + """,
|
|
182 | + """\
|
|
183 | + changed-string = string
|
|
184 | + """,
|
|
185 | + """\
|
|
186 | + # NEW
|
|
187 | + changed-string = string
|
|
188 | + """,
|
|
189 | + )
|
|
190 | + # Comment removed.
|
|
191 | + assert_result(
|
|
192 | + """\
|
|
193 | + changed-string = string
|
|
194 | + """,
|
|
195 | + """\
|
|
196 | + # OLD
|
|
197 | + changed-string = string
|
|
198 | + """,
|
|
199 | + """\
|
|
200 | + changed-string = string
|
|
201 | + """,
|
|
202 | + )
|
|
203 | + |
|
204 | + # With group comments.
|
|
205 | + assert_result(
|
|
206 | + """\
|
|
207 | + ## GROUP NEW
|
|
208 | + |
|
209 | + # NEW
|
|
210 | + changed-string = string
|
|
211 | + """,
|
|
212 | + """\
|
|
213 | + ## GROUP OLD
|
|
214 | + |
|
215 | + # OLD
|
|
216 | + changed-string = string
|
|
217 | + """,
|
|
218 | + """\
|
|
219 | + ## GROUP NEW
|
|
220 | + |
|
221 | + # NEW
|
|
222 | + changed-string = string
|
|
223 | + """,
|
|
224 | + )
|
|
225 | + |
|
226 | + |
|
227 | +def test_reordered():
|
|
228 | + # String was re-ordered.
|
|
229 | + assert_result(
|
|
230 | + """\
|
|
231 | + string-1 = value
|
|
232 | + moved-string = move
|
|
233 | + """,
|
|
234 | + """\
|
|
235 | + moved-string = move
|
|
236 | + string-1 = value
|
|
237 | + """,
|
|
238 | + """\
|
|
239 | + string-1 = value
|
|
240 | + moved-string = move
|
|
241 | + """,
|
|
242 | + )
|
|
243 | + |
|
244 | + |
|
245 | +def test_removed_string_with_comment():
|
|
246 | + assert_result(
|
|
247 | + """\
|
|
248 | + # Comment for first.
|
|
249 | + string-1 = First
|
|
250 | + string-2 = Second
|
|
251 | + """,
|
|
252 | + """\
|
|
253 | + # Comment for first.
|
|
254 | + string-1 = First
|
|
255 | + # Comment for removed.
|
|
256 | + removed = REMOVED
|
|
257 | + string-2 = Second
|
|
258 | + """,
|
|
259 | + """\
|
|
260 | + # Comment for first.
|
|
261 | + string-1 = First
|
|
262 | + string-2 = Second
|
|
263 | + |
|
264 | + |
|
265 | + ## REMOVED STRING
|
|
266 | + |
|
267 | + # Comment for removed.
|
|
268 | + removed = REMOVED
|
|
269 | + """,
|
|
270 | + )
|
|
271 | + |
|
272 | + # Group comments are combined with the "REMOVED STRING" comments.
|
|
273 | + # If strings have no group comment, then a single "REMOVED STRING" is
|
|
274 | + # included for them.
|
|
275 | + assert_result(
|
|
276 | + """\
|
|
277 | + ## First Group comment
|
|
278 | + |
|
279 | + # Comment for first.
|
|
280 | + string-1 = First
|
|
281 | + |
|
282 | + ##
|
|
283 | + |
|
284 | + no-group = No group comment
|
|
285 | + |
|
286 | + ## Second
|
|
287 | + ## Group comment
|
|
288 | + |
|
289 | + string-2 = Second
|
|
290 | + """,
|
|
291 | + """\
|
|
292 | + ## First Group comment
|
|
293 | + |
|
294 | + # Comment for first.
|
|
295 | + string-1 = First
|
|
296 | + removed-1 = First removed
|
|
297 | + # Comment for second removed.
|
|
298 | + removed-2 = Second removed
|
|
299 | + |
|
300 | + ##
|
|
301 | + |
|
302 | + no-group = No group comment
|
|
303 | + removed-3 = Third removed
|
|
304 | + |
|
305 | + ## Second
|
|
306 | + ## Group comment
|
|
307 | + |
|
308 | + removed-4 = Fourth removed
|
|
309 | + string-2 = Second
|
|
310 | + """,
|
|
311 | + """\
|
|
312 | + ## First Group comment
|
|
313 | + |
|
314 | + # Comment for first.
|
|
315 | + string-1 = First
|
|
316 | + |
|
317 | + ##
|
|
318 | + |
|
319 | + no-group = No group comment
|
|
320 | + |
|
321 | + ## Second
|
|
322 | + ## Group comment
|
|
323 | + |
|
324 | + string-2 = Second
|
|
325 | + |
|
326 | + |
|
327 | + ## REMOVED STRING
|
|
328 | + ## First Group comment
|
|
329 | + |
|
330 | + removed-1 = First removed
|
|
331 | + # Comment for second removed.
|
|
332 | + removed-2 = Second removed
|
|
333 | + |
|
334 | + ## REMOVED STRING
|
|
335 | + |
|
336 | + removed-3 = Third removed
|
|
337 | + |
|
338 | + ## REMOVED STRING
|
|
339 | + ## Second
|
|
340 | + ## Group comment
|
|
341 | + |
|
342 | + removed-4 = Fourth removed
|
|
343 | + """,
|
|
344 | + ) |
1 | +import textwrap
|
|
2 | + |
|
3 | +from combine import combine_files
|
|
4 | + |
|
5 | + |
|
6 | +def assert_result(new_content, old_content, expect):
|
|
7 | + # Allow for indents to make the tests more readable.
|
|
8 | + if new_content is not None:
|
|
9 | + new_content = textwrap.dedent(new_content)
|
|
10 | + if old_content is not None:
|
|
11 | + old_content = textwrap.dedent(old_content)
|
|
12 | + if expect is not None:
|
|
13 | + expect = textwrap.dedent(expect)
|
|
14 | + assert expect == combine_files(
|
|
15 | + "test.properties", new_content, old_content, "REMOVED STRING"
|
|
16 | + )
|
|
17 | + |
|
18 | + |
|
19 | +def test_combine_empty():
|
|
20 | + assert_result(None, None, None)
|
|
21 | + |
|
22 | + |
|
23 | +def test_combine_new_file():
|
|
24 | + # New file with no old content.
|
|
25 | + assert_result(
|
|
26 | + """\
|
|
27 | + string.1 = First
|
|
28 | + string.2 = Second
|
|
29 | + """,
|
|
30 | + None,
|
|
31 | + """\
|
|
32 | + string.1 = First
|
|
33 | + string.2 = Second
|
|
34 | + """,
|
|
35 | + )
|
|
36 | + |
|
37 | + |
|
38 | +def test_combine_removed_file():
|
|
39 | + # Entire file was removed.
|
|
40 | + assert_result(
|
|
41 | + None,
|
|
42 | + """\
|
|
43 | + string.1 = First
|
|
44 | + string.2 = Second
|
|
45 | + """,
|
|
46 | + """\
|
|
47 | + |
|
48 | + # REMOVED STRING
|
|
49 | + string.1 = First
|
|
50 | + # REMOVED STRING
|
|
51 | + string.2 = Second
|
|
52 | + """,
|
|
53 | + )
|
|
54 | + |
|
55 | + |
|
56 | +def test_no_change():
|
|
57 | + content = """\
|
|
58 | + string.1 = First
|
|
59 | + string.2 = Second
|
|
60 | + """
|
|
61 | + assert_result(content, content, content)
|
|
62 | + |
|
63 | + |
|
64 | +def test_added_string():
|
|
65 | + assert_result(
|
|
66 | + """\
|
|
67 | + string.1 = First
|
|
68 | + string.new = NEW
|
|
69 | + string.2 = Second
|
|
70 | + """,
|
|
71 | + """\
|
|
72 | + string.1 = First
|
|
73 | + string.2 = Second
|
|
74 | + """,
|
|
75 | + """\
|
|
76 | + string.1 = First
|
|
77 | + string.new = NEW
|
|
78 | + string.2 = Second
|
|
79 | + """,
|
|
80 | + )
|
|
81 | + |
|
82 | + |
|
83 | +def test_removed_string():
|
|
84 | + assert_result(
|
|
85 | + """\
|
|
86 | + string.1 = First
|
|
87 | + string.2 = Second
|
|
88 | + """,
|
|
89 | + """\
|
|
90 | + string.1 = First
|
|
91 | + removed = REMOVED
|
|
92 | + string.2 = Second
|
|
93 | + """,
|
|
94 | + """\
|
|
95 | + string.1 = First
|
|
96 | + string.2 = Second
|
|
97 | + |
|
98 | + # REMOVED STRING
|
|
99 | + removed = REMOVED
|
|
100 | + """,
|
|
101 | + )
|
|
102 | + |
|
103 | + |
|
104 | +def test_removed_and_added():
|
|
105 | + assert_result(
|
|
106 | + """\
|
|
107 | + new.1 = New string
|
|
108 | + string.1 = First
|
|
109 | + string.2 = Second
|
|
110 | + new.2 = New string 2
|
|
111 | + """,
|
|
112 | + """\
|
|
113 | + string.1 = First
|
|
114 | + removed.1 = First removed
|
|
115 | + removed.2 = Second removed
|
|
116 | + string.2 = Second
|
|
117 | + removed.3 = Third removed
|
|
118 | + """,
|
|
119 | + """\
|
|
120 | + new.1 = New string
|
|
121 | + string.1 = First
|
|
122 | + string.2 = Second
|
|
123 | + new.2 = New string 2
|
|
124 | + |
|
125 | + # REMOVED STRING
|
|
126 | + removed.1 = First removed
|
|
127 | + # REMOVED STRING
|
|
128 | + removed.2 = Second removed
|
|
129 | + # REMOVED STRING
|
|
130 | + removed.3 = Third removed
|
|
131 | + """,
|
|
132 | + )
|
|
133 | + |
|
134 | + |
|
135 | +def test_updated():
|
|
136 | + # String content was updated.
|
|
137 | + assert_result(
|
|
138 | + """\
|
|
139 | + changed.string = NEW
|
|
140 | + """,
|
|
141 | + """\
|
|
142 | + changed.string = OLD
|
|
143 | + """,
|
|
144 | + """\
|
|
145 | + changed.string = NEW
|
|
146 | + """,
|
|
147 | + )
|
|
148 | + |
|
149 | + |
|
150 | +def test_updated_comment():
|
|
151 | + # String comment was updated.
|
|
152 | + assert_result(
|
|
153 | + """\
|
|
154 | + # NEW
|
|
155 | + changed.string = string
|
|
156 | + """,
|
|
157 | + """\
|
|
158 | + # OLD
|
|
159 | + changed.string = string
|
|
160 | + """,
|
|
161 | + """\
|
|
162 | + # NEW
|
|
163 | + changed.string = string
|
|
164 | + """,
|
|
165 | + )
|
|
166 | + # Comment added.
|
|
167 | + assert_result(
|
|
168 | + """\
|
|
169 | + # NEW
|
|
170 | + changed.string = string
|
|
171 | + """,
|
|
172 | + """\
|
|
173 | + changed.string = string
|
|
174 | + """,
|
|
175 | + """\
|
|
176 | + # NEW
|
|
177 | + changed.string = string
|
|
178 | + """,
|
|
179 | + )
|
|
180 | + # Comment removed.
|
|
181 | + assert_result(
|
|
182 | + """\
|
|
183 | + changed.string = string
|
|
184 | + """,
|
|
185 | + """\
|
|
186 | + # OLD
|
|
187 | + changed.string = string
|
|
188 | + """,
|
|
189 | + """\
|
|
190 | + changed.string = string
|
|
191 | + """,
|
|
192 | + )
|
|
193 | + |
|
194 | + # With file comments
|
|
195 | + assert_result(
|
|
196 | + """\
|
|
197 | + # NEW file comment
|
|
198 | + |
|
199 | + # NEW
|
|
200 | + changed.string = string
|
|
201 | + """,
|
|
202 | + """\
|
|
203 | + # OLD file comment
|
|
204 | + |
|
205 | + # OLD
|
|
206 | + changed.string = string
|
|
207 | + """,
|
|
208 | + """\
|
|
209 | + # NEW file comment
|
|
210 | + |
|
211 | + # NEW
|
|
212 | + changed.string = string
|
|
213 | + """,
|
|
214 | + )
|
|
215 | + |
|
216 | + |
|
217 | +def test_reordered():
|
|
218 | + # String was re-ordered.
|
|
219 | + assert_result(
|
|
220 | + """\
|
|
221 | + string.1 = value
|
|
222 | + moved.string = move
|
|
223 | + """,
|
|
224 | + """\
|
|
225 | + moved.string = move
|
|
226 | + string.1 = value
|
|
227 | + """,
|
|
228 | + """\
|
|
229 | + string.1 = value
|
|
230 | + moved.string = move
|
|
231 | + """,
|
|
232 | + )
|
|
233 | + |
|
234 | + |
|
235 | +def test_removed_string_with_comment():
|
|
236 | + assert_result(
|
|
237 | + """\
|
|
238 | + # Comment for first.
|
|
239 | + string.1 = First
|
|
240 | + string.2 = Second
|
|
241 | + """,
|
|
242 | + """\
|
|
243 | + # Comment for first.
|
|
244 | + string.1 = First
|
|
245 | + # Comment for removed.
|
|
246 | + removed = REMOVED
|
|
247 | + string.2 = Second
|
|
248 | + """,
|
|
249 | + """\
|
|
250 | + # Comment for first.
|
|
251 | + string.1 = First
|
|
252 | + string.2 = Second
|
|
253 | + |
|
254 | + # REMOVED STRING
|
|
255 | + # Comment for removed.
|
|
256 | + removed = REMOVED
|
|
257 | + """,
|
|
258 | + )
|
|
259 | + |
|
260 | + # With file comments and multi-line.
|
|
261 | + # All comments prior to a removed string are moved with it, until another
|
|
262 | + # entity or blank line is reached.
|
|
263 | + assert_result(
|
|
264 | + """\
|
|
265 | + # First File comment
|
|
266 | + |
|
267 | + # Comment for first.
|
|
268 | + # Comment 2 for first.
|
|
269 | + string.1 = First
|
|
270 | + |
|
271 | + # Second
|
|
272 | + # File comment
|
|
273 | + |
|
274 | + string.2 = Second
|
|
275 | + """,
|
|
276 | + """\
|
|
277 | + # First File comment
|
|
278 | + |
|
279 | + # Comment for first.
|
|
280 | + # Comment 2 for first.
|
|
281 | + string.1 = First
|
|
282 | + removed.1 = First removed
|
|
283 | + # Comment for second removed.
|
|
284 | + removed.2 = Second removed
|
|
285 | + |
|
286 | + # Removed file comment
|
|
287 | + |
|
288 | + # Comment 1 for third removed
|
|
289 | + # Comment 2 for third removed
|
|
290 | + removed.3 = Third removed
|
|
291 | + |
|
292 | + # Second
|
|
293 | + # File comment
|
|
294 | + |
|
295 | + removed.4 = Fourth removed
|
|
296 | + string.2 = Second
|
|
297 | + """,
|
|
298 | + """\
|
|
299 | + # First File comment
|
|
300 | + |
|
301 | + # Comment for first.
|
|
302 | + # Comment 2 for first.
|
|
303 | + string.1 = First
|
|
304 | + |
|
305 | + # Second
|
|
306 | + # File comment
|
|
307 | + |
|
308 | + string.2 = Second
|
|
309 | + |
|
310 | + # REMOVED STRING
|
|
311 | + removed.1 = First removed
|
|
312 | + # REMOVED STRING
|
|
313 | + # Comment for second removed.
|
|
314 | + removed.2 = Second removed
|
|
315 | + # REMOVED STRING
|
|
316 | + # Comment 1 for third removed
|
|
317 | + # Comment 2 for third removed
|
|
318 | + removed.3 = Third removed
|
|
319 | + # REMOVED STRING
|
|
320 | + removed.4 = Fourth removed
|
|
321 | + """,
|
|
322 | + ) |