Mirror of https://github.com/servo/servo.git, synced 2025-08-03 20:50:07 +01:00
Auto merge of #24512 - marmeladema:issue-23607/compat, r=SimonSapin
More changes for Python 3 compatibility. Following #24435, here are even more changes toward #23607. I managed to get `./mach build` to work properly with Python 3, but `test-tidy` does not work yet because of a number of problems in `web-platform-tests` that I will have to deal with at some point. Because flake8 appears to be incompatible with the distro package in some way, I had to change the way we call flake8 (the previous entry point was deprecated anyway). With this change we should be able to update flake8 to a recent version, but subprocess can be surprising on Windows, so I'd like someone to try these changes out on that platform.

---
- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
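Since the flake8 invocation change is flagged above as the part that may misbehave on Windows, here is a standalone sketch of the new subprocess-based call. This is not Servo's exact code: the target file name is made up, and the `.decode()` is an extra step for Python 3, where `CalledProcessError.output` is bytes.

```python
import subprocess

ignore = {"W291", "E501"}  # same codes the tidy process already enforces
args = ["flake8", "--ignore=" + ",".join(ignore), "example.py"]  # hypothetical target
output = ""
try:
    # flake8 exits with a non-zero status when it finds problems,
    # which check_output() surfaces as CalledProcessError.
    subprocess.check_output(args)
except subprocess.CalledProcessError as e:
    output = e.output.decode("utf-8")  # .output is bytes on Python 3

for error in output.splitlines():
    # flake8 reports "path:line:col: message"
    _, line_num, _, message = error.split(":", 3)
    print(line_num, message.strip())
```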
Commit 3e7a0bfc42: 11 changed files with 79 additions and 72 deletions
```diff
@@ -166,7 +166,7 @@ def _activate_virtualenv(topdir, is_firefox):
         # We want to upgrade pip when virtualenv created for the first time
         need_pip_upgrade = True

-    execfile(activate_path, dict(__file__=activate_path))
+    exec(compile(open(activate_path).read(), activate_path, 'exec'), dict(__file__=activate_path))

     python = _get_exec_path(PYTHON_NAMES, is_valid_path=check_exec_path)
     if not python:
```
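The hunk above shows the recurring Python 3 fix in this PR: the `execfile()` builtin no longer exists, so files are compiled and exec'd instead. A minimal self-contained sketch of the pattern (the temp file stands in for the virtualenv activation script):

```python
import tempfile

# Write a stand-in script to run, since execfile()'s replacement needs a path.
with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
    f.write("GREETING = 'hello from ' + __file__\n")
    script_path = f.name

# Python 2/3-compatible replacement for execfile(script_path, globals_dict):
# compiling with the real file name keeps tracebacks pointing at the script.
script_globals = {"__file__": script_path}
exec(compile(open(script_path).read(), script_path, 'exec'), script_globals)
print(script_globals["GREETING"])
```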
```diff
@@ -2,6 +2,7 @@
 # since `--system-site-packages` is enabled

 blessings == 1.6
+distro == 1.4
 mach == 0.6.0
 mozdebug == 0.1
 mozinfo == 0.8
@@ -33,6 +34,6 @@ boto3 == 1.4.4
 certifi

 # For Python3 compatibility
-six
+six == 1.12

 -e python/tidy
```
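The `six == 1.12` pin is not arbitrary: later hunks in this PR call `six.ensure_str`, which was only added in six 1.12.0. A quick illustration of what it does:

```python
import six

# ensure_str coerces bytes or text to the native str type of the running
# interpreter: decoded to unicode on Python 3, kept as bytes-str on Python 2.
assert isinstance(six.ensure_str(b"Ubuntu"), str)
assert isinstance(six.ensure_str(u"Ubuntu"), str)
```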
```diff
@@ -8,10 +8,12 @@ from distutils.spawn import find_executable
 from distutils.version import LooseVersion
 import json
 import os
 import platform
+import distro
 import shutil
 import subprocess
+import six
+import six.moves.urllib as urllib
+from six.moves import input
 from subprocess import PIPE
 from zipfile import BadZipfile
@@ -253,7 +255,7 @@ def salt(context, force=False):
         print('Something went wrong while bootstrapping')
         return retcode

-    proceed = raw_input(
+    proceed = input(
         'Proposed changes are above, proceed with bootstrap? [y/N]: '
     )
     if proceed.lower() not in ['y', 'yes']:
@@ -342,9 +344,11 @@ LINUX_SPECIFIC_BOOTSTRAPPERS = {

 def get_linux_distribution():
-    distro, version, _ = platform.linux_distribution()
+    distrib, version, _ = distro.linux_distribution()
+    distrib = six.ensure_str(distrib)
+    version = six.ensure_str(version)

-    if distro == 'LinuxMint':
+    if distrib == 'LinuxMint':
         if '.' in version:
             major, _ = version.split('.', 1)
         else:
@@ -357,10 +361,10 @@ def get_linux_distribution():
         elif major == '17':
             base_version = '14.04'
         else:
-            raise Exception('unsupported version of %s: %s' % (distro, version))
+            raise Exception('unsupported version of %s: %s' % (distrib, version))

-        distro, version = 'Ubuntu', base_version
-    elif distro.lower() == 'elementary':
+        distrib, version = 'Ubuntu', base_version
+    elif distrib.lower() == 'elementary':
         if version == '5.0':
             base_version = '18.04'
         elif version[0:3] == '0.4':
@@ -372,21 +376,21 @@ def get_linux_distribution():
         elif version == '0.1':
             base_version = '10.10'
         else:
-            raise Exception('unsupported version of %s: %s' % (distro, version))
-        distro, version = 'Ubuntu', base_version
-    elif distro.lower() == 'ubuntu':
+            raise Exception('unsupported version of %s: %s' % (distrib, version))
+        distrib, version = 'Ubuntu', base_version
+    elif distrib.lower() == 'ubuntu':
         if version > '19.04':
-            raise Exception('unsupported version of %s: %s' % (distro, version))
+            raise Exception('unsupported version of %s: %s' % (distrib, version))
     # Fixme: we should allow checked/supported versions only
-    elif distro.lower() not in [
+    elif distrib.lower() not in [
         'centos',
         'centos linux',
         'debian',
         'fedora',
     ]:
-        raise Exception('mach bootstrap does not support %s, please file a bug' % distro)
+        raise Exception('mach bootstrap does not support %s, please file a bug' % distrib)

-    return distro, version
+    return distrib, version


 def bootstrap(context, force=False, specific=None):
@@ -396,9 +400,9 @@ def bootstrap(context, force=False, specific=None):
     if "windows-msvc" in host_triple():
        bootstrapper = windows_msvc
     elif "linux-gnu" in host_triple():
-        distro, version = get_linux_distribution()
+        distrib, version = get_linux_distribution()

-        context.distro = distro
+        context.distro = distrib
         context.distro_version = version
         bootstrapper = LINUX_SPECIFIC_BOOTSTRAPPERS.get(specific, linux)
```
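For context on the `platform` to `distro` switch above (and the `distrib` rename, which stops the local variable from shadowing the new module): `platform.linux_distribution()` was deprecated in Python 3.5 and removed in 3.8, and the `distro` package pinned in the requirements exposes a drop-in `linux_distribution()` returning the same `(name, version, codename)` tuple. A minimal sketch, assuming a Linux host with `distro` installed:

```python
import distro

# Drop-in replacement for the removed platform.linux_distribution().
name, version, codename = distro.linux_distribution()
print(name, version, codename)
```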
```diff
@@ -18,17 +18,19 @@ import locale
 import os
 from os import path
 import platform
+import distro
 import re
 import contextlib
 import subprocess
 from subprocess import PIPE
+import six
 import sys
 import tarfile
 import zipfile
 from xml.etree.ElementTree import XML
 from servo.util import download_file
+import six.moves.urllib as urllib
-from bootstrap import check_gstreamer_lib
+from .bootstrap import check_gstreamer_lib

 from mach.decorators import CommandArgument
 from mach.registrar import Registrar
@@ -133,10 +135,10 @@ def normalize_env(env):
     # want UTF-8, they shouldn't pass in a unicode instance.
     normalized_env = {}
     for k, v in env.items():
-        if isinstance(k, unicode):
+        if isinstance(k, six.text_type):
             k = k.encode('utf-8', 'strict')

-        if isinstance(v, unicode):
+        if isinstance(v, six.text_type):
             v = v.encode('utf-8', 'strict')

         normalized_env[k] = v
@@ -357,7 +359,7 @@ class CommandBase(object):
                 print()
                 return 1
             raise
-        version = tuple(map(int, re.match("rustup (\d+)\.(\d+)\.(\d+)", version_line).groups()))
+        version = tuple(map(int, re.match(b"rustup (\d+)\.(\d+)\.(\d+)", version_line).groups()))
         if version < (1, 11, 0):
             print("rustup is at version %s.%s.%s, Servo requires 1.11.0 or more recent." % version)
             print("Try running 'rustup self update'.")
@@ -679,8 +681,10 @@ install them, let us know by filing a bug!")
             append_to_path_env(path.join(libpath, "pkgconfig"), env, "PKG_CONFIG_PATH")

         if sys.platform == "linux2":
-            distro, version, _ = platform.linux_distribution()
-            if distro == "Ubuntu" and (version == "16.04" or version == "14.04"):
+            distrib, version, _ = distro.linux_distribution()
+            distrib = six.ensure_str(distrib)
+            version = six.ensure_str(version)
+            if distrib == "Ubuntu" and (version == "16.04" or version == "14.04"):
                 env["HARFBUZZ_SYS_NO_PKG_CONFIG"] = "true"

         if extra_path:
@@ -763,7 +767,7 @@ install them, let us know by filing a bug!")
             ]).strip())

         git_info.append('')
-        git_info.append(git_sha)
+        git_info.append(six.ensure_str(git_sha))
         if git_is_dirty:
             git_info.append('dirty')
```
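The `b"rustup ..."` change above exists because `subprocess.check_output()` returns bytes on Python 3, and a bytes string can only be matched with a bytes pattern. A sketch of the same parse, assuming `rustup` is installed and on `PATH`:

```python
import re
import subprocess

# check_output() yields bytes on Python 3, so the regex is a bytes pattern.
version_line = subprocess.check_output(["rustup", "--version"])
match = re.match(br"rustup (\d+)\.(\d+)\.(\d+)", version_line)
if match:
    print(tuple(map(int, match.groups())))  # e.g. (1, 11, 0)
```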
```diff
@@ -350,7 +350,7 @@ class MachCommands(CommandBase):

         run_file = path.abspath(path.join(test_file_dir, "runtests.py"))
         run_globals = {"__file__": run_file}
-        execfile(run_file, run_globals)
+        exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)

         verbose = not quiet
         return run_globals["run_tests"](tests, verbose)
@@ -442,7 +442,7 @@ class MachCommands(CommandBase):
             kwargs["binary_args"] = binary_args

         run_globals = {"__file__": run_file}
-        execfile(run_file, run_globals)
+        exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
         return run_globals["run_tests"](**kwargs)

     @Command('update-manifest',
@@ -473,7 +473,7 @@ class MachCommands(CommandBase):
             return 1

         run_globals = {"__file__": run_file}
-        execfile(run_file, run_globals)
+        exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
         return run_globals["update_tests"](**kwargs)

     @Command('filter-intermittents',
@@ -913,7 +913,7 @@ testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
                                           "components", "net", "tests",
                                           "cookie_http_state_utils.py"))
         run_globals = {"__file__": run_file}
-        execfile(run_file, run_globals)
+        exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
         return run_globals["update_test_file"](cache_dir)

     @Command('update-webgl',
@@ -934,7 +934,7 @@ testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
             shutil.rmtree(dest_folder)

         run_globals = {"__file__": run_file}
-        execfile(run_file, run_globals)
+        exec(compile(open(run_file).read(), run_file, 'exec'), run_globals)
         return run_globals["update_conformance"](version, dest_folder, None, patches_dir)

     @Command('smoketest',
```
```diff
@@ -9,14 +9,12 @@

 from __future__ import print_function

-import contextlib
 import fnmatch
 import imp
 import itertools
 import json
 import os
 import re
-from io import StringIO
+import subprocess
 import sys

@@ -24,7 +22,7 @@ import colorama
 import toml
 import voluptuous
 import yaml
-from licenseck import OLD_MPL, MPL, APACHE, COPYRIGHT, licenses_toml, licenses_dep_toml
+from .licenseck import OLD_MPL, MPL, APACHE, COPYRIGHT, licenses_toml, licenses_dep_toml
 from six import iteritems
 topdir = os.path.abspath(os.path.dirname(sys.argv[0]))
 wpt = os.path.join(topdir, "tests", "wpt")
@@ -327,28 +325,21 @@ def check_by_line(file_name, lines):


 def check_flake8(file_name, contents):
-    from flake8.main import check_code
-
     if not file_name.endswith(".py"):
         raise StopIteration

-    @contextlib.contextmanager
-    def stdout_redirect(where):
-        sys.stdout = where
-        try:
-            yield where
-        finally:
-            sys.stdout = sys.__stdout__
-
     ignore = {
         "W291",  # trailing whitespace; the standard tidy process will enforce no trailing whitespace
         "E501",  # 80 character line length; the standard tidy process will enforce line length
     }

-    output = StringIO()
-    with stdout_redirect(output):
-        check_code(contents, ignore=ignore)
-    for error in output.getvalue().splitlines():
+    output = ""
+    try:
+        args = ["flake8", "--ignore=" + ",".join(ignore), file_name]
+        subprocess.check_output(args)
+    except subprocess.CalledProcessError as e:
+        output = e.output
+    for error in output.splitlines():
         _, line_num, _, message = error.split(":", 3)
         yield line_num, message.strip()
```
```diff
@@ -8,6 +8,7 @@ import os
 import sys
 import subprocess
 import platform
+from six import itervalues, iteritems

 DEFAULT_MOVE_UP_CODE = u"\x1b[A"
 DEFAULT_CLEAR_EOL_CODE = u"\x1b[K"
@@ -105,7 +106,7 @@ class GroupingFormatter(base.BaseFormatter):
         return new_display + "No tests running.\n"

     def suite_start(self, data):
-        self.number_of_tests = sum(len(tests) for tests in data["tests"].itervalues())
+        self.number_of_tests = sum(len(tests) for tests in itervalues(data["tests"]))
         self.start_time = data["time"]

         if self.number_of_tests == 0:
@@ -182,7 +183,7 @@ class GroupingFormatter(base.BaseFormatter):
             else:
                 failures_by_stack[failure['stack']].append(failure)

-        for (stack, failures) in failures_by_stack.iteritems():
+        for (stack, failures) in iteritems(failures_by_stack):
             output += make_subtests_failure(test_name, failures, stack)
         return output
```
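The formatter changes above all follow one pattern: the Python 2-only `dict.itervalues()`/`dict.iteritems()` methods become six's free functions, which work on both versions. A toy example:

```python
from six import iteritems, itervalues

data = {"tests": {"/a.html": [1, 2], "/b.html": [3]}}

# six.itervalues()/six.iteritems() behave like the Python 2 dict methods
# but also accept Python 3 dicts, which lack them.
print(sum(len(tests) for tests in itervalues(data["tests"])))  # 3
for name, tests in iteritems(data["tests"]):
    print(name, len(tests))
```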
```diff
@@ -8,6 +8,7 @@ import os
 import sys
 import tempfile
 from collections import defaultdict
+from six import iterkeys, iteritems

 from mozlog.structured import commandline
 from wptrunner.wptcommandline import get_test_paths, set_from_config
@@ -58,7 +59,7 @@ def update(logger, wpt_dir, check_clean=True, rebuild=False):


 def _update(logger, test_paths, rebuild):
-    for url_base, paths in test_paths.iteritems():
+    for url_base, paths in iteritems(test_paths):
         manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")
         cache_subdir = os.path.relpath(os.path.dirname(manifest_path),
                                        os.path.dirname(__file__))
@@ -75,7 +76,7 @@ def _update(logger, test_paths, rebuild):
 def _check_clean(logger, test_paths):
     manifests_by_path = {}
     rv = 0
-    for url_base, paths in test_paths.iteritems():
+    for url_base, paths in iteritems(test_paths):
         tests_path = paths["tests_path"]
         manifest_path = os.path.join(paths["metadata_path"], "MANIFEST.json")

@@ -102,7 +103,7 @@ def _check_clean(logger, test_paths):

         manifests_by_path[manifest_path] = (old_manifest, new_manifest)

-    for manifest_path, (old_manifest, new_manifest) in manifests_by_path.iteritems():
+    for manifest_path, (old_manifest, new_manifest) in iteritems(manifests_by_path):
         if not diff_manifests(logger, manifest_path, old_manifest, new_manifest):
             logger.error("Manifest %s is outdated, use |./mach update-manifest| to fix." % manifest_path)
             rv = 1
@@ -136,8 +137,8 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
             test_id = tuple(test_id)
             items[path].add((test_type, test_id))

-    old_paths = set(old_items.iterkeys())
-    new_paths = set(new_items.iterkeys())
+    old_paths = set(iterkeys(old_items))
+    new_paths = set(iterkeys(new_items))

     added_paths = new_paths - old_paths
     deleted_paths = old_paths - new_paths
@@ -166,9 +167,9 @@ def diff_manifests(logger, manifest_path, old_manifest, new_manifest):
     # Manifest currently has some list vs tuple inconsistencies that break
     # a simple equality comparison.
     new_paths = {(key, value[0], value[1])
-                 for (key, value) in new_manifest.to_json()["paths"].iteritems()}
+                 for (key, value) in iteritems(new_manifest.to_json()["paths"])}
     old_paths = {(key, value[0], value[1])
-                 for (key, value) in old_manifest.to_json()["paths"].iteritems()}
+                 for (key, value) in iteritems(old_manifest.to_json()["paths"])}
     if old_paths != new_paths:
         logger.warning("Manifest %s contains correct tests but file hashes changed." % manifest_path)  # noqa
         clean = False
```
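A worked example of the path-set diffing in `diff_manifests` above, using `six.iterkeys` so the same code runs under Python 2 and 3 (the manifest entries here are made up):

```python
from six import iterkeys

old_items = {"a.html": 1, "b.html": 2}  # hypothetical manifest entries
new_items = {"b.html": 2, "c.html": 3}

old_paths = set(iterkeys(old_items))
new_paths = set(iterkeys(new_items))
print(new_paths - old_paths)  # added paths: {'c.html'}
print(old_paths - new_paths)  # deleted paths: {'a.html'}
```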
```diff
@@ -8,7 +8,7 @@ import gzip
 import json
 import os
 import requests
-import urlparse
+import six.moves.urllib as urllib

 treeherder_base = "https://treeherder.mozilla.org/"

@@ -53,7 +53,7 @@ def download(url, prefix, dest, force_suffix=True):

 def get_blobber_url(branch, job):
     job_id = job["id"]
-    resp = requests.get(urlparse.urljoin(treeherder_base,
+    resp = requests.get(urllib.parse.urljoin(treeherder_base,
                                          "/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
                                                                                                    job_id)))
     job_data = resp.json()
@@ -71,13 +71,13 @@ def get_blobber_url(branch, job):


 def get_structured_logs(branch, commit, dest=None):
-    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))
+    resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))

     revision_data = resp.json()

     result_set = revision_data["results"][0]["id"]

-    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))
+    resp = requests.get(urllib.parse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))

     job_data = resp.json()
```
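For reference, `six.moves.urllib` papers over the Python 2/3 split of the URL modules: `six.moves.urllib.parse` maps `urljoin` to `urlparse.urljoin` on Python 2 and to `urllib.parse.urljoin` on Python 3, so the calls above run unchanged on both:

```python
import six.moves.urllib as urllib

treeherder_base = "https://treeherder.mozilla.org/"
# Same call on Python 2 (urlparse.urljoin) and Python 3 (urllib.parse.urljoin).
print(urllib.parse.urljoin(treeherder_base, "/api/project/mozilla-central/jobs/"))
```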
```diff
@@ -2,8 +2,10 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.

+from __future__ import print_function
+
 import json
-from urlparse import urljoin
+from six.moves.urllib.parse import urljoin
 requests = None

 class GitHubError(Exception):
@@ -46,7 +48,7 @@ class GitHub(object):
         if 200 <= resp.status_code < 300:
             return resp.json()
         else:
-            print resp.status_code, resp.json()
+            print(resp.status_code, resp.json())
             raise GitHubError(resp.status_code, resp.json())

     def repo(self, owner, name):
```
```diff
@@ -1,15 +1,18 @@
+from __future__ import print_function
+
 import os
 import re
 import subprocess
 import sys
-import urlparse
+import six.moves.urllib as urllib
+from six.moves import input
+from six import iteritems

 from wptrunner.update.sync import UpdateCheckout
 from wptrunner.update.tree import get_unique_name
 from wptrunner.update.base import Step, StepRunner, exit_clean, exit_unclean

 from .tree import Commit, GitTree, Patch
-import github
+from .github import GitHub
@@ -155,9 +158,9 @@ class SelectCommits(Step):
         while True:
             commits = state.source_commits[:]
             for i, commit in enumerate(commits):
-                print "%i:\t%s" % (i, commit.message.summary)
+                print("%i:\t%s" % (i, commit.message.summary))

-            remove = raw_input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
+            remove = input("Provide a space-separated list of any commits numbers to remove from the list to upstream:\n").strip()
             remove_idx = set()
             invalid = False
             for item in remove.split(" "):
@@ -178,10 +181,10 @@ class SelectCommits(Step):

             keep_commits = [(i,cmt) for i,cmt in enumerate(commits) if i not in remove_idx]
             #TODO: consider printed removed commits
-            print "Selected the following commits to keep:"
+            print("Selected the following commits to keep:")
             for i, commit in keep_commits:
-                print "%i:\t%s" % (i, commit.message.summary)
-            confirm = raw_input("Keep the above commits? y/n\n").strip().lower()
+                print("%i:\t%s" % (i, commit.message.summary))
+            confirm = input("Keep the above commits? y/n\n").strip().lower()

             if confirm == "y":
                 state.source_commits = [item[1] for item in keep_commits]
@@ -210,7 +213,7 @@ class MovePatches(Step):
             try:
                 state.sync_tree.import_patch(stripped_patch, 1 + strip_count)
             except:
-                print patch.diff
+                print(patch.diff)
                 raise
             state.commits_loaded = i

@@ -262,7 +265,7 @@ class MergeUpstream(Step):
         if "merge_index" not in state:
             state.merge_index = 0

-        org, name = urlparse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
+        org, name = urllib.parse.urlsplit(state.sync["remote_url"]).path[1:].split("/")
         if name.endswith(".git"):
             name = name[:-4]
         state.gh_repo = gh.repo(org, name)
@@ -285,7 +288,7 @@ class UpdateLastSyncData(Step):
         data = {"local": state.local_tree.rev,
                 "upstream": state.sync_tree.rev}
         with open(state.sync_data_path, "w") as f:
-            for key, value in data.iteritems():
+            for key, value in iteritems(data):
                 f.write("%s: %s\n" % (key, value))
             # This gets added to the patch later on
```
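And a worked example of the `MergeUpstream` org/name extraction above; the remote URL is illustrative:

```python
import six.moves.urllib as urllib

remote_url = "https://github.com/servo/servo.git"  # example remote
# path is "/servo/servo.git"; drop the leading slash and split once.
org, name = urllib.parse.urlsplit(remote_url).path[1:].split("/")
if name.endswith(".git"):
    name = name[:-4]
print(org, name)  # servo servo
```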