Port some code to Python3

Vincent Ricard 2020-12-28 22:31:49 +01:00 committed by Josh Matthews
parent f73370088b
commit a627dde0d0
24 changed files with 1439 additions and 2341 deletions


@@ -65,11 +65,11 @@ impl<'a> phf_shared::PhfHash for Bytes<'a> {
 }
 fn find_python() -> String {
-env::var("PYTHON2").ok().unwrap_or_else(|| {
+env::var("PYTHON3").ok().unwrap_or_else(|| {
 let candidates = if cfg!(windows) {
-["python2.7.exe", "python27.exe", "python.exe"]
+["python3.8.exe", "python38.exe", "python.exe"]
 } else {
-["python2.7", "python2", "python"]
+["python3.8", "python3", "python"]
 };
 for &name in &candidates {
 if Command::new(name)
@@ -82,7 +82,7 @@ fn find_python() -> String {
 }
 }
 panic!(
-"Can't find python (tried {})! Try fixing PATH or setting the PYTHON2 env var",
+"Can't find python (tried {})! Try fixing PATH or setting the PYTHON3 env var",
 candidates.join(", ")
 )
 })


@@ -344,7 +344,7 @@ class CGMethodCall(CGThing):
 distinguishingArg = "HandleValue::from_raw(args.get(%d))" % distinguishingIndex
 def pickFirstSignature(condition, filterLambda):
-sigs = filter(filterLambda, possibleSignatures)
+sigs = list(filter(filterLambda, possibleSignatures))
 assert len(sigs) < 2
 if len(sigs) > 0:
 call = getPerSignatureCall(sigs[0], distinguishingIndex)
@@ -2117,7 +2117,7 @@ class CGImports(CGWrapper):
 members += [constructor]
 if d.proxy:
-members += [o for o in d.operations.values() if o]
+members += [o for o in list(d.operations.values()) if o]
 for m in members:
 if m.isMethod():
@@ -2557,7 +2557,7 @@ def UnionTypes(descriptors, dictionaries, callbacks, typedefs, config):
 ])
 # Sort unionStructs by key, retrieve value
-unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0)))
+unionStructs = (i[1] for i in sorted(list(unionStructs.items()), key=operator.itemgetter(0)))
 return CGImports(CGList(unionStructs, "\n\n"),
 descriptors=[],
@@ -4455,9 +4455,10 @@ class CGEnum(CGThing):
 pub enum %s {
 %s
 }
-""" % (ident, ",\n ".join(map(getEnumValueName, enum.values())))
+""" % (ident, ",\n ".join(map(getEnumValueName, list(enum.values()))))
-pairs = ",\n ".join(['("%s", super::%s::%s)' % (val, ident, getEnumValueName(val)) for val in enum.values()])
+pairs = ",\n ".join(['("%s", super::%s::%s)' % (val, ident, getEnumValueName(val))
+for val in list(enum.values())])
 inner = string.Template("""\
 use crate::dom::bindings::conversions::ConversionResult;
@@ -4640,9 +4641,8 @@ class CGUnionStruct(CGThing):
 return "Rc"
 return ""
-templateVars = map(lambda t: (getUnionTypeTemplateVars(t, self.descriptorProvider),
-getTypeWrapper(t)),
-self.type.flatMemberTypes)
+templateVars = [(getUnionTypeTemplateVars(t, self.descriptorProvider),
+getTypeWrapper(t)) for t in self.type.flatMemberTypes]
 enumValues = [
 " %s(%s)," % (v["name"], "%s<%s>" % (wrapper, v["typeName"]) if wrapper else v["typeName"])
 for (v, wrapper) in templateVars
@@ -4701,7 +4701,7 @@ class CGUnionConversionStruct(CGThing):
 " Ok(None) => (),\n"
 "}\n") % (self.type, name, self.type, name)
-interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
+interfaceMemberTypes = [t for t in memberTypes if t.isNonCallbackInterface()]
 if len(interfaceMemberTypes) > 0:
 typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
 interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
@@ -4709,7 +4709,7 @@ class CGUnionConversionStruct(CGThing):
 else:
 interfaceObject = None
-arrayObjectMemberTypes = filter(lambda t: t.isSequence(), memberTypes)
+arrayObjectMemberTypes = [t for t in memberTypes if t.isSequence()]
 if len(arrayObjectMemberTypes) > 0:
 assert len(arrayObjectMemberTypes) == 1
 typeName = arrayObjectMemberTypes[0].name
@@ -4718,7 +4718,7 @@ class CGUnionConversionStruct(CGThing):
 else:
 arrayObject = None
-callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
+callbackMemberTypes = [t for t in memberTypes if t.isCallback() or t.isCallbackInterface()]
 if len(callbackMemberTypes) > 0:
 assert len(callbackMemberTypes) == 1
 typeName = callbackMemberTypes[0].name
@@ -4726,7 +4726,7 @@ class CGUnionConversionStruct(CGThing):
 else:
 callbackObject = None
-dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
+dictionaryMemberTypes = [t for t in memberTypes if t.isDictionary()]
 if len(dictionaryMemberTypes) > 0:
 assert len(dictionaryMemberTypes) == 1
 typeName = dictionaryMemberTypes[0].name
@@ -4735,7 +4735,7 @@ class CGUnionConversionStruct(CGThing):
 else:
 dictionaryObject = None
-objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
+objectMemberTypes = [t for t in memberTypes if t.isObject()]
 if len(objectMemberTypes) > 0:
 assert len(objectMemberTypes) == 1
 typeName = objectMemberTypes[0].name
@@ -4744,7 +4744,7 @@ class CGUnionConversionStruct(CGThing):
 else:
 object = None
-mozMapMemberTypes = filter(lambda t: t.isRecord(), memberTypes)
+mozMapMemberTypes = [t for t in memberTypes if t.isRecord()]
 if len(mozMapMemberTypes) > 0:
 assert len(mozMapMemberTypes) == 1
 typeName = mozMapMemberTypes[0].name
@@ -4790,9 +4790,9 @@ class CGUnionConversionStruct(CGThing):
 typename = get_name(memberType)
 return CGGeneric(get_match(typename))
 other = []
-stringConversion = map(getStringOrPrimitiveConversion, stringTypes)
-numericConversion = map(getStringOrPrimitiveConversion, numericTypes)
-booleanConversion = map(getStringOrPrimitiveConversion, booleanTypes)
+stringConversion = list(map(getStringOrPrimitiveConversion, stringTypes))
+numericConversion = list(map(getStringOrPrimitiveConversion, numericTypes))
+booleanConversion = list(map(getStringOrPrimitiveConversion, booleanTypes))
 if stringConversion:
 if booleanConversion:
 other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0]))
@@ -5958,7 +5958,7 @@ class CGInterfaceTrait(CGThing):
 rettype)
 if descriptor.proxy:
-for name, operation in descriptor.operations.iteritems():
+for name, operation in descriptor.operations.items():
 if not operation or operation.isStringifier():
 continue
@@ -6488,7 +6488,7 @@ class CGDescriptor(CGThing):
 post='\n')
 if reexports:
-reexports = ', '.join(map(lambda name: reexportedName(name), reexports))
+reexports = ', '.join([reexportedName(name) for name in reexports])
 cgThings = CGList([CGGeneric('pub use self::%s::{%s};' % (toBindingNamespace(descriptor.name), reexports)),
 cgThings], '\n')
@@ -7824,7 +7824,7 @@ impl Clone for TopTypeId {
 # TypeId enum.
 return "%s(%sTypeId)" % (name, name) if name in hierarchy else name
-for base, derived in hierarchy.iteritems():
+for base, derived in hierarchy.items():
 variants = []
 if config.getDescriptor(base).concrete:
 variants.append(CGGeneric(base))
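
Most of the codegen changes above are one Python 3 pattern applied over and over: filter(), map(), and dict.values()/items() now return lazy iterators instead of lists, so any call site that takes len(), indexes, or iterates more than once has to wrap the result in list() or switch to a comprehension. A self-contained sketch of the difference (the data here is made up for illustration, not taken from the codegen):

    # Python 3: filter()/map() and dict.values() are lazy iterators, not lists.
    signatures = [("render", 2), ("clear", 0), ("resize", 1)]

    lazy = filter(lambda sig: sig[1] == 1, signatures)
    # len(lazy) or lazy[0] would raise TypeError on Python 3.

    unary = list(filter(lambda sig: sig[1] == 1, signatures))
    assert len(unary) == 1 and unary[0] == ("resize", 1)

    # The diff often goes one step further and replaces filter()/map() with comprehensions:
    unary = [sig for sig in signatures if sig[1] == 1]
    assert unary == [("resize", 1)]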


@@ -73,7 +73,7 @@ class Configuration:
 def getDescriptors(self, **filters):
 """Gets the descriptors that match the given filters."""
 curr = self.descriptors
-for key, val in filters.iteritems():
+for key, val in filters.items():
 if key == 'webIDLFile':
 def getter(x):
 return x.interface.filename()
@@ -104,14 +104,14 @@ class Configuration:
 else:
 def getter(x):
 return getattr(x, key)
-curr = filter(lambda x: getter(x) == val, curr)
+curr = [x for x in curr if getter(x) == val]
 return curr
 def getEnums(self, webIDLFile):
-return filter(lambda e: e.filename() == webIDLFile, self.enums)
+return [e for e in self.enums if e.filename() == webIDLFile]
 def getTypedefs(self, webIDLFile):
-return filter(lambda e: e.filename() == webIDLFile, self.typedefs)
+return [e for e in self.typedefs if e.filename() == webIDLFile]
 @staticmethod
 def _filterForFile(items, webIDLFile=""):
@@ -119,7 +119,7 @@ class Configuration:
 if not webIDLFile:
 return items
-return filter(lambda x: x.filename() == webIDLFile, items)
+return [x for x in items if x.filename() == webIDLFile]
 def getDictionaries(self, webIDLFile=""):
 return self._filterForFile(self.dictionaries, webIDLFile=webIDLFile)
@@ -327,7 +327,7 @@ class Descriptor(DescriptorProvider):
 if config == '*':
 iface = self.interface
 while iface:
-add('all', map(lambda m: m.name, iface.members), attribute)
+add('all', [m.name for m in iface.members], attribute)
 iface = iface.parent
 else:
 add('all', [config], attribute)


@@ -4,7 +4,7 @@
 """ A WebIDL parser. """
-from __future__ import print_function
 from ply import lex, yacc
 import re
 import os
@@ -57,7 +57,7 @@ def enum(*names, **kw):
 if "base" not in kw:
 return Foo(names)
-return Foo(chain(kw["base"].attrs.keys(), names))
+return Foo(chain(list(kw["base"].attrs.keys()), names))
 class WebIDLError(Exception):
@@ -124,6 +124,9 @@ class BuiltinLocation(object):
 return (isinstance(other, BuiltinLocation) and
 self.msg == other.msg)
+def __hash__(self):
+return hash(self.msg)
 def filename(self):
 return '<builtin>'
@@ -2360,6 +2363,9 @@ class IDLNullableType(IDLParametrizedType):
 def __eq__(self, other):
 return isinstance(other, IDLNullableType) and self.inner == other.inner
+def __hash__(self):
+return hash(self.inner)
 def __str__(self):
 return self.inner.__str__() + "OrNull"
@@ -2522,6 +2528,9 @@ class IDLSequenceType(IDLParametrizedType):
 def __eq__(self, other):
 return isinstance(other, IDLSequenceType) and self.inner == other.inner
+def __hash__(self):
+return hash(self.inner)
 def __str__(self):
 return self.inner.__str__() + "Sequence"
@@ -2933,6 +2942,9 @@ class IDLWrapperType(IDLType):
 self._identifier == other._identifier and
 self.builtin == other.builtin)
+def __hash__(self):
+return hash((self._identifier, self.builtin))
 def __str__(self):
 return str(self.name) + " (Wrapper)"
@@ -3301,6 +3313,12 @@ class IDLBuiltinType(IDLType):
 return "MaybeShared" + str(self.name)
 return str(self.name)
+def __eq__(self, other):
+return other and self.location == other.location and self.name == other.name and self._typeTag == other._typeTag
+def __hash__(self):
+return hash((self.location, self.name, self._typeTag))
 def prettyName(self):
 return IDLBuiltinType.PrettyNames[self._typeTag]
@@ -3628,7 +3646,7 @@ integerTypeSizes = {
 def matchIntegerValueToType(value):
-for type, extremes in integerTypeSizes.items():
+for type, extremes in list(integerTypeSizes.items()):
 (min, max) = extremes
 if value <= max and value >= min:
 return BuiltinTypes[type]
@@ -3707,7 +3725,7 @@ class IDLValue(IDLObject):
 elif self.type.isString() and type.isEnum():
 # Just keep our string, but make sure it's a valid value for this enum
 enum = type.unroll().inner
-if self.value not in enum.values():
+if self.value not in list(enum.values()):
 raise WebIDLError("'%s' is not a valid default value for enum %s"
 % (self.value, enum.identifier.name),
 [location, enum.location])
@@ -4789,7 +4807,7 @@ class IDLAttribute(IDLInterfaceMember):
 "CrossOriginWritable",
 "SetterThrows",
 ]
-for (key, value) in self._extendedAttrDict.items():
+for (key, value) in list(self._extendedAttrDict.items()):
 if key in allowedExtAttrs:
 if value is not True:
 raise WebIDLError("[%s] with a value is currently "
@@ -5479,7 +5497,7 @@ class IDLMethod(IDLInterfaceMember, IDLScope):
 [attr.location])
 if identifier == "CrossOriginCallable" and self.isStatic():
 raise WebIDLError("[CrossOriginCallable] is only allowed on non-static "
-"attributes"
+"attributes",
 [attr.location, self.location])
 elif identifier == "Pure":
 if not attr.noArguments():
@@ -5721,6 +5739,7 @@ class Tokenizer(object):
 "FLOATLITERAL",
 "IDENTIFIER",
 "STRING",
+"COMMENTS",
 "WHITESPACE",
 "OTHER"
 ]
@@ -5753,8 +5772,12 @@ class Tokenizer(object):
 t.value = t.value[1:-1]
 return t
+def t_COMMENTS(self, t):
+r'(\/\*(.|\n)*?\*\/)|(\/\/.*)'
+pass
 def t_WHITESPACE(self, t):
-r'[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+'
+r'[\t\n\r ]+'
 pass
 def t_ELLIPSIS(self, t):
@@ -5840,7 +5863,7 @@ class Tokenizer(object):
 "async": "ASYNC",
 }
-tokens.extend(keywords.values())
+tokens.extend(list(keywords.values()))
 def t_error(self, t):
 raise WebIDLError("Unrecognized Input",
@@ -5849,23 +5872,21 @@ class Tokenizer(object):
 lexpos=self.lexer.lexpos,
 filename=self.filename)])
-def __init__(self, outputdir, lexer=None):
+def __init__(self, lexer=None):
 if lexer:
 self.lexer = lexer
 else:
-self.lexer = lex.lex(object=self,
-outputdir=outputdir,
-lextab='webidllex',
-reflags=re.DOTALL)
+self.lexer = lex.lex(object=self)
 class SqueakyCleanLogger(object):
 errorWhitelist = [
-# Web IDL defines the WHITESPACE token, but doesn't actually
+# Web IDL defines the WHITESPACE and COMMENTS token, but doesn't actually
 # use it ... so far.
 "Token 'WHITESPACE' defined, but not used",
-# And that means we have an unused token
-"There is 1 unused token",
+"Token 'COMMENTS' defined, but not used",
+# And that means we have unused tokens
+"There are 2 unused tokens",
 # Web IDL defines a OtherOrComma rule that's only used in
 # ExtendedAttributeInner, which we don't use yet.
 "Rule 'OtherOrComma' defined, but not used",
@@ -7506,22 +7527,11 @@ class Parser(Tokenizer):
 raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)])
 def __init__(self, outputdir='', lexer=None):
-Tokenizer.__init__(self, outputdir, lexer)
+Tokenizer.__init__(self, lexer)
 logger = SqueakyCleanLogger()
 try:
-self.parser = yacc.yacc(module=self,
-outputdir=outputdir,
-tabmodule='webidlyacc',
-errorlog=logger,
-debug=False
-# Pickling the grammar is a speedup in
-# some cases (older Python?) but a
-# significant slowdown in others.
-# We're not pickling for now, until it
-# becomes a speedup again.
-# , picklefile='WebIDLGrammar.pkl'
-)
+self.parser = yacc.yacc(module=self, errorlog=logger, debug=False)
 finally:
 logger.reportGrammarErrors()
@@ -7553,12 +7563,12 @@ class Parser(Tokenizer):
 return type
 def parse(self, t, filename=None):
-self.lexer.input(t)
+self._filename = filename
+self.lexer.input(t.decode(encoding = 'utf-8'))
 # for tok in iter(self.lexer.token, None):
 # print tok
-self._filename = filename
 self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True))
 self._filename = None
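
The added __hash__ methods are not incidental: in Python 3, a class that defines __eq__ without __hash__ gets __hash__ set to None and its instances become unhashable, which breaks any code that puts them in sets or uses them as dict keys; Python 2 silently kept the identity-based hash. A standalone illustration of the rule (the class below is invented for the example, not the parser's own location type):

    class Loc:
        def __init__(self, msg):
            self.msg = msg

        def __eq__(self, other):
            return isinstance(other, Loc) and self.msg == other.msg

        # Without this, Python 3 sets __hash__ to None (because __eq__ is defined)
        # and hash(Loc("x")) raises "TypeError: unhashable type".
        def __hash__(self):
            return hash(self.msg)

    assert len({Loc("<builtin>"), Loc("<builtin>")}) == 1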


@@ -3,7 +3,7 @@ http://www.dabeaz.com/ply/
 Licensed under BSD.
-This directory contains just the code and license from PLY version 3.3;
+This directory contains just the code and license from PLY version 4.0;
 the full distribution (see the URL) also contains examples, tests,
 documentation, and a longer README.


@@ -1,4 +1,6 @@
 # PLY package
 # Author: David Beazley (dave@dabeaz.com)
+# https://dabeaz.com/ply/index.html
+__version__ = '4.0'
 __all__ = ['lex','yacc']

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -52,7 +52,7 @@ def main():
 module = CGBindingRoot(config, prefix, filename).define()
 if module:
 with open(os.path.join(out_dir, prefix + ".rs"), "wb") as f:
-f.write(module)
+f.write(module.encode("utf-8"))
 def make_dir(path):
@@ -66,7 +66,7 @@ def generate(config, name, filename):
 root = getattr(GlobalGenRoots, name)(config)
 code = root.define()
 with open(filename, "wb") as f:
-f.write(code)
+f.write(code.encode("utf-8"))
 def add_css_properties_attributes(css_properties_json, parser):
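
The .encode("utf-8") calls are needed because these files are opened in binary mode: on Python 3 a file opened with "wb" only accepts bytes, while the code generator returns str. A minimal reproduction of the failure mode (the file name is arbitrary):

    import os
    import tempfile

    code = "pub struct Generated;\n"                 # the generator produces str on Python 3
    path = os.path.join(tempfile.mkdtemp(), "out.rs")

    with open(path, "wb") as f:
        # f.write(code) would raise TypeError: a bytes-like object is required, not 'str'
        f.write(code.encode("utf-8"))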


@@ -40,11 +40,11 @@ fn error(message: &str) {
 }
 fn find_python() -> String {
-env::var("PYTHON2").ok().unwrap_or_else(|| {
+env::var("PYTHON3").ok().unwrap_or_else(|| {
 let candidates = if cfg!(windows) {
-["python2.7.exe", "python27.exe", "python.exe"]
+["python3.8.exe", "python38.exe", "python.exe"]
 } else {
-["python2.7", "python2", "python"]
+["python3.8", "python3", "python"]
 };
 for &name in &candidates {
 if Command::new(name)
@@ -57,7 +57,7 @@ fn find_python() -> String {
 }
 }
 panic!(
-"Can't find python (tried {})! Try fixing PATH or setting the PYTHON2 env var",
+"Can't find python (tried {})! Try fixing PATH or setting the PYTHON3 env var",
 candidates.join(", ")
 )
 })


@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # Copyright 2018 The Servo Project Developers. See the COPYRIGHT
 # file at the top-level directory of this distribution.


@@ -57,7 +57,7 @@ def tasks(task_for):
 "try-mac": [macos_unit],
 "try-linux": [linux_tidy_unit, linux_docs_check, linux_release],
-"try-windows": [windows_unit, windows_arm64, windows_uwp_x64],
+"try-windows": [windows_arm64, windows_uwp_x64],
 "try-arm": [windows_arm64],
 "try-wpt": [linux_wpt],
 "try-wpt-2020": [linux_wpt_layout_2020],
@@ -130,6 +130,8 @@ windows_build_env = {
 },
 "all": {
 "PYTHON3": "%HOMEDRIVE%%HOMEPATH%\\python3\\python.exe",
+"PYTHONPATH": "%HOMEDRIVE%%HOMEPATH%\\python3",
+"PYTHONHOME": "%HOMEDRIVE%%HOMEPATH%\\python3",
 "LINKER": "lld-link.exe",
 "MOZTOOLS_PATH_PREPEND": "%HOMEDRIVE%%HOMEPATH%\\git\\cmd",
 },
@@ -155,11 +157,11 @@ def linux_tidy_unit_untrusted():
 .with_env(**build_env, **unix_build_env, **linux_build_env)
 .with_repo_bundle()
 .with_script("""
-./mach test-tidy --no-progress --all
-./mach test-tidy --no-progress --self-test
-./mach bootstrap-gstreamer
-./mach build --dev
-./mach test-unit
+python3 ./mach test-tidy --no-progress --all
+python3 ./mach test-tidy --no-progress --self-test
+python3 ./mach bootstrap-gstreamer
+python3 ./mach build --dev
+python3 ./mach test-unit
 ./etc/ci/lockfile_changed.sh
 ./etc/memory_reports_over_time.py --test
@@ -175,8 +177,7 @@ def linux_tidy_unit():
 .with_treeherder("Linux x64", "Tidy+Unit")
 .with_max_run_time_minutes(75)
 .with_script("""
-./mach test-tidy --no-progress --all
-python3 ./mach test-tidy --no-progress --all --no-wpt
+python3 ./mach test-tidy --no-progress --all
 python3 ./mach build --dev
 python3 ./mach test-unit
 python3 ./mach package --dev
@@ -201,7 +202,7 @@ def linux_docs_check():
 linux_build_task("Docs + check")
 .with_treeherder("Linux x64", "Doc+Check")
 .with_script("""
-RUSTDOCFLAGS="--disable-minification" ./mach doc
+RUSTDOCFLAGS="--disable-minification" python3 ./mach doc
 (
 cd target/doc
 git init
@@ -219,7 +220,7 @@ def linux_docs_check():
 # The reverse order would not increase the total amount of work to do,
 # but would reduce the amount of parallelism available.
 """
-./mach check
+python3 ./mach check
 """)
 .with_artifacts("/repo/target/doc/docs.bundle")
 .find_or_create("docs." + CONFIG.tree_hash())
@@ -243,7 +244,7 @@ def upload_docs():
 open("/root/.git-credentials", "w").write("https://git:%s@github.com/" % token)
 """)
 .with_script("""
-python -c "$PY"
+python3 -c "$PY"
 git init --bare
 git config credential.helper store
 git fetch --quiet docs.bundle
@@ -274,9 +275,9 @@ def macos_unit():
 macos_build_task("Dev build + unit tests")
 .with_treeherder("macOS x64", "Unit")
 .with_script("""
-./mach build --dev --verbose
-./mach test-unit
-./mach package --dev
+python3 ./mach build --dev --verbose
+python3 ./mach test-unit
+python3 ./mach package --dev
 ./etc/ci/macos_package_smoketest.sh target/debug/servo-tech-demo.dmg
 ./etc/ci/lockfile_changed.sh
 """)
@@ -296,8 +297,8 @@ def with_rust_nightly():
 .with_treeherder("Linux x64", "RustNightly")
 .with_script("""
 echo "nightly" > rust-toolchain
-./mach build --dev
-./mach test-unit
+python3 ./mach build --dev
+python3 ./mach test-unit
 """)
 .create()
 )
@@ -354,10 +355,10 @@ def uwp_nightly(rdp=False):
 "secrets:get:project/servo/windows-codesign-cert/latest",
 )
 .with_script(
-"python mach build --release --target=x86_64-uwp-windows-msvc",
-"python mach build --release --target=aarch64-uwp-windows-msvc",
-"python mach package --release --target=x86_64-uwp-windows-msvc --uwp=x64 --uwp=arm64",
-"python mach upload-nightly uwp --secret-from-taskcluster",
+"python3 mach build --release --target=x86_64-uwp-windows-msvc",
+"python3 mach build --release --target=aarch64-uwp-windows-msvc",
+"python3 mach package --release --target=x86_64-uwp-windows-msvc --uwp=x64 --uwp=arm64",
+"python3 mach upload-nightly uwp --secret-from-taskcluster",
 )
 .with_artifacts(appx_artifact)
 .with_max_run_time_minutes(3 * 60)
@@ -418,9 +419,9 @@ def linux_nightly():
 .with_scopes("secrets:get:project/servo/s3-upload-credentials")
 # Not reusing the build made for WPT because it has debug assertions
 .with_script(
-"./mach build --release",
-"./mach package --release",
-"./mach upload-nightly linux --secret-from-taskcluster",
+"python3 ./mach build --release",
+"python3 ./mach package --release",
+"python3 ./mach upload-nightly linux --secret-from-taskcluster",
 )
 .with_artifacts("/repo/target/release/servo-tech-demo.tar.gz")
 .find_or_create("build.linux_x64_nightly" + CONFIG.tree_hash())
@@ -432,8 +433,8 @@ def linux_release():
 linux_build_task("Release build")
 .with_treeherder("Linux x64", "Release")
 .with_script(
-"./mach build --release",
-"./mach package --release",
+"python3 ./mach build --release",
+"python3 ./mach package --release",
 )
 .find_or_create("build.linux_x64_release" + CONFIG.tree_hash())
 )
@@ -449,10 +450,10 @@ def macos_nightly():
 "secrets:get:project/servo/github-homebrew-token",
 )
 .with_script(
-"./mach build --release",
-"./mach package --release",
+"python3 ./mach build --release",
+"python3 ./mach package --release",
 "./etc/ci/macos_package_smoketest.sh target/release/servo-tech-demo.dmg",
-"./mach upload-nightly mac --secret-from-taskcluster",
+"python3 ./mach upload-nightly mac --secret-from-taskcluster",
 )
 .with_artifacts("repo/target/release/servo-tech-demo.dmg")
 .find_or_create("build.mac_x64_nightly." + CONFIG.tree_hash())
@@ -489,7 +490,7 @@ def macos_release_build_with_debug_assertions(priority=None):
 .with_treeherder("macOS x64", "Release+A")
 .with_priority(priority)
 .with_script("\n".join([
-"./mach build --release --verbose --with-debug-assertions",
+"python3 ./mach build --release --verbose --with-debug-assertions",
 "./etc/ci/lockfile_changed.sh",
 "tar -czf target.tar.gz" +
 " target/release/servo" +
@@ -516,9 +517,9 @@ def linux_release_build_with_debug_assertions(layout_2020):
 linux_build_task(name_prefix + "Release build, with debug assertions")
 .with_treeherder("Linux x64", treeherder_prefix + "Release+A")
 .with_script("""
-time ./mach rustc -V
-time ./mach fetch
-./mach build --release --with-debug-assertions %s -p servo
+time python3 ./mach rustc -V
+time python3 ./mach fetch
+python3 ./mach build --release --with-debug-assertions %s -p servo
 ./etc/ci/lockfile_changed.sh
 tar -czf /target.tar.gz \
 target/release/servo \
@@ -537,7 +538,7 @@ def macos_wpt():
 priority = "high" if CONFIG.git_ref == "refs/heads/auto" else None
 build_task = macos_release_build_with_debug_assertions(priority=priority)
 def macos_run_task(name):
-task = macos_task(name).with_python2().with_python3() \
+task = macos_task(name).with_python3() \
 .with_repo_bundle(alternate_object_dir="/var/cache/servo.git/objects")
 return with_homebrew(task, ["etc/taskcluster/macos/Brewfile"])
 wpt_chunks(
@@ -619,11 +620,11 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
 if this_chunk == 0:
 if run_webgpu:
 webgpu_script = """
-time ./mach test-wpt _webgpu --release --processes $PROCESSES \
+time python3 ./mach test-wpt _webgpu --release --processes $PROCESSES \
 --headless --log-raw test-webgpu.log --always-succeed \
 --log-errorsummary webgpu-errorsummary.log \
 | cat
-./mach filter-intermittents \
+python3 ./mach filter-intermittents \
 webgpu-errorsummary.log \
 --log-intermittents webgpu-intermittents.log \
 --log-filteredsummary filtered-webgpu-errorsummary.log \
@@ -634,7 +635,7 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
 webgpu_script = ""
 task.with_script("""
-time python ./mach test-wpt --release --binary-arg=--multiprocess \
+time python3 ./mach test-wpt --release --binary-arg=--multiprocess \
 --processes $PROCESSES \
 --log-raw test-wpt-mp.log \
 --log-errorsummary wpt-mp-errorsummary.log \
@@ -647,30 +648,30 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
 --always-succeed \
 url \
 | cat
-./mach filter-intermittents \
+python3 ./mach filter-intermittents \
 wpt-py3-errorsummary.log \
 --log-intermittents wpt-py3-intermittents.log \
 --log-filteredsummary filtered-py3-errorsummary.log \
 --tracker-api default \
 --reporter-api default
-time ./mach test-wpt --release --product=servodriver --headless \
+time python3 ./mach test-wpt --release --product=servodriver --headless \
 tests/wpt/mozilla/tests/mozilla/DOMParser.html \
 tests/wpt/mozilla/tests/css/per_glyph_font_fallback_a.html \
 tests/wpt/mozilla/tests/css/img_simple.html \
 tests/wpt/mozilla/tests/mozilla/secure.https.html \
 | cat
-time ./mach test-wpt --release --processes $PROCESSES --product=servodriver \
+time python3 ./mach test-wpt --release --processes $PROCESSES --product=servodriver \
 --headless --log-raw test-bluetooth.log \
 --log-errorsummary bluetooth-errorsummary.log \
 bluetooth \
 | cat
-time ./mach test-wpt --release --processes $PROCESSES --timeout-multiplier=4 \
+time python3 ./mach test-wpt --release --processes $PROCESSES --timeout-multiplier=4 \
 --headless --log-raw test-wdspec.log \
 --log-servojson wdspec-jsonsummary.log \
 --always-succeed \
 webdriver \
 | cat
-./mach filter-intermittents \
+python3 ./mach filter-intermittents \
 wdspec-jsonsummary.log \
 --log-intermittents intermittents.log \
 --log-filteredsummary filtered-wdspec-errorsummary.log \
@@ -680,7 +681,7 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
 )
 else:
 task.with_script("""
-./mach test-wpt \
+python3 ./mach test-wpt \
 --release \
 $WPT_ARGS \
 --processes $PROCESSES \
@@ -690,7 +691,7 @@ def wpt_chunks(platform, make_chunk_task, build_task, total_chunks, processes,
 --log-servojson wpt-jsonsummary.log \
 --always-succeed \
 | cat
-./mach filter-intermittents \
+python3 ./mach filter-intermittents \
 wpt-jsonsummary.log \
 --log-intermittents intermittents.log \
 --log-filteredsummary filtered-wpt-errorsummary.log \
@@ -770,7 +771,7 @@ def linux_build_task(name, *, build_env=build_env):
 .with_dockerfile(dockerfile_path("build"))
 .with_env(**build_env, **unix_build_env, **linux_build_env)
 .with_repo_bundle()
-.with_script("./mach bootstrap-gstreamer")
+.with_script("python3 ./mach bootstrap-gstreamer")
 )
 return task
@@ -797,12 +798,14 @@ def windows_build_task(name, package=True, arch="x86_64", rdp=False):
 **windows_build_env["all"]
 )
 .with_repo_bundle(sparse_checkout=windows_sparse_checkout)
-.with_python2()
-.with_directory_mount(
-"https://www.python.org/ftp/python/3.7.3/python-3.7.3-embed-amd64.zip",
-sha256="6de14c9223226cf0cd8c965ecb08c51d62c770171a256991b4fddc25188cfa8e",
-path="python3",
-)
+.with_python3()
+# mozjs's virtualenv expects a DLLs folder that contains dynamic libraries.
+# The embedded python distribution does not come with this.
+.with_script("""
+mkdir %HOMEDRIVE%%HOMEPATH%\\python3\\DLLs
+copy %HOMEDRIVE%%HOMEPATH%\\python3\\*.pyd %HOMEDRIVE%%HOMEPATH%\\python3\\DLLs
+copy %HOMEDRIVE%%HOMEPATH%\\python3\\*.dll %HOMEDRIVE%%HOMEPATH%\\python3\\DLLs
+""")
 .with_rustup()
 )
 if arch in hashes["non-devel"] and arch in hashes["devel"]:
@@ -844,7 +847,7 @@ def macos_build_task(name):
 .with_max_run_time_minutes(60 * 2)
 .with_env(**build_env, **unix_build_env, **macos_build_env)
 .with_repo_bundle(alternate_object_dir="/var/cache/servo.git/objects")
-.with_python2()
+.with_python3()
 .with_rustup()
 .with_index_and_artifacts_expire_in(build_artifacts_expire_in)
 # Debugging for surprising generic-worker behaviour


@@ -640,6 +640,30 @@ class WindowsGenericWorkerTask(GenericWorkerTask):
 """) \
 .with_path_from_homedir("python2", "python2\\Scripts")
+def with_python3(self):
+"""
+For Python 3, use `with_directory_mount` and the "embeddable zip file" distribution
+from python.org.
+You may need to remove `python37._pth` from the ZIP in order to work around
+<https://bugs.python.org/issue34841>.
+"""
+return self \
+.with_directory_mount(
+"https://www.python.org/ftp/python/3.7.3/python-3.7.3-embed-amd64.zip",
+sha256="6de14c9223226cf0cd8c965ecb08c51d62c770171a256991b4fddc25188cfa8e",
+path="python3",
+) \
+.with_path_from_homedir("python3", "python3\\Scripts") \
+.with_curl_script("https://bootstrap.pypa.io/get-pip.py", "get-pip.py") \
+.with_script("""
+echo import site>>%HOMEDRIVE%%HOMEPATH%%\\python3\\python37._pth
+echo import sys>%HOMEDRIVE%%HOMEPATH%%\\python3\\sitecustomize.py
+echo sys.path.insert(0, '')>>%HOMEDRIVE%%HOMEPATH%%\\python3\\sitecustomize.py
+python get-pip.py
+python -m pip install virtualenv==20.2.1
+""")
 class UnixTaskMixin(Task):
 def with_repo(self, alternate_object_dir=""):

mach

@@ -6,7 +6,7 @@
 # The beginning of this script is both valid shell and valid python,
 # such that the script starts with the shell and is reexecuted with
 # the right python.
-''':' && if [ ! -z "$MSYSTEM" ] ; then exec python "$0" "$@" ; else which python2.7 > /dev/null 2> /dev/null && exec python2.7 "$0" "$@" || exec python "$0" "$@" ; fi
+''':' && if [ ! -z "$MSYSTEM" ] ; then exec python "$0" "$@" ; else which python3 > /dev/null 2> /dev/null && exec python3 "$0" "$@" || exec python "$0" "$@" ; fi
 '''
 from __future__ import print_function, unicode_literals
@@ -18,8 +18,8 @@ import sys
 # Check for the current python version as some users (especially on archlinux)
 # may not have python 2 installed and their /bin/python binary symlinked to
 # python 3.
-if sys.version_info >= (3, 0) and sys.version_info < (3, 5):
-print("mach does not support python 3 (< 3.5), please install python 2 or python 3 (>= 3.5)")
+if sys.version_info < (3, 5):
+print("mach does not support python 3 (< 3.5), please install python 3 (>= 3.5)")
 sys.exit(1)
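
The first line of mach works because it parses in two different languages: a POSIX shell reduces ''':' to the no-op command :, so the && chain after it runs and re-executes the file under python3 (or python), while Python reads the same line as the start of a triple-quoted string that ends at the following ''', so the shell code is just an ignored string literal. A stripped-down sketch of the same trick, assuming python3 is on PATH and without the MSYSTEM and fallback handling of the real script:

    #!/bin/sh
    ''':' && exec python3 "$0" "$@"
    '''
    # Only Python ever gets here; the shell exec'd python3 on the line above.
    import sys
    print("running under Python %d.%d" % sys.version_info[:2])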


@@ -5,9 +5,9 @@ blessings == 1.6
 distro == 1.4
 mach == 1.0.0
 mozdebug == 0.2
-mozinfo == 1.1.0
-mozlog == 5.0
-setuptools == 39.0
+mozinfo == 1.2.1
+mozlog == 7.1.0
+setuptools == 50.3.2
 toml == 0.9.2
 # For Python linting
@@ -30,6 +30,6 @@ boto3 == 1.4.4
 certifi
 # For Python3 compatibility
-six == 1.12
+six == 1.15
 -e python/tidy


@@ -749,7 +749,7 @@ install them, let us know by filing a bug!")
 # Shorten hash
 # NOTE: Partially verifies the hash, but it will still pass if it's, e.g., a tree
 git_sha = subprocess.check_output([
-'git', 'rev-parse', '--short', git_sha
+'git', 'rev-parse', '--short', git_sha.decode('ascii')
 ])
 else:
 # This is a regular commit
@@ -999,7 +999,7 @@ install them, let us know by filing a bug!")
 toolchain = self.rust_toolchain()
 status = subprocess.call(
-["rustup", "run", toolchain.encode("utf-8"), "rustc", "--version"],
+["rustup", "run", toolchain, "rustc", "--version"],
 stdout=open(os.devnull, "wb"),
 stderr=subprocess.STDOUT,
 )
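
Both hunks above sit on the same Python 3 boundary: subprocess.check_output() returns bytes, and argument lists passed back to subprocess should hold str, so the output of one git call has to be decoded before it is reused, while the explicit .encode("utf-8") on the toolchain name becomes unnecessary. A self-contained sketch of the pattern using a generic command instead of git or rustup:

    import subprocess
    import sys

    # check_output() returns bytes on Python 3 ...
    out = subprocess.check_output([sys.executable, "-c", "print('abc123def456')"])
    assert isinstance(out, bytes)

    # ... so decode it before treating it as text or passing it on as a str argument.
    # (Passing text=True / universal_newlines=True is the other common fix.)
    short = out.decode("ascii").strip()[:7]
    print(short)                                      # abc123d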


@@ -775,7 +775,11 @@ def setup_uwp_signing(ms_app_store, publisher):
 def run_powershell_cmd(cmd):
 try:
-return subprocess.check_output(['powershell.exe', '-NoProfile', '-Command', cmd])
+return (
+subprocess
+.check_output(['powershell.exe', '-NoProfile', '-Command', cmd])
+.decode('utf-8')
+)
 except subprocess.CalledProcessError:
 print("ERROR: PowerShell command failed: ", cmd)
 exit(1)
@@ -841,6 +845,7 @@ def build_uwp(platforms, dev, msbuild_dir, ms_app_store):
 .replace("%%PACKAGE_PLATFORMS%%", '|'.join(platforms))
 .replace("%%CONFIGURATION%%", Configuration)
 .replace("%%SOLUTION%%", path.join(os.getcwd(), 'support', 'hololens', 'ServoApp.sln'))
+.encode('utf-8')
 )
 build_file.close()
 # Generate an appxbundle.


@@ -243,7 +243,8 @@ class PostBuildCommands(CommandBase):
 media_stack=None, **kwargs):
 self.ensure_bootstrapped(rustup_components=["rust-docs"])
 rustc_path = check_output(
-["rustup" + BIN_SUFFIX, "which", "--toolchain", self.rust_toolchain(), "rustc"])
+["rustup" + BIN_SUFFIX, "which", "--toolchain", self.rust_toolchain(), "rustc"]
+).decode('utf-8')
 assert path.basename(path.dirname(rustc_path)) == "bin"
 toolchain_path = path.dirname(path.dirname(rustc_path))
 rust_docs = path.join(toolchain_path, "share", "doc", "rust", "html")


@@ -585,7 +585,10 @@ class MachCommands(CommandBase):
 def format(outputs, description, file=sys.stdout):
 formatted = "%s %s:\n%s" % (len(outputs), description, "\n".join(outputs))
-file.write(formatted.encode("utf-8"))
+if file == sys.stdout:
+file.write(formatted)
+else:
+file.write(formatted.encode("utf-8"))
 if log_intermittents:
 with open(log_intermittents, "wb") as file:


@@ -1,4 +1,5 @@
 [cross-origin-isolated.sub.https.html]
 [self: originAgentCluster must equal true]
 expected: FAIL
+[child: originAgentCluster must equal true]
+expected: FAIL


@@ -11005,7 +11005,7 @@
 []
 ],
 "interfaces.js": [
-"7a105e791dd80bd42d80055a64746bbe5fece41e",
+"e37397aa973f5fb913e5b8097945368c2848bed8",
 []
 ],
 "nested_asap_script.js": [


@@ -10,18 +10,18 @@ def fail(msg):
 def main(request, response):
-content_type = request.headers.get('Content-Type').split("; ")
+content_type = request.headers.get(b'Content-Type').split(b"; ")
 if len(content_type) != 2:
 return fail("content type length is incorrect")
-if content_type[0] != 'multipart/form-data':
+if content_type[0] != b'multipart/form-data':
 return fail("content type first field is incorrect")
-boundary = content_type[1].strip("boundary=")
+boundary = content_type[1].strip(b"boundary=")
-body = "--" + boundary + "\r\nContent-Disposition: form-data; name=\"file-input\"; filename=\"upload.txt\""
+body = b"--" + boundary + b"\r\nContent-Disposition: form-data; name=\"file-input\"; filename=\"upload.txt\""
-body += "\r\n" + "content-type: text/plain\r\n\r\nHello\r\n--" + boundary + "--\r\n"
+body += b"\r\n" + b"content-type: text/plain\r\n\r\nHello\r\n--" + boundary + b"--\r\n"
 if body != request.body:
 return fail("request body doesn't match: " + body + "+++++++" + request.body)


@@ -2,7 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at https://mozilla.org/MPL/2.0/.
-decoded = """\
+decoded = b"""\
 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Ut viverra neque in massa rutrum, non rutrum nunc pellentesque. Phasellus et nulla metus. Nam aliquet felis nec iaculis eleifend. Donec pretium tellus non aliquam tristique. Mauris feugiat eu velit sed maximus. Praesent fringilla lorem vel orci maximus accumsan. Fusce vel sapien ipsum. Nulla ac lectus non arcu semper laoreet.
 Aliquam et massa at ex elementum dictum vitae ac purus. Sed a nunc sed dui pulvinar mollis eu sed eros. Mauris vitae ullamcorper dolor. Ut sed nisl sem. Mauris pulvinar vitae orci nec tincidunt. Integer fringilla quam in lobortis vehicula. In aliquam egestas dapibus. Suspendisse est enim, maximus non massa eget, finibus finibus lorem. Phasellus a varius ante. Pellentesque tempor dignissim nunc ut malesuada.
@@ -24,15 +24,15 @@ Aenean tincidunt consequat augue, in bibendum nisl placerat in. Nulla non dignis
 Nulla facilisis dui odio, at scelerisque erat cursus a. Ut arcu nunc, ullamcorper vitae orci eget, sollicitudin maximus sapien. Fusce eu arcu nunc. Integer vitae eros turpis. Fusce ac elit in nisi rutrum suscipit id consectetur lacus. Suspendisse rutrum ligula auctor fringilla cursus. Sed consequat erat in nunc lacinia, et ullamcorper velit vestibulum. Mauris sed est a tellus feugiat sagittis non nec neque. Sed eu bibendum orci. Donec diam diam, viverra sed dolor non, posuere sagittis mi. Duis rhoncus, risus sit amet luctus sodales, risus arcu faucibus enim, eu cras amet.\
 """
encoded = '\x1b\x99\x13 ,\x0elw\x08\x95t\xd2\xe6\xf2(\xcd\'5M\x9dg\xd7\xc9)uG\xf4P\x8c;\x92_(\xb1\x14@L\x9d$\xa2\x16\x8f\x06\xc8\x18J\xed\xac\xabd_\xfb\xbe\xcb\xab\x90]VTu\xbc\xe1\xc3\x11\x96\x81T\r\xabj\x07\xbd\xe0\xb2\xd7y\x89\x1c\x81\xfd8\x9f\x15\xcf\xf8\xf6\xe6\x84\x8d\x90Ta<\xef\xcf\xce\xcc\x95\xa4\xff;\xcaO\xe9e\x97z/\xeap\t\x0e\x17\xd9\x14\xb6\xa9V\x151\n\xd0\xe0Fh\xd8\xbd\xd2\xdcB@\xed\xfa\xbe\xea\xb1V\xa1\xe7I\xd5\xfa\x7fTV\xa0\xa4\xda\x86G>R\x1a\x84\x1fs\x1b/\x98\xd6\xfa#\xaa_\xf5\xb7-\xf8d\x99\x80\x87O\xdb\xa1\xbe\xa9\x1a\xc3\xfb\xaf!\xc4f#oa\xc4\xffY?\x8f\xf3\xfc{\xe6\x1dB\x11\xe6\xdd\xe6(\\_S \xfd\xc9\x12\x9d4\t\x1cMO\x9a0u\xfd\t\xd7z\x1d\xe4l\'y\x17\x83\xbcCb\x18\xbfs\x16\xe3\xcc\xf1\x82\x97y\xc9n\x93\xce>=\x05\xb7\x15i\x91\xc4\xe3\xceek_\xfe\xfdzQ\x18\xfa\x19/\xe7\xddfk\x15E\xb2\xf6\xf6\xddo\x05\xbeSOc\xbd\xcb\xad{Ve\x1e/\xa0Y\xac\xaf\x87Z\x0f\xc7\xf0\xd9\xda\x17\xf4\x8e%\xf5Qc\xb9[K\xd2\xe1\x86k\x14\x84k \xf8\x12\xe8,2\x7fE}RT\xd5\xcb\xe0lv\xb8~\x11\xc0Bl\x92`\xf1\xb2\xcd\xfc3\xba\xf1\xe5m\xc2mI\xc0>D\x813e\x1b\\]\xfb\xf4G\x1d\xf9,\xa6\xb8\xff@\x947I\x8d\xd1\xbc\x1c\x0c(\xde\x138\xa3\xd8\x8e`\xd6\x7f\x81 \x82\x0e\x87\xfa"\x01\xdbqzL\x8a\x7f{\xb2\xefw\x8c^\xcdS\x9c&K\x1e\x1f\xc7\xaaj\xad\x1f0\x1f\x199\x10\xaez\n\x18\x81R6v\x99j/^\xf9\xbb\x88WB\xae\x97\xc2*\xedM\x80a]\xcc\xc1\x0e{\xf8\x81\xbd,=\xdf\xe6c\x9a\xbe\x7f\nO\x8a\x99\xd1?\xfc\x88\xc4\\\x1a(\xa4\\\xf6!\x7f}\xfd\xed\xb7+\xe4\xff\xfa\xebhk\xf6\x13R@h9j\xfd\x8ev\x9b\x89l\xbe\xfe\x9d8S\x0b\xec\xb7gNk\xcc\x9a\x9fR\xed\xc5Fv/F\xc0\xef)B1u6z\xfc\xd6\x9d\x9a\x1b\x01;a\xfa$\x96\x1b\xd7\x97\xf5\x8f\x0316\xfb\xddZ\xe8;\xdf=\x80S\xed-\xf3\x13\xb5$1\x7f2CNm\xc3+KQ\x97\xafe\xf4i\x91\x8bNq=-h\x82\x9e\xed>B\xb1\xfc,\xbbU\xe1\x14\x1c\x1f\xc9\x14\xc6\xbd\xb5*\xc8\xc5\x0f\xc4l\xed\x13\\_\xf5j\xff0s\xbev\x11\xf0d\x1dl\xd8\x1f\xc0\xe5g\x98(P\x87\xbb%.\x8c\xf0~8\xdcF\x8e\xb3\xd8>\xc6\x0c\xfb\xc4_\xc3\xce\x85\xeds\x9aR\xf3\xdc\xe6\x8dI\xc7`F\x08B?U\xda\xcf\x0c\xb8r,\xa2\x07\x9b\xd3\x1c$aG\xfc\xe5\xd5\x02\x85\xe9\xca\t\x12\xf1\xf6@[C\x10\xe9:\xed\xb5T\x96\xca\x8a\xb1X\xbeaV\x15\x0cp\xd8k\xbam\xe4\xf2\x12*\x03\xebo\x14 \x17\xe6\x04\xff_\x80\x8f\x10\x85/\xe5T\x13\x15\x84o\xde\xc6\xac\x965\x0f\xa7\xa7]\xec^\xbfXd \xd8\x7fiL\xacg\xb2\xc7\xf1\xa5\xd0\x81;\xd7e\x87\x14.\x80\x01z\xe0\xd1\x9cV\xf4\x1e;\xfe\x83\x1d\x9e\xc1\xf6\xbd\xcb\x97\xe2xa]\x18\x1c\x02\xeeZ\xf4b\x08\xa0<\xde\xab3\xec\xe0K\x1b\xfe\xdaC\xe1 \xf7\xb3&?\xae\xa6u\x18\x9buaq\xcd\xefI\xc6zNO\xf1\xca:\xc5\xdfk\\\x96\xc5:\x01<pnba\'\x89N~\xda\xe25*\xd0i\xe2DZ\x90\xf4-\xc4\x99O91\xc9\xd5r9\x8f^8\xcaP \xce\xa3^\xce\x07L)\x87\x1c\xd6C\xad\x1d\x98V\xadl`\xd7\xa4\xc1\x9d"\x8c\xfa\xca\x84\xce\x16\x81A\xabRn?p\xb4\xdbC\x98\xf4\xa1z\x11\xcb\x86CJ\xb6#i\x08&T\x08\xe4\xb8\xf7\x81\x18h\x19h\xa4\xadT\xebD\x14t\x9d2\xff&O)\x17\xaa\x1a\xbb\x99\x98\x03\xc0\xc7\xda\xe0S\xdc\xfa\xb4p\x93g\x1dQ\xb5L\x1drL\xda`G\xa1\x1dGt\x9ci\xbd\x11X9ev^\x14\xa8\x9b\xd6<\x98\x8d\xcad\xd5\x92\xc8\x02\xc0\xc0\xe6M\xcb3\xac\x03\xa6WZ=\x14~\xde \xab@=#r\xb8]\x1c&\xec\xb0\x9e:_\xf2$\xe5b\xbe\x02\x03\x81yi\x1c\xc6>\x95S\x99\'\xc9\xa6o\x1a\xd7~w\xcb\xbc~\xd1XE\x056\x97\x06\ra\xa0\xd8\x1a\xcb\xd4jB\xa8\x9e\x0b\xbc\xf2\xcb3`Y"\xf4\x05\xbe\x98\xcb\xa4S\x97\x0b\xcd\x0cp\x84p\xad\xa2\x07\x8ej?\n\x96m \xdb`\x12\xd4\x11&\\\x07b\xa79\xda\xcb\xc8\x83\xed\x81\xefw\x9f\xf1oF\x0e\xab}`\xee\xb54\xef\xcc\x9f\xc1\xec\xfe\x16\x96B\xa7\x94^\xc4\x10P\xba,eb.\x08-8\t\x8a\xd3Uq\xc3S\x88Z"+J\x93\xd4\xc6\xdde\xde\x8au"l\xc6\x13(\x13\xe4\xc1\xf7c\x1d\xee\xe9)\x11xF\x16\x08"\xafK/W 
\xdc\xb9\xbd\xa5CY\x03Q\xf0\xe4F\xa5\x0eO\xec\xad\xb2q\x17>N-\x15\xff\xfa8\xbbs\xc4|\xcd;,\xc7\xec\'\xa3\xfa\xb9\x07\xd9Q%\xf6\x84\x10q\xe7*VQ\xa3\xbb\xc8\x89\xb7g\xe7t\xe1\xe7\xb5\xc0\x0e8\x8d\x19\xe5v\xa1\t{\x8c\x9b\x1dx#\xf9\xc5\xcb\xf4y\xb9^\x1d\xba\x06\x81\xc52\xb8p\x91\x8b\x04c,\x9a\xa7\xfa\xaa\x93V\xc5>\xe0\xe5X>H\x99\xa6X\x9b\xfa\xbe\xcd\x14\xfd\xe4\x8an\xa18\x1f\x11gc\x83\x0b\xb6RLIz[\x1e\x80\x18\xa3\x9d\xc5\xec\x87\x12\x1b\x12\xe7\xf1\x8a\xae\xb4\xea\x99\x0e2\xa2w\xe4S\xd7\xe9Pq\xfd\x9c\xd6k\xf6\xa5`\x99}\x08\xc9\x9b5\x12\xe8\x17\xe2\xcf\x9f\x9bm\xc3\xe5<\x9f5m\xa1\xa4\xb5\xf1\x87\x8d\xf5}2yte\x14V\xf6\x10\xae\xd4\xeec\xa0\xdaq@(\xd6B\xa8R\xee"v\xf3\xeef\xb7\xb1\x8a\x8cu|\x11J\xb0 \xbe\xe1\x0e\rg\xc3\x9dd\xe2\xb12\xaf\xa3T\xa9\x18\xe7\xf3\x14V\x90\x07\xfali\x91\xc8\x06\xb3\xad\xe0i@\x19"W\x19\x1b\xc9|\xca\xfb\xe1x\xa8\xe4\xd8\x19\x81u4%\xc4_\xfb\xe9\xf90fI\x0eo\x9b\x1d\x98\x13\xa9\xd5\x89\x8c\xab>\xafH\xa2\x91eVe\xea\x03\x19p\xab\xa5\xed\x06\xb9f\x80\xc60\xc0\x8b\x1c\x18\xec\xd3\xb2\xc6l\xe44TAs3\x15\xc4\xac\xac\x0c\x0baN\xcb\xb7\x17\xd9\x1a\xbeG\x88\x9b\x98R\xb0Tp\x04\xa8\x99\x86\x11\xd5_I\x07\xce\x0e\xb8\x92\'Y\xefV\xc287\xdb+\xfd\xd2D\x13\xf7\x84\xec\xd45\x19R\x16O\xa1\x119<2\xb9\xa0K\xf6G\x8e\xc6S\n\r*h\xb1\xd1p\x10\xdd\\\xa9\xd0y\x1cG\x95\xb3D\xba\xa16\xb0\xd1\x98E\x87\x08\x01l.J\xe8\xeaA\x11\xb4Yr@\x19d!\xbb\x91\x06\xf1\x8a\xc0\xcdK\xf9\xback\x14\xa8F\x99)\x9f\xe5\xaf\xce#}ITF\x131T\xab\xe0\x05*>\xbeA{>\xac\xeak\xea\x95\xf9Bw 4\xec\xac\xdc\xe8\xac\xe4\xb6v\xcd\x91\x95\x05' encoded = b'\x1b\x99\x13 ,\x0elw\x08\x95t\xd2\xe6\xf2(\xcd\'5M\x9dg\xd7\xc9)uG\xf4P\x8c;\x92_(\xb1\x14@L\x9d$\xa2\x16\x8f\x06\xc8\x18J\xed\xac\xabd_\xfb\xbe\xcb\xab\x90]VTu\xbc\xe1\xc3\x11\x96\x81T\r\xabj\x07\xbd\xe0\xb2\xd7y\x89\x1c\x81\xfd8\x9f\x15\xcf\xf8\xf6\xe6\x84\x8d\x90Ta<\xef\xcf\xce\xcc\x95\xa4\xff;\xcaO\xe9e\x97z/\xeap\t\x0e\x17\xd9\x14\xb6\xa9V\x151\n\xd0\xe0Fh\xd8\xbd\xd2\xdcB@\xed\xfa\xbe\xea\xb1V\xa1\xe7I\xd5\xfa\x7fTV\xa0\xa4\xda\x86G>R\x1a\x84\x1fs\x1b/\x98\xd6\xfa#\xaa_\xf5\xb7-\xf8d\x99\x80\x87O\xdb\xa1\xbe\xa9\x1a\xc3\xfb\xaf!\xc4f#oa\xc4\xffY?\x8f\xf3\xfc{\xe6\x1dB\x11\xe6\xdd\xe6(\\_S \xfd\xc9\x12\x9d4\t\x1cMO\x9a0u\xfd\t\xd7z\x1d\xe4l\'y\x17\x83\xbcCb\x18\xbfs\x16\xe3\xcc\xf1\x82\x97y\xc9n\x93\xce>=\x05\xb7\x15i\x91\xc4\xe3\xceek_\xfe\xfdzQ\x18\xfa\x19/\xe7\xddfk\x15E\xb2\xf6\xf6\xddo\x05\xbeSOc\xbd\xcb\xad{Ve\x1e/\xa0Y\xac\xaf\x87Z\x0f\xc7\xf0\xd9\xda\x17\xf4\x8e%\xf5Qc\xb9[K\xd2\xe1\x86k\x14\x84k \xf8\x12\xe8,2\x7fE}RT\xd5\xcb\xe0lv\xb8~\x11\xc0Bl\x92`\xf1\xb2\xcd\xfc3\xba\xf1\xe5m\xc2mI\xc0>D\x813e\x1b\\]\xfb\xf4G\x1d\xf9,\xa6\xb8\xff@\x947I\x8d\xd1\xbc\x1c\x0c(\xde\x138\xa3\xd8\x8e`\xd6\x7f\x81 
\x82\x0e\x87\xfa"\x01\xdbqzL\x8a\x7f{\xb2\xefw\x8c^\xcdS\x9c&K\x1e\x1f\xc7\xaaj\xad\x1f0\x1f\x199\x10\xaez\n\x18\x81R6v\x99j/^\xf9\xbb\x88WB\xae\x97\xc2*\xedM\x80a]\xcc\xc1\x0e{\xf8\x81\xbd,=\xdf\xe6c\x9a\xbe\x7f\nO\x8a\x99\xd1?\xfc\x88\xc4\\\x1a(\xa4\\\xf6!\x7f}\xfd\xed\xb7+\xe4\xff\xfa\xebhk\xf6\x13R@h9j\xfd\x8ev\x9b\x89l\xbe\xfe\x9d8S\x0b\xec\xb7gNk\xcc\x9a\x9fR\xed\xc5Fv/F\xc0\xef)B1u6z\xfc\xd6\x9d\x9a\x1b\x01;a\xfa$\x96\x1b\xd7\x97\xf5\x8f\x0316\xfb\xddZ\xe8;\xdf=\x80S\xed-\xf3\x13\xb5$1\x7f2CNm\xc3+KQ\x97\xafe\xf4i\x91\x8bNq=-h\x82\x9e\xed>B\xb1\xfc,\xbbU\xe1\x14\x1c\x1f\xc9\x14\xc6\xbd\xb5*\xc8\xc5\x0f\xc4l\xed\x13\\_\xf5j\xff0s\xbev\x11\xf0d\x1dl\xd8\x1f\xc0\xe5g\x98(P\x87\xbb%.\x8c\xf0~8\xdcF\x8e\xb3\xd8>\xc6\x0c\xfb\xc4_\xc3\xce\x85\xeds\x9aR\xf3\xdc\xe6\x8dI\xc7`F\x08B?U\xda\xcf\x0c\xb8r,\xa2\x07\x9b\xd3\x1c$aG\xfc\xe5\xd5\x02\x85\xe9\xca\t\x12\xf1\xf6@[C\x10\xe9:\xed\xb5T\x96\xca\x8a\xb1X\xbeaV\x15\x0cp\xd8k\xbam\xe4\xf2\x12*\x03\xebo\x14 \x17\xe6\x04\xff_\x80\x8f\x10\x85/\xe5T\x13\x15\x84o\xde\xc6\xac\x965\x0f\xa7\xa7]\xec^\xbfXd \xd8\x7fiL\xacg\xb2\xc7\xf1\xa5\xd0\x81;\xd7e\x87\x14.\x80\x01z\xe0\xd1\x9cV\xf4\x1e;\xfe\x83\x1d\x9e\xc1\xf6\xbd\xcb\x97\xe2xa]\x18\x1c\x02\xeeZ\xf4b\x08\xa0<\xde\xab3\xec\xe0K\x1b\xfe\xdaC\xe1 \xf7\xb3&?\xae\xa6u\x18\x9buaq\xcd\xefI\xc6zNO\xf1\xca:\xc5\xdfk\\\x96\xc5:\x01<pnba\'\x89N~\xda\xe25*\xd0i\xe2DZ\x90\xf4-\xc4\x99O91\xc9\xd5r9\x8f^8\xcaP \xce\xa3^\xce\x07L)\x87\x1c\xd6C\xad\x1d\x98V\xadl`\xd7\xa4\xc1\x9d"\x8c\xfa\xca\x84\xce\x16\x81A\xabRn?p\xb4\xdbC\x98\xf4\xa1z\x11\xcb\x86CJ\xb6#i\x08&T\x08\xe4\xb8\xf7\x81\x18h\x19h\xa4\xadT\xebD\x14t\x9d2\xff&O)\x17\xaa\x1a\xbb\x99\x98\x03\xc0\xc7\xda\xe0S\xdc\xfa\xb4p\x93g\x1dQ\xb5L\x1drL\xda`G\xa1\x1dGt\x9ci\xbd\x11X9ev^\x14\xa8\x9b\xd6<\x98\x8d\xcad\xd5\x92\xc8\x02\xc0\xc0\xe6M\xcb3\xac\x03\xa6WZ=\x14~\xde \xab@=#r\xb8]\x1c&\xec\xb0\x9e:_\xf2$\xe5b\xbe\x02\x03\x81yi\x1c\xc6>\x95S\x99\'\xc9\xa6o\x1a\xd7~w\xcb\xbc~\xd1XE\x056\x97\x06\ra\xa0\xd8\x1a\xcb\xd4jB\xa8\x9e\x0b\xbc\xf2\xcb3`Y"\xf4\x05\xbe\x98\xcb\xa4S\x97\x0b\xcd\x0cp\x84p\xad\xa2\x07\x8ej?\n\x96m \xdb`\x12\xd4\x11&\\\x07b\xa79\xda\xcb\xc8\x83\xed\x81\xefw\x9f\xf1oF\x0e\xab}`\xee\xb54\xef\xcc\x9f\xc1\xec\xfe\x16\x96B\xa7\x94^\xc4\x10P\xba,eb.\x08-8\t\x8a\xd3Uq\xc3S\x88Z"+J\x93\xd4\xc6\xdde\xde\x8au"l\xc6\x13(\x13\xe4\xc1\xf7c\x1d\xee\xe9)\x11xF\x16\x08"\xafK/W \xdc\xb9\xbd\xa5CY\x03Q\xf0\xe4F\xa5\x0eO\xec\xad\xb2q\x17>N-\x15\xff\xfa8\xbbs\xc4|\xcd;,\xc7\xec\'\xa3\xfa\xb9\x07\xd9Q%\xf6\x84\x10q\xe7*VQ\xa3\xbb\xc8\x89\xb7g\xe7t\xe1\xe7\xb5\xc0\x0e8\x8d\x19\xe5v\xa1\t{\x8c\x9b\x1dx#\xf9\xc5\xcb\xf4y\xb9^\x1d\xba\x06\x81\xc52\xb8p\x91\x8b\x04c,\x9a\xa7\xfa\xaa\x93V\xc5>\xe0\xe5X>H\x99\xa6X\x9b\xfa\xbe\xcd\x14\xfd\xe4\x8an\xa18\x1f\x11gc\x83\x0b\xb6RLIz[\x1e\x80\x18\xa3\x9d\xc5\xec\x87\x12\x1b\x12\xe7\xf1\x8a\xae\xb4\xea\x99\x0e2\xa2w\xe4S\xd7\xe9Pq\xfd\x9c\xd6k\xf6\xa5`\x99}\x08\xc9\x9b5\x12\xe8\x17\xe2\xcf\x9f\x9bm\xc3\xe5<\x9f5m\xa1\xa4\xb5\xf1\x87\x8d\xf5}2yte\x14V\xf6\x10\xae\xd4\xeec\xa0\xdaq@(\xd6B\xa8R\xee"v\xf3\xeef\xb7\xb1\x8a\x8cu|\x11J\xb0 
\xbe\xe1\x0e\rg\xc3\x9dd\xe2\xb12\xaf\xa3T\xa9\x18\xe7\xf3\x14V\x90\x07\xfali\x91\xc8\x06\xb3\xad\xe0i@\x19"W\x19\x1b\xc9|\xca\xfb\xe1x\xa8\xe4\xd8\x19\x81u4%\xc4_\xfb\xe9\xf90fI\x0eo\x9b\x1d\x98\x13\xa9\xd5\x89\x8c\xab>\xafH\xa2\x91eVe\xea\x03\x19p\xab\xa5\xed\x06\xb9f\x80\xc60\xc0\x8b\x1c\x18\xec\xd3\xb2\xc6l\xe44TAs3\x15\xc4\xac\xac\x0c\x0baN\xcb\xb7\x17\xd9\x1a\xbeG\x88\x9b\x98R\xb0Tp\x04\xa8\x99\x86\x11\xd5_I\x07\xce\x0e\xb8\x92\'Y\xefV\xc287\xdb+\xfd\xd2D\x13\xf7\x84\xec\xd45\x19R\x16O\xa1\x119<2\xb9\xa0K\xf6G\x8e\xc6S\n\r*h\xb1\xd1p\x10\xdd\\\xa9\xd0y\x1cG\x95\xb3D\xba\xa16\xb0\xd1\x98E\x87\x08\x01l.J\xe8\xeaA\x11\xb4Yr@\x19d!\xbb\x91\x06\xf1\x8a\xc0\xcdK\xf9\xback\x14\xa8F\x99)\x9f\xe5\xaf\xce#}ITF\x131T\xab\xe0\x05*>\xbeA{>\xac\xeak\xea\x95\xf9Bw 4\xec\xac\xdc\xe8\xac\xe4\xb6v\xcd\x91\x95\x05'
 def main(request, response):
-if 'raw' in request.GET:
+if b'raw' in request.GET:
-headers = [("Content-type", "text/plain"),
-("Content-Length", len(decoded))]
+headers = [(b"Content-type", b"text/plain"),
+(b"Content-Length", len(decoded))]
 return headers, decoded
-headers = [("Content-type", "text/plain"),
-("Content-Encoding", "br"),
-("Content-Length", len(encoded))]
+headers = [(b"Content-type", b"text/plain"),
+(b"Content-Encoding", b"br"),
+(b"Content-Length", len(encoded))]
 return headers, encoded


@@ -5,7 +5,7 @@
 def main(request, response):
 headers = []
-if 'Content-Type' in request.GET:
+if b'Content-Type' in request.GET:
-headers += [('Content-Type', request.GET['Content-Type'])]
+headers += [(b'Content-Type', request.GET[b'Content-Type'])]
-with open('./resources/ahem/AHEM____.TTF') as f:
+with open('./resources/ahem/AHEM____.TTF', 'rb') as f:
 return 200, headers, f.read()
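
The added 'rb' matters because Python 3 separates text and binary file modes: a plain open(path) is text mode and tries to decode the contents with a character encoding, which fails or mangles data for a binary TTF font, while 'rb' hands back raw bytes unchanged. A minimal sketch with a throwaway binary file:

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "blob.bin")
    with open(path, "wb") as f:
        f.write(b"\x00\x01\xff\xfe")        # not valid UTF-8, much like real font data

    with open(path, "rb") as f:             # binary mode: returns bytes, no decoding
        assert f.read() == b"\x00\x01\xff\xfe"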