diff --git a/components/script/dom/bindings/codegen/CodegenRust.py b/components/script/dom/bindings/codegen/CodegenRust.py index 1242de82fa1..6f32780a790 100644 --- a/components/script/dom/bindings/codegen/CodegenRust.py +++ b/components/script/dom/bindings/codegen/CodegenRust.py @@ -31,7 +31,7 @@ from WebIDL import ( from Configuration import ( MakeNativeName, - MemberIsUnforgeable, + MemberIsLegacyUnforgeable, getModuleFromObject, getTypesFromCallback, getTypesFromDescriptor, @@ -587,7 +587,7 @@ def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None, isEnforceRange = type.hasEnforceRange() isClamp = type.hasClamp() - if type.treatNullAsEmpty: + if type.legacyNullToEmptyString: treatNullAs = "EmptyString" else: treatNullAs = "Default" @@ -1702,7 +1702,7 @@ class MethodDefiner(PropertyDefiner): m.isMethod() and m.isStatic() == static and (bool(m.getExtendedAttribute("CrossOriginCallable")) or not crossorigin) and not m.isIdentifierLess() - and (MemberIsUnforgeable(m, descriptor) == unforgeable or crossorigin)] + and (MemberIsLegacyUnforgeable(m, descriptor) == unforgeable or crossorigin)] else: methods = [] self.regular = [{"name": m.identifier.name, @@ -1787,8 +1787,8 @@ class MethodDefiner(PropertyDefiner): descriptor) }) - isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable")) - if not static and unforgeable == isUnforgeableInterface and not crossorigin: + isLegacyUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("LegacyUnforgeable")) + if not static and unforgeable == isLegacyUnforgeableInterface and not crossorigin: stringifier = descriptor.operations['Stringifier'] if stringifier: self.regular.append({ @@ -1890,7 +1890,7 @@ class AttrDefiner(PropertyDefiner): } for m in descriptor.interface.members if m.isAttr() and m.isStatic() == static - and (MemberIsUnforgeable(m, descriptor) == unforgeable or crossorigin) + and (MemberIsLegacyUnforgeable(m, descriptor) == unforgeable or crossorigin) and (not crossorigin or m.getExtendedAttribute("CrossOriginReadable") or m.getExtendedAttribute("CrossOriginWritable")) @@ -1923,7 +1923,7 @@ class AttrDefiner(PropertyDefiner): accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name) jitinfo = "0 as *const JSJitInfo" else: - if attr.hasLenientThis(): + if attr.hasLegacyLenientThis(): accessor = "generic_lenient_getter" else: accessor = "generic_getter" @@ -1946,7 +1946,7 @@ class AttrDefiner(PropertyDefiner): accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name) jitinfo = "0 as *const JSJitInfo" else: - if attr.hasLenientThis(): + if attr.hasLegacyLenientThis(): accessor = "generic_lenient_setter" else: accessor = "generic_setter" @@ -2187,7 +2187,7 @@ class CGImports(CGWrapper): if d.interface.isIteratorInterface(): types += [d.interface.iterableInterface] - members = d.interface.members + d.interface.namedConstructors + members = d.interface.members + d.interface.legacyFactoryFunctions constructor = d.interface.ctor() if constructor: members += [constructor] @@ -2436,7 +2436,7 @@ class CGPrototypeJSClass(CGThing): def define(self): name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype") slotCount = 0 - if self.descriptor.hasUnforgeableMembers: + if self.descriptor.hasLegacyUnforgeableMembers: slotCount += 1 return """\ static PrototypeClass: JSClass = JSClass { @@ -2814,7 +2814,7 @@ class CGConstructorEnabled(CGAbstractMethod): return CGList((CGGeneric(cond) for cond in conditions), " &&\n") -def 
InitUnforgeablePropertiesOnHolder(descriptor, properties): +def InitLegacyUnforgeablePropertiesOnHolder(descriptor, properties): """ Define the unforgeable properties on the unforgeable holder for the interface represented by descriptor. @@ -2823,12 +2823,12 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties): """ unforgeables = [] - defineUnforgeableAttrs = "define_guarded_properties(cx, unforgeable_holder.handle(), %s, global);" - defineUnforgeableMethods = "define_guarded_methods(cx, unforgeable_holder.handle(), %s, global);" + defineLegacyUnforgeableAttrs = "define_guarded_properties(cx, unforgeable_holder.handle(), %s, global);" + defineLegacyUnforgeableMethods = "define_guarded_methods(cx, unforgeable_holder.handle(), %s, global);" unforgeableMembers = [ - (defineUnforgeableAttrs, properties.unforgeable_attrs), - (defineUnforgeableMethods, properties.unforgeable_methods), + (defineLegacyUnforgeableAttrs, properties.unforgeable_attrs), + (defineLegacyUnforgeableMethods, properties.unforgeable_methods), ] for template, array in unforgeableMembers: if array.length() > 0: @@ -2836,12 +2836,12 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties): return CGList(unforgeables, "\n") -def CopyUnforgeablePropertiesToInstance(descriptor): +def CopyLegacyUnforgeablePropertiesToInstance(descriptor): """ Copy the unforgeable properties from the unforgeable holder for this interface to the instance object we have. """ - if not descriptor.hasUnforgeableMembers: + if not descriptor.hasLegacyUnforgeableMembers: return "" copyCode = "" @@ -2890,7 +2890,7 @@ class CGWrapMethod(CGAbstractMethod): pub=True, unsafe=True) def definition_body(self): - unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor) + unforgeable = CopyLegacyUnforgeablePropertiesToInstance(self.descriptor) if self.descriptor.proxy: if self.descriptor.isMaybeCrossOriginObject(): proto = "ptr::null_mut()" @@ -2958,10 +2958,10 @@ assert!(!proto.is_null()); %(createObject)s let root = raw.reflect_with(obj.get()); -%(copyUnforgeable)s +%(copyLegacyUnforgeable)s DomRoot::from_ref(&*root)\ -""" % {'copyUnforgeable': unforgeable, 'createObject': create}) +""" % {'copyLegacyUnforgeable': unforgeable, 'createObject': create}) class CGWrapGlobalMethod(CGAbstractMethod): @@ -2981,7 +2981,7 @@ class CGWrapGlobalMethod(CGAbstractMethod): def definition_body(self): values = { "concreteType": self.descriptor.concreteType, - "unforgeable": CopyUnforgeablePropertiesToInstance(self.descriptor) + "unforgeable": CopyLegacyUnforgeablePropertiesToInstance(self.descriptor) } pairs = [ @@ -3121,10 +3121,10 @@ class PropertyArrays(): self.static_attrs = AttrDefiner(descriptor, "StaticAttributes", static=True, unforgeable=False) self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False) - self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods", + self.unforgeable_methods = MethodDefiner(descriptor, "LegacyUnforgeableMethods", static=False, unforgeable=True) self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False) - self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes", + self.unforgeable_attrs = AttrDefiner(descriptor, "LegacyUnforgeableAttributes", static=False, unforgeable=True) self.consts = ConstDefiner(descriptor, "Constants") pass @@ -3426,7 +3426,7 @@ assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null()); ] + [defineAliasesFor(m) for m in sorted(aliasedMembers)]) code.append(defineAliases) - constructors = 
self.descriptor.interface.namedConstructors + constructors = self.descriptor.interface.legacyFactoryFunctions if constructors: decl = "let named_constructors: [(ConstructorClassHook, &'static [u8], u32); %d]" % len(constructors) specs = [] @@ -3439,7 +3439,7 @@ assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null()); code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];")) code.append(CGGeneric("create_named_constructors(cx, global, &named_constructors, prototype.handle());")) - if self.descriptor.hasUnforgeableMembers: + if self.descriptor.hasLegacyUnforgeableMembers: # We want to use the same JSClass and prototype as the object we'll # end up defining the unforgeable properties on in the end, so that # we can use JS_InitializePropertiesFromCompatibleNativeObject to do @@ -3463,7 +3463,7 @@ unforgeable_holder.handle_mut().set( JS_NewObjectWithoutMetadata(*cx, %(holderClass)s, %(holderProto)s)); assert!(!unforgeable_holder.is_null()); """ % {'holderClass': holderClass, 'holderProto': holderProto})) - code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties)) + code.append(InitLegacyUnforgeablePropertiesOnHolder(self.descriptor, self.properties)) code.append(CGGeneric("""\ let val = ObjectValue(unforgeable_holder.get()); JS_SetReservedSlot(prototype.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT, &val)""")) @@ -5631,7 +5631,7 @@ class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod): namedSetter = self.descriptor.operations['NamedSetter'] if namedSetter: - if self.descriptor.hasUnforgeableMembers: + if self.descriptor.hasLegacyUnforgeableMembers: raise TypeError("Can't handle a named setter on an interface that has " "unforgeables. Figure out how that should work!") set += ("if id.is_string() || id.is_int() {\n" @@ -5675,7 +5675,7 @@ class CGDOMJSProxyHandler_delete(CGAbstractExternMethod): """) if self.descriptor.operations['NamedDeleter']: - if self.descriptor.hasUnforgeableMembers: + if self.descriptor.hasLegacyUnforgeableMembers: raise TypeError("Can't handle a deleter on an interface that has " "unforgeables. 
Figure out how that should work!") set += CGProxyNamedDeleter(self.descriptor).define() @@ -6731,7 +6731,7 @@ class CGDescriptor(CGThing): if descriptor.interface.hasInterfaceObject(): if descriptor.interface.ctor(): cgThings.append(CGClassConstructHook(descriptor)) - for ctor in descriptor.interface.namedConstructors: + for ctor in descriptor.interface.legacyFactoryFunctions: cgThings.append(CGClassConstructHook(descriptor, ctor)) if not descriptor.interface.isCallback(): cgThings.append(CGInterfaceObjectJSClass(descriptor)) @@ -7943,7 +7943,7 @@ class GlobalGenRoots(): pairs.append((d.name, binding, binding)) for alias in d.interface.legacyWindowAliases: pairs.append((alias, binding, binding)) - for ctor in d.interface.namedConstructors: + for ctor in d.interface.legacyFactoryFunctions: pairs.append((ctor.identifier.name, binding, binding)) pairs.sort(key=operator.itemgetter(0)) mappings = [ diff --git a/components/script/dom/bindings/codegen/Configuration.py b/components/script/dom/bindings/codegen/Configuration.py index cf6885be265..fb360a020aa 100644 --- a/components/script/dom/bindings/codegen/Configuration.py +++ b/components/script/dom/bindings/codegen/Configuration.py @@ -167,11 +167,11 @@ class DescriptorProvider: return self.config.getDescriptor(interfaceName) -def MemberIsUnforgeable(member, descriptor): +def MemberIsLegacyUnforgeable(member, descriptor): return ((member.isAttr() or member.isMethod()) and not member.isStatic() - and (member.isUnforgeable() - or bool(descriptor.interface.getExtendedAttribute("Unforgeable")))) + and (member.isLegacyUnforgeable() + or bool(descriptor.interface.getExtendedAttribute("LegacyUnforgeable")))) class Descriptor(DescriptorProvider): @@ -244,9 +244,9 @@ class Descriptor(DescriptorProvider): and not self.interface.getExtendedAttribute("Abstract") and not self.interface.getExtendedAttribute("Inline") and not spiderMonkeyInterface) - self.hasUnforgeableMembers = (self.concrete - and any(MemberIsUnforgeable(m, self) for m in - self.interface.members)) + self.hasLegacyUnforgeableMembers = (self.concrete + and any(MemberIsLegacyUnforgeable(m, self) for m in + self.interface.members)) self.operations = { 'IndexedGetter': None, @@ -461,7 +461,7 @@ def getTypesFromDescriptor(descriptor): members = [m for m in descriptor.interface.members] if descriptor.interface.ctor(): members.append(descriptor.interface.ctor()) - members.extend(descriptor.interface.namedConstructors) + members.extend(descriptor.interface.legacyFactoryFunctions) signatures = [s for m in members if m.isMethod() for s in m.signatures()] types = [] for s in signatures: diff --git a/components/script/dom/bindings/codegen/parser/WebIDL.py b/components/script/dom/bindings/codegen/parser/WebIDL.py index e317087837d..2366e3f7027 100644 --- a/components/script/dom/bindings/codegen/parser/WebIDL.py +++ b/components/script/dom/bindings/codegen/parser/WebIDL.py @@ -4,15 +4,16 @@ """ A WebIDL parser. 
""" +import copy +import math +import os +import re +import string +import traceback +from collections import OrderedDict, defaultdict +from itertools import chain from ply import lex, yacc -import re -import os -import traceback -import math -import string -from collections import defaultdict, OrderedDict -from itertools import chain # Machinery @@ -22,14 +23,14 @@ def parseInt(literal): sign = 0 base = 0 - if string[0] == '-': + if string[0] == "-": sign = -1 string = string[1:] else: sign = 1 - if string[0] == '0' and len(string) > 1: - if string[1] == 'x' or string[1] == 'X': + if string[0] == "0" and len(string) > 1: + if string[1] == "x" or string[1] == "X": base = 16 string = string[2:] else: @@ -45,19 +46,22 @@ def parseInt(literal): def enum(*names, **kw): class Foo(object): attrs = OrderedDict() + def __init__(self, names): for v, k in enumerate(names): self.attrs[k] = v + def __getattr__(self, attr): if attr in self.attrs: return self.attrs[attr] raise AttributeError + def __setattr__(self, name, value): # this makes it read-only raise NotImplementedError if "base" not in kw: return Foo(names) - return Foo(chain(list(kw["base"].attrs.keys()), names)) + return Foo(chain(kw["base"].attrs.keys(), names)) class WebIDLError(Exception): @@ -67,10 +71,12 @@ class WebIDLError(Exception): self.warning = warning def __str__(self): - return "%s: %s%s%s" % (self.warning and 'warning' or 'error', - self.message, - ", " if len(self.locations) != 0 else "", - "\n".join(self.locations)) + return "%s: %s%s%s" % ( + self.warning and "warning" or "error", + self.message, + ", " if len(self.locations) != 0 else "", + "\n".join(self.locations), + ) class Location(object): @@ -82,8 +88,7 @@ class Location(object): self._file = filename if filename else "" def __eq__(self, other): - return (self._lexpos == other._lexpos and - self._file == other._file) + return self._lexpos == other._lexpos and self._file == other._file def filename(self): return self._file @@ -92,8 +97,8 @@ class Location(object): if self._line: return - startofline = self._lexdata.rfind('\n', 0, self._lexpos) + 1 - endofline = self._lexdata.find('\n', self._lexpos, self._lexpos + 80) + startofline = self._lexdata.rfind("\n", 0, self._lexpos) + 1 + endofline = self._lexdata.find("\n", self._lexpos, self._lexpos + 80) if endofline != -1: self._line = self._lexdata[startofline:endofline] else: @@ -101,7 +106,7 @@ class Location(object): self._colno = self._lexpos - startofline # Our line number seems to point to the start of self._lexdata - self._lineno += self._lexdata.count('\n', 0, startofline) + self._lineno += self._lexdata.count("\n", 0, startofline) def get(self): self.resolve() @@ -112,8 +117,13 @@ class Location(object): def __str__(self): self.resolve() - return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno, - self._line, self._pointerline()) + return "%s line %s:%s\n%s\n%s" % ( + self._file, + self._lineno, + self._colno, + self._line, + self._pointerline(), + ) class BuiltinLocation(object): @@ -121,14 +131,10 @@ class BuiltinLocation(object): self.msg = text + "\n" def __eq__(self, other): - return (isinstance(other, BuiltinLocation) and - self.msg == other.msg) - - def __hash__(self): - return hash(self.msg) + return isinstance(other, BuiltinLocation) and self.msg == other.msg def filename(self): - return '' + return "" def resolve(self): pass @@ -194,13 +200,13 @@ class IDLObject(object): assert False # Override me! 
def getDeps(self, visited=None): - """ Return a set of files that this object depends on. If any of - these files are changed the parser needs to be rerun to regenerate - a new IDLObject. + """Return a set of files that this object depends on. If any of + these files are changed the parser needs to be rerun to regenerate + a new IDLObject. - The visited argument is a set of all the objects already visited. - We must test to see if we are in it, and if so, do nothing. This - prevents infinite recursion.""" + The visited argument is a set of all the objects already visited. + We must test to see if we are in it, and if so, do nothing. This + prevents infinite recursion.""" # NB: We can't use visited=set() above because the default value is # evaluated when the def statement is evaluated, not when the function @@ -256,9 +262,9 @@ class IDLScope(IDLObject): def ensureUnique(self, identifier, object): """ - Ensure that there is at most one 'identifier' in scope ('self'). - Note that object can be None. This occurs if we end up here for an - interface type we haven't seen yet. + Ensure that there is at most one 'identifier' in scope ('self'). + Note that object can be None. This occurs if we end up here for an + interface type we haven't seen yet. """ assert isinstance(identifier, IDLUnresolvedIdentifier) assert not object or isinstance(object, IDLObjectWithIdentifier) @@ -271,9 +277,9 @@ class IDLScope(IDLObject): # ensureUnique twice with the same object is not allowed assert id(object) != id(self._dict[identifier.name]) - replacement = self.resolveIdentifierConflict(self, identifier, - self._dict[identifier.name], - object) + replacement = self.resolveIdentifierConflict( + self, identifier, self._dict[identifier.name], object + ) self._dict[identifier.name] = replacement return @@ -282,44 +288,53 @@ class IDLScope(IDLObject): self._dict[identifier.name] = object def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject): - if (isinstance(originalObject, IDLExternalInterface) and - isinstance(newObject, IDLExternalInterface) and - originalObject.identifier.name == newObject.identifier.name): + if ( + isinstance(originalObject, IDLExternalInterface) + and isinstance(newObject, IDLExternalInterface) + and originalObject.identifier.name == newObject.identifier.name + ): return originalObject - if (isinstance(originalObject, IDLExternalInterface) or - isinstance(newObject, IDLExternalInterface)): + if isinstance(originalObject, IDLExternalInterface) or isinstance( + newObject, IDLExternalInterface + ): raise WebIDLError( "Name collision between " "interface declarations for identifier '%s' at '%s' and '%s'" - % (identifier.name, - originalObject.location, newObject.location), []) + % (identifier.name, originalObject.location, newObject.location), + [], + ) - if (isinstance(originalObject, IDLDictionary) or - isinstance(newObject, IDLDictionary)): + if isinstance(originalObject, IDLDictionary) or isinstance( + newObject, IDLDictionary + ): raise WebIDLError( "Name collision between dictionary declarations for " "identifier '%s'.\n%s\n%s" - % (identifier.name, - originalObject.location, newObject.location), []) + % (identifier.name, originalObject.location, newObject.location), + [], + ) # We do the merging of overloads here as opposed to in IDLInterface - # because we need to merge overloads of NamedConstructors and we need to + # because we need to merge overloads of LegacyFactoryFunctions and we need to # detect conflicts in those across interfaces. 
See also the comment in - # IDLInterface.addExtendedAttributes for "NamedConstructor". - if (isinstance(originalObject, IDLMethod) and - isinstance(newObject, IDLMethod)): + # IDLInterface.addExtendedAttributes for "LegacyFactoryFunction". + if isinstance(originalObject, IDLMethod) and isinstance(newObject, IDLMethod): return originalObject.addOverload(newObject) # Default to throwing, derived classes can override. - conflictdesc = "\n\t%s at %s\n\t%s at %s" % (originalObject, - originalObject.location, - newObject, - newObject.location) + conflictdesc = "\n\t%s at %s\n\t%s at %s" % ( + originalObject, + originalObject.location, + newObject, + newObject.location, + ) raise WebIDLError( "Multiple unresolvable definitions of identifier '%s' in scope '%s'%s" - % (identifier.name, str(self), conflictdesc), []) + % (identifier.name, str(self), conflictdesc), + [], + ) def _lookupIdentifier(self, identifier): return self._dict[identifier.name] @@ -362,8 +377,9 @@ class IDLIdentifier(IDLObject): class IDLUnresolvedIdentifier(IDLObject): - def __init__(self, location, name, allowDoubleUnderscore=False, - allowForbidden=False): + def __init__( + self, location, name, allowDoubleUnderscore=False, allowForbidden=False + ): IDLObject.__init__(self, location) assert len(name) > 0 @@ -371,15 +387,14 @@ class IDLUnresolvedIdentifier(IDLObject): if name == "__noSuchMethod__": raise WebIDLError("__noSuchMethod__ is deprecated", [location]) - if name[:2] == "__" and name != "__content" and not allowDoubleUnderscore: - raise WebIDLError("Identifiers beginning with __ are reserved", - [location]) - if name[0] == '_' and not allowDoubleUnderscore: + if name[:2] == "__" and not allowDoubleUnderscore: + raise WebIDLError("Identifiers beginning with __ are reserved", [location]) + if name[0] == "_" and not allowDoubleUnderscore: name = name[1:] - if (name in ["constructor", "toString"] and - not allowForbidden): - raise WebIDLError("Cannot use reserved identifier '%s'" % (name), - [location]) + if name in ["constructor", "toString"] and not allowForbidden: + raise WebIDLError( + "Cannot use reserved identifier '%s'" % (name), [location] + ) self.name = name @@ -439,14 +454,15 @@ class IDLIdentifierPlaceholder(IDLObjectWithIdentifier): try: scope._lookupIdentifier(self.identifier) except: - raise WebIDLError("Unresolved type '%s'." % self.identifier, - [self.location]) + raise WebIDLError( + "Unresolved type '%s'." % self.identifier, [self.location] + ) obj = self.identifier.resolve(scope, None) return scope.lookupIdentifier(obj) -class IDLExposureMixins(): +class IDLExposureMixins: def __init__(self, location): # _exposureGlobalNames are the global names listed in our [Exposed] # extended attribute. exposureSet is the exposure set as defined in the @@ -460,11 +476,15 @@ class IDLExposureMixins(): assert scope.parentScope is None self._globalScope = scope - # Verify that our [Exposed] value, if any, makes sense. - for globalName in self._exposureGlobalNames: - if globalName not in scope.globalNames: - raise WebIDLError("Unknown [Exposed] value %s" % globalName, - [self._location]) + if "*" in self._exposureGlobalNames: + self._exposureGlobalNames = scope.globalNames + else: + # Verify that our [Exposed] value, if any, makes sense. + for globalName in self._exposureGlobalNames: + if globalName not in scope.globalNames: + raise WebIDLError( + "Unknown [Exposed] value %s" % globalName, [self._location] + ) # Verify that we are exposed _somwhere_ if we have some place to be # exposed. 
We don't want to assert that we're definitely exposed @@ -473,16 +493,20 @@ class IDLExposureMixins(): # and add global interfaces and [Exposed] annotations to all those # tests. if len(scope.globalNames) != 0: - if (len(self._exposureGlobalNames) == 0): - raise WebIDLError(("'%s' is not exposed anywhere even though we have " - "globals to be exposed to") % self, - [self.location]) + if len(self._exposureGlobalNames) == 0 and not self.isPseudoInterface(): + raise WebIDLError( + ( + "'%s' is not exposed anywhere even though we have " + "globals to be exposed to" + ) + % self, + [self.location], + ) - globalNameSetToExposureSet(scope, self._exposureGlobalNames, - self.exposureSet) + globalNameSetToExposureSet(scope, self._exposureGlobalNames, self.exposureSet) def isExposedInWindow(self): - return 'Window' in self.exposureSet + return "Window" in self.exposureSet def isExposedInAnyWorker(self): return len(self.getWorkerExposureSet()) > 0 @@ -505,6 +529,9 @@ class IDLExposureMixins(): workerScopes = self.parentScope.globalNameMapping["Worker"] return len(workerScopes.difference(self.exposureSet)) > 0 + def isExposedInShadowRealms(self): + return "ShadowRealmGlobalScope" in self.exposureSet + def getWorkerExposureSet(self): workerScopes = self._globalScope.globalNameMapping["Worker"] return workerScopes.intersection(self.exposureSet) @@ -535,6 +562,9 @@ class IDLExternalInterface(IDLObjectWithIdentifier): def isIteratorInterface(self): return False + def isAsyncIteratorInterface(self): + return False + def isExternal(self): return True @@ -543,9 +573,11 @@ class IDLExternalInterface(IDLObjectWithIdentifier): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on external interfaces", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " + "allowed on external interfaces", + [attrs[0].location, self.location], + ) def resolve(self, parentScope): pass @@ -610,49 +642,68 @@ class IDLPartialInterfaceOrNamespace(IDLObject): for attr in attrs: identifier = attr.identifier() - if identifier == "NamedConstructor": + if identifier == "LegacyFactoryFunction": self.propagatedExtendedAttrs.append(attr) elif identifier == "SecureContext": self._haveSecureContextExtendedAttribute = True # This gets propagated to all our members. for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both a " - "partial interface member and on the " - "partial interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both a " + "partial interface member and on the " + "partial interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) elif identifier == "Exposed": # This just gets propagated to all our members. 
for member in self.members: if len(member._exposureGlobalNames) != 0: - raise WebIDLError("[Exposed] specified on both a " - "partial interface member and on the " - "partial interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[Exposed] specified on both a " + "partial interface member and on the " + "partial interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) else: - raise WebIDLError("Unknown extended attribute %s on partial " - "interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on partial " + "interface" % identifier, + [attr.location], + ) def finish(self, scope): if self._finished: return self._finished = True - if (not self._haveSecureContextExtendedAttribute and - self._nonPartialInterfaceOrNamespace.getExtendedAttribute("SecureContext")): + if ( + not self._haveSecureContextExtendedAttribute + and self._nonPartialInterfaceOrNamespace.getExtendedAttribute( + "SecureContext" + ) + ): # This gets propagated to all our members. for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both a " - "partial interface member and on the " - "non-partial interface", - [member.location, - self._nonPartialInterfaceOrNamespace.location]) + raise WebIDLError( + "[SecureContext] specified on both a " + "partial interface member and on the " + "non-partial interface", + [ + member.location, + self._nonPartialInterfaceOrNamespace.location, + ], + ) member.addExtendedAttributes( - [IDLExtendedAttribute(self._nonPartialInterfaceOrNamespace.location, - ("SecureContext",))]) + [ + IDLExtendedAttribute( + self._nonPartialInterfaceOrNamespace.location, + ("SecureContext",), + ) + ] + ) # Need to make sure our non-partial interface or namespace gets # finished so it can report cases when we only have partial # interfaces/namespaces. 
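# Editorial note (illustrative sketch, hypothetical helper, not part of the
# patch): the propagation rule the partial-interface hunks above implement,
# reduced to plain lists of extended-attribute names. An attribute such as
# [SecureContext] or [Exposed] set on the partial interface is pushed down to
# every member, and a member restating it is rejected.
def _propagate_to_members(attr_name, interface_has_attr, member_attr_lists):
    if not interface_has_attr:
        return
    for member_attrs in member_attr_lists:
        if attr_name in member_attrs:
            raise ValueError(
                "[%s] specified on both a partial interface member "
                "and on the partial interface itself" % attr_name
            )
        member_attrs.append(attr_name)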
@@ -675,6 +726,7 @@ def globalNameSetToExposureSet(globalScope, nameSet, exposureSet): for name in nameSet: exposureSet.update(globalScope.globalNameMapping[name]) + class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMixins): def __init__(self, location, parentScope, name): assert isinstance(parentScope, IDLScope) @@ -691,8 +743,10 @@ class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMix def finish(self, scope): if not self._isKnownNonPartial: - raise WebIDLError("%s does not have a non-partial declaration" % - str(self), [self.location]) + raise WebIDLError( + "%s does not have a non-partial declaration" % str(self), + [self.location], + ) IDLExposureMixins.finish(self, scope) @@ -707,8 +761,9 @@ class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMix assert isinstance(originalObject, IDLInterfaceMember) assert isinstance(newObject, IDLInterfaceMember) - retval = IDLScope.resolveIdentifierConflict(self, scope, identifier, - originalObject, newObject) + retval = IDLScope.resolveIdentifierConflict( + self, scope, identifier, originalObject, newObject + ) # Might be a ctor, which isn't in self.members if newObject in self.members: @@ -728,9 +783,10 @@ class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMix def setNonPartial(self, location, members): if self._isKnownNonPartial: - raise WebIDLError("Two non-partial definitions for the " - "same %s" % self.typeName(), - [location, self.location]) + raise WebIDLError( + "Two non-partial definitions for the " "same %s" % self.typeName(), + [location, self.location], + ) self._isKnownNonPartial = True # Now make it look like we were parsed at this new location, since # that's the place where the interface is "really" defined @@ -775,9 +831,11 @@ class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMix # sets, make sure they aren't exposed in places where we are not. 
for member in self.members: if not member.exposureSet.issubset(self.exposureSet): - raise WebIDLError("Interface or interface mixin member has " - "larger exposure set than its container", - [member.location, self.location]) + raise WebIDLError( + "Interface or interface mixin member has " + "larger exposure set than its container", + [member.location, self.location], + ) def isExternal(self): return False @@ -788,7 +846,9 @@ class IDLInterfaceMixin(IDLInterfaceOrInterfaceMixinOrNamespace): self.actualExposureGlobalNames = set() assert isKnownNonPartial or len(members) == 0 - IDLInterfaceOrInterfaceMixinOrNamespace.__init__(self, location, parentScope, name) + IDLInterfaceOrInterfaceMixinOrNamespace.__init__( + self, location, parentScope, name + ) if isKnownNonPartial: self.setNonPartial(location, members) @@ -824,26 +884,33 @@ class IDLInterfaceMixin(IDLInterfaceOrInterfaceMixinOrNamespace): if member.isAttr(): if member.inherit: - raise WebIDLError("Interface mixin member cannot include " - "an inherited attribute", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " + "an inherited attribute", + [member.location, self.location], + ) if member.isStatic(): - raise WebIDLError("Interface mixin member cannot include " - "a static member", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " "a static member", + [member.location, self.location], + ) if member.isMethod(): if member.isStatic(): - raise WebIDLError("Interface mixin member cannot include " - "a static operation", - [member.location, self.location]) - if (member.isGetter() or - member.isSetter() or - member.isDeleter() or - member.isLegacycaller()): - raise WebIDLError("Interface mixin member cannot include a " - "special operation", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " "a static operation", + [member.location, self.location], + ) + if ( + member.isGetter() + or member.isSetter() + or member.isDeleter() + or member.isLegacycaller() + ): + raise WebIDLError( + "Interface mixin member cannot include a " "special operation", + [member.location, self.location], + ) def addExtendedAttributes(self, attrs): for attr in attrs: @@ -851,22 +918,26 @@ class IDLInterfaceMixin(IDLInterfaceOrInterfaceMixinOrNamespace): if identifier == "SecureContext": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) # This gets propagated to all our members. 
for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both " - "an interface mixin member and on" - "the interface mixin itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both " + "an interface mixin member and on" + "the interface mixin itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) elif identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) else: - raise WebIDLError("Unknown extended attribute %s on interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on interface" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -876,8 +947,7 @@ class IDLInterfaceMixin(IDLInterfaceOrInterfaceMixinOrNamespace): class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): - def __init__(self, location, parentScope, name, parent, members, - isKnownNonPartial, toStringTag): + def __init__(self, location, parentScope, name, parent, members, isKnownNonPartial): assert isKnownNonPartial or not parent assert isKnownNonPartial or len(members) == 0 @@ -887,7 +957,7 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): # namedConstructors needs deterministic ordering because bindings code # outputs the constructs in the order that namedConstructors enumerates # them. - self.namedConstructors = list() + self.legacyFactoryFunctions = list() self.legacyWindowAliases = [] self.includedMixins = set() # self.interfacesBasedOnSelf is the set of interfaces that inherit from @@ -896,6 +966,9 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): self.interfacesBasedOnSelf = set([self]) self._hasChildInterfaces = False self._isOnGlobalProtoChain = False + # Pseudo interfaces aren't exposed anywhere, and so shouldn't issue warnings + self._isPseudo = False + # Tracking of the number of reserved slots we need for our # members and those of ancestor interfaces. self.totalMembersInSlots = 0 @@ -904,33 +977,49 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): # If this is an iterator interface, we need to know what iterable # interface we're iterating for in order to get its nativeType. self.iterableInterface = None + self.asyncIterableInterface = None # True if we have cross-origin members. 
self.hasCrossOriginMembers = False # True if some descendant (including ourselves) has cross-origin members self.hasDescendantWithCrossOriginMembers = False - self.toStringTag = toStringTag - - IDLInterfaceOrInterfaceMixinOrNamespace.__init__(self, location, parentScope, name) + IDLInterfaceOrInterfaceMixinOrNamespace.__init__( + self, location, parentScope, name + ) if isKnownNonPartial: self.setNonPartial(location, parent, members) def ctor(self): - identifier = IDLUnresolvedIdentifier(self.location, "constructor", - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.location, "constructor", allowForbidden=True + ) try: return self._lookupIdentifier(identifier) except: return None def isIterable(self): - return (self.maplikeOrSetlikeOrIterable and - self.maplikeOrSetlikeOrIterable.isIterable()) + return ( + self.maplikeOrSetlikeOrIterable + and self.maplikeOrSetlikeOrIterable.isIterable() + ) + + def isAsyncIterable(self): + return ( + self.maplikeOrSetlikeOrIterable + and self.maplikeOrSetlikeOrIterable.isAsyncIterable() + ) def isIteratorInterface(self): return self.iterableInterface is not None + def isAsyncIteratorInterface(self): + return self.asyncIterableInterface is not None + + def getClassName(self): + return self.identifier.name + def finish(self, scope): if self._finished: return @@ -941,48 +1030,71 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): if len(self.legacyWindowAliases) > 0: if not self.hasInterfaceObject(): - raise WebIDLError("Interface %s unexpectedly has [LegacyWindowAlias] " - "and [NoInterfaceObject] together" % self.identifier.name, - [self.location]) + raise WebIDLError( + "Interface %s unexpectedly has [LegacyWindowAlias] " + "and [LegacyNoInterfaceObject] together" % self.identifier.name, + [self.location], + ) if not self.isExposedInWindow(): - raise WebIDLError("Interface %s has [LegacyWindowAlias] " - "but not exposed in Window" % self.identifier.name, - [self.location]) + raise WebIDLError( + "Interface %s has [LegacyWindowAlias] " + "but not exposed in Window" % self.identifier.name, + [self.location], + ) # Generate maplike/setlike interface members. Since generated members # need to be treated like regular interface members, do this before # things like exposure setting. 
for member in self.members: if member.isMaplikeOrSetlikeOrIterable(): + if self.isJSImplemented(): + raise WebIDLError( + "%s declaration used on " + "interface that is implemented in JS" + % (member.maplikeOrSetlikeOrIterableType), + [member.location], + ) + if member.valueType.isObservableArray() or ( + member.hasKeyType() and member.keyType.isObservableArray() + ): + raise WebIDLError( + "%s declaration uses ObservableArray as value or key type" + % (member.maplikeOrSetlikeOrIterableType), + [member.location], + ) # Check that we only have one interface declaration (currently # there can only be one maplike/setlike declaration per # interface) if self.maplikeOrSetlikeOrIterable: - raise WebIDLError("%s declaration used on " - "interface that already has %s " - "declaration" % - (member.maplikeOrSetlikeOrIterableType, - self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType), - [self.maplikeOrSetlikeOrIterable.location, - member.location]) + raise WebIDLError( + "%s declaration used on " + "interface that already has %s " + "declaration" + % ( + member.maplikeOrSetlikeOrIterableType, + self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType, + ), + [self.maplikeOrSetlikeOrIterable.location, member.location], + ) self.maplikeOrSetlikeOrIterable = member # If we've got a maplike or setlike declaration, we'll be building all of # our required methods in Codegen. Generate members now. - self.maplikeOrSetlikeOrIterable.expand(self.members, self.isJSImplemented()) + self.maplikeOrSetlikeOrIterable.expand(self.members) assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder) parent = self.parent.finish(scope) if self.parent else None if parent and isinstance(parent, IDLExternalInterface): - raise WebIDLError("%s inherits from %s which does not have " - "a definition" % - (self.identifier.name, - self.parent.identifier.name), - [self.location]) + raise WebIDLError( + "%s inherits from %s which does not have " + "a definition" % (self.identifier.name, self.parent.identifier.name), + [self.location], + ) if parent and not isinstance(parent, IDLInterface): - raise WebIDLError("%s inherits from %s which is not an interface " % - (self.identifier.name, - self.parent.identifier.name), - [self.location, parent.location]) + raise WebIDLError( + "%s inherits from %s which is not an interface " + % (self.identifier.name, self.parent.identifier.name), + [self.location, parent.location], + ) self.parent = parent @@ -993,9 +1105,10 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): for m in self.members: if m.isAttr() or m.isMethod(): if m.isStatic(): - raise WebIDLError("Don't mark things explicitly static " - "in namespaces", - [self.location, m.location]) + raise WebIDLError( + "Don't mark things explicitly static " "in namespaces", + [self.location, m.location], + ) # Just mark all our methods/attributes as static. The other # option is to duplicate the relevant InterfaceMembers # production bits but modified to produce static stuff to @@ -1013,55 +1126,63 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): # Note: This is not a self.parent.isOnGlobalProtoChain() check # because ancestors of a [Global] interface can have other # descendants. 
- raise WebIDLError("[Global] interface has another interface " - "inheriting from it", - [self.location, self.parent.location]) + raise WebIDLError( + "[Global] interface has another interface " "inheriting from it", + [self.location, self.parent.location], + ) # Make sure that we're not exposed in places where our parent is not if not self.exposureSet.issubset(self.parent.exposureSet): - raise WebIDLError("Interface %s is exposed in globals where its " - "parent interface %s is not exposed." % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Interface %s is exposed in globals where its " + "parent interface %s is not exposed." + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Callbacks must not inherit from non-callbacks. # XXXbz Can non-callbacks inherit from callbacks? Spec issue pending. if self.isCallback(): if not self.parent.isCallback(): - raise WebIDLError("Callback interface %s inheriting from " - "non-callback interface %s" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Callback interface %s inheriting from " + "non-callback interface %s" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) elif self.parent.isCallback(): - raise WebIDLError("Non-callback interface %s inheriting from " - "callback interface %s" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Non-callback interface %s inheriting from " + "callback interface %s" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Interfaces which have interface objects can't inherit - # from [NoInterfaceObject] interfaces. - if (self.parent.getExtendedAttribute("NoInterfaceObject") and - not self.getExtendedAttribute("NoInterfaceObject")): - raise WebIDLError("Interface %s does not have " - "[NoInterfaceObject] but inherits from " - "interface %s which does" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + # from [LegacyNoInterfaceObject] interfaces. + if self.parent.getExtendedAttribute( + "LegacyNoInterfaceObject" + ) and not self.getExtendedAttribute("LegacyNoInterfaceObject"): + raise WebIDLError( + "Interface %s does not have " + "[LegacyNoInterfaceObject] but inherits from " + "interface %s which does" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Interfaces that are not [SecureContext] can't inherit # from [SecureContext] interfaces. 
- if (self.parent.getExtendedAttribute("SecureContext") and - not self.getExtendedAttribute("SecureContext")): - raise WebIDLError("Interface %s does not have " - "[SecureContext] but inherits from " - "interface %s which does" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + if self.parent.getExtendedAttribute( + "SecureContext" + ) and not self.getExtendedAttribute("SecureContext"): + raise WebIDLError( + "Interface %s does not have " + "[SecureContext] but inherits from " + "interface %s which does" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) for mixin in self.includedMixins: mixin.finish(scope) @@ -1070,7 +1191,8 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): if cycleInGraph: raise WebIDLError( "Interface %s has itself as ancestor" % self.identifier.name, - [self.location, cycleInGraph.location]) + [self.location, cycleInGraph.location], + ) self.finishMembers(scope) @@ -1078,25 +1200,28 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): if ctor is not None: if not self.hasInterfaceObject(): raise WebIDLError( - "Can't have both a constructor and [NoInterfaceObject]", - [self.location, ctor.location]) + "Can't have both a constructor and [LegacyNoInterfaceObject]", + [self.location, ctor.location], + ) if self.globalNames: raise WebIDLError( "Can't have both a constructor and [Global]", - [self.location, ctor.location]) + [self.location, ctor.location], + ) - assert(ctor._exposureGlobalNames == self._exposureGlobalNames) + assert ctor._exposureGlobalNames == self._exposureGlobalNames ctor._exposureGlobalNames.update(self._exposureGlobalNames) # Remove the constructor operation from our member list so # it doesn't get in the way later. self.members.remove(ctor) - for ctor in self.namedConstructors: + for ctor in self.legacyFactoryFunctions: if self.globalNames: raise WebIDLError( - "Can't have both a named constructor and [Global]", - [self.location, ctor.location]) + "Can't have both a legacy factory function and [Global]", + [self.location, ctor.location], + ) assert len(ctor._exposureGlobalNames) == 0 ctor._exposureGlobalNames.update(self._exposureGlobalNames) ctor.finish(scope) @@ -1106,67 +1231,84 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): # admixed. 
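# Editorial note (illustrative sketch, hypothetical helper, not part of the
# patch): the 'includes' merging step performed just below — members coming
# from interface mixins are appended to the interface's own member list, and a
# name collision between the two is a hard error.
def _merge_mixin_members(own_names, mixin_names):
    for name in mixin_names:
        if name in own_names:
            raise ValueError(
                "Multiple definitions of %s coming from 'includes' statements"
                % name
            )
    return list(own_names) + list(mixin_names)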
self.originalMembers = list(self.members) - for mixin in sorted(self.includedMixins, - key=lambda x: x.identifier.name): + for mixin in sorted(self.includedMixins, key=lambda x: x.identifier.name): for mixinMember in mixin.members: for member in self.members: if mixinMember.identifier.name == member.identifier.name: raise WebIDLError( - "Multiple definitions of %s on %s coming from 'includes' statements" % - (member.identifier.name, self), - [mixinMember.location, member.location]) + "Multiple definitions of %s on %s coming from 'includes' statements" + % (member.identifier.name, self), + [mixinMember.location, member.location], + ) self.members.extend(mixin.members) for ancestor in self.getInheritedInterfaces(): ancestor.interfacesBasedOnSelf.add(self) - if (ancestor.maplikeOrSetlikeOrIterable is not None and - self.maplikeOrSetlikeOrIterable is not None): - raise WebIDLError("Cannot have maplike/setlike on %s that " - "inherits %s, which is already " - "maplike/setlike" % - (self.identifier.name, - ancestor.identifier.name), - [self.maplikeOrSetlikeOrIterable.location, - ancestor.maplikeOrSetlikeOrIterable.location]) + if ( + ancestor.maplikeOrSetlikeOrIterable is not None + and self.maplikeOrSetlikeOrIterable is not None + ): + raise WebIDLError( + "Cannot have maplike/setlike on %s that " + "inherits %s, which is already " + "maplike/setlike" + % (self.identifier.name, ancestor.identifier.name), + [ + self.maplikeOrSetlikeOrIterable.location, + ancestor.maplikeOrSetlikeOrIterable.location, + ], + ) - # Deal with interfaces marked [Unforgeable], now that we have our full + # Deal with interfaces marked [LegacyUnforgeable], now that we have our full # member list, except unforgeables pulled in from parents. We want to # do this before we set "originatingInterface" on our unforgeable # members. - if self.getExtendedAttribute("Unforgeable"): + if self.getExtendedAttribute("LegacyUnforgeable"): # Check that the interface already has all the things the # spec would otherwise require us to synthesize and is # missing the ones we plan to synthesize. 
if not any(m.isMethod() and m.isStringifier() for m in self.members): - raise WebIDLError("Unforgeable interface %s does not have a " - "stringifier" % self.identifier.name, - [self.location]) + raise WebIDLError( + "LegacyUnforgeable interface %s does not have a " + "stringifier" % self.identifier.name, + [self.location], + ) for m in self.members: if m.identifier.name == "toJSON": - raise WebIDLError("Unforgeable interface %s has a " - "toJSON so we won't be able to add " - "one ourselves" % self.identifier.name, - [self.location, m.location]) + raise WebIDLError( + "LegacyUnforgeable interface %s has a " + "toJSON so we won't be able to add " + "one ourselves" % self.identifier.name, + [self.location, m.location], + ) if m.identifier.name == "valueOf" and not m.isStatic(): - raise WebIDLError("Unforgeable interface %s has a valueOf " - "member so we won't be able to add one " - "ourselves" % self.identifier.name, - [self.location, m.location]) + raise WebIDLError( + "LegacyUnforgeable interface %s has a valueOf " + "member so we won't be able to add one " + "ourselves" % self.identifier.name, + [self.location, m.location], + ) for member in self.members: - if ((member.isAttr() or member.isMethod()) and - member.isUnforgeable() and - not hasattr(member, "originatingInterface")): + if ( + (member.isAttr() or member.isMethod()) + and member.isLegacyUnforgeable() + and not hasattr(member, "originatingInterface") + ): member.originatingInterface = self for member in self.members: - if ((member.isMethod() and - member.getExtendedAttribute("CrossOriginCallable")) or - (member.isAttr() and - (member.getExtendedAttribute("CrossOriginReadable") or - member.getExtendedAttribute("CrossOriginWritable")))): + if ( + member.isMethod() and member.getExtendedAttribute("CrossOriginCallable") + ) or ( + member.isAttr() + and ( + member.getExtendedAttribute("CrossOriginReadable") + or member.getExtendedAttribute("CrossOriginWritable") + ) + ): self.hasCrossOriginMembers = True break @@ -1180,16 +1322,21 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): # members from our parent. Also, maplike/setlike declarations get a # slot to hold their backing object. for member in self.members: - if ((member.isAttr() and - (member.getExtendedAttribute("StoreInSlot") or - member.getExtendedAttribute("Cached"))) or - member.isMaplikeOrSetlike()): + if ( + member.isAttr() + and ( + member.getExtendedAttribute("StoreInSlot") + or member.getExtendedAttribute("Cached") + or member.type.isObservableArray() + ) + ) or member.isMaplikeOrSetlike(): if self.isJSImplemented() and not member.isMaplikeOrSetlike(): - raise WebIDLError("Interface %s is JS-implemented and we " - "don't support [Cached] or [StoreInSlot] " - "on JS-implemented interfaces" % - self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "Interface %s is JS-implemented and we " + "don't support [Cached] or [StoreInSlot] or ObservableArray " + "on JS-implemented interfaces" % self.identifier.name, + [self.location, member.location], + ) if member.slotIndices is None: member.slotIndices = dict() member.slotIndices[self.identifier.name] = self.totalMembersInSlots @@ -1198,27 +1345,33 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): self._ownMembersInSlots += 1 if self.parent: - # Make sure we don't shadow any of the [Unforgeable] attributes on our + # Make sure we don't shadow any of the [LegacyUnforgeable] attributes on our # ancestor interfaces. 
We don't have to worry about mixins here, because # those have already been imported into the relevant .members lists. And # we don't have to worry about anything other than our parent, because it # has already imported its ancestors' unforgeable attributes into its # member list. - for unforgeableMember in (member for member in self.parent.members if - (member.isAttr() or member.isMethod()) and - member.isUnforgeable()): - shadows = [m for m in self.members if - (m.isAttr() or m.isMethod()) and - not m.isStatic() and - m.identifier.name == unforgeableMember.identifier.name] + for unforgeableMember in ( + member + for member in self.parent.members + if (member.isAttr() or member.isMethod()) + and member.isLegacyUnforgeable() + ): + shadows = [ + m + for m in self.members + if (m.isAttr() or m.isMethod()) + and not m.isStatic() + and m.identifier.name == unforgeableMember.identifier.name + ] if len(shadows) != 0: - locs = [unforgeableMember.location] + [s.location for s - in shadows] - raise WebIDLError("Interface %s shadows [Unforgeable] " - "members of %s" % - (self.identifier.name, - ancestor.identifier.name), - locs) + locs = [unforgeableMember.location] + [s.location for s in shadows] + raise WebIDLError( + "Interface %s shadows [LegacyUnforgeable] " + "members of %s" + % (self.identifier.name, ancestor.identifier.name), + locs, + ) # And now just stick it in our members, since we won't be # inheriting this down the proto chain. If we really cared we # could try to do something where we set up the unforgeable @@ -1234,8 +1387,9 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): testInterface = self isAncestor = False while testInterface: - self.maplikeOrSetlikeOrIterable.checkCollisions(testInterface.members, - isAncestor) + self.maplikeOrSetlikeOrIterable.checkCollisions( + testInterface.members, isAncestor + ) isAncestor = True testInterface = testInterface.parent @@ -1265,7 +1419,7 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): else: continue - if (memberType != "stringifiers" and memberType != "legacycallers"): + if memberType != "stringifiers" and memberType != "legacycallers": if member.isNamed(): memberType = "named " + memberType else: @@ -1273,10 +1427,14 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): memberType = "indexed " + memberType if memberType in specialMembersSeen: - raise WebIDLError("Multiple " + memberType + " on %s" % (self), - [self.location, - specialMembersSeen[memberType].location, - member.location]) + raise WebIDLError( + "Multiple " + memberType + " on %s" % (self), + [ + self.location, + specialMembersSeen[memberType].location, + member.location, + ], + ) specialMembersSeen[memberType] = member @@ -1286,7 +1444,8 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): raise WebIDLError( "Interface with [LegacyUnenumerableNamedProperties] does " "not have a named getter", - [self.location]) + [self.location], + ) ancestor = self.parent while ancestor: if ancestor.getExtendedAttribute("LegacyUnenumerableNamedProperties"): @@ -1294,7 +1453,8 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): "Interface with [LegacyUnenumerableNamedProperties] " "inherits from another interface with " "[LegacyUnenumerableNamedProperties]", - [self.location, ancestor.location]) + [self.location, ancestor.location], + ) ancestor = ancestor.parent if self._isOnGlobalProtoChain: @@ -1302,56 +1462,63 @@ class 
IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): for memberType in ["setter", "deleter"]: memberId = "named " + memberType + "s" if memberId in specialMembersSeen: - raise WebIDLError("Interface with [Global] has a named %s" % - memberType, - [self.location, - specialMembersSeen[memberId].location]) - # Make sure we're not [OverrideBuiltins] - if self.getExtendedAttribute("OverrideBuiltins"): - raise WebIDLError("Interface with [Global] also has " - "[OverrideBuiltins]", - [self.location]) + raise WebIDLError( + "Interface with [Global] has a named %s" % memberType, + [self.location, specialMembersSeen[memberId].location], + ) + # Make sure we're not [LegacyOverrideBuiltIns] + if self.getExtendedAttribute("LegacyOverrideBuiltIns"): + raise WebIDLError( + "Interface with [Global] also has " "[LegacyOverrideBuiltIns]", + [self.location], + ) # Mark all of our ancestors as being on the global's proto chain too parent = self.parent while parent: - # Must not inherit from an interface with [OverrideBuiltins] - if parent.getExtendedAttribute("OverrideBuiltins"): - raise WebIDLError("Interface with [Global] inherits from " - "interface with [OverrideBuiltins]", - [self.location, parent.location]) + # Must not inherit from an interface with [LegacyOverrideBuiltIns] + if parent.getExtendedAttribute("LegacyOverrideBuiltIns"): + raise WebIDLError( + "Interface with [Global] inherits from " + "interface with [LegacyOverrideBuiltIns]", + [self.location, parent.location], + ) parent._isOnGlobalProtoChain = True parent = parent.parent def validate(self): - def checkDuplicateNames(member, name, attributeName): for m in self.members: if m.identifier.name == name: - raise WebIDLError("[%s=%s] has same name as interface member" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "[%s=%s] has same name as interface member" + % (attributeName, name), + [member.location, m.location], + ) if m.isMethod() and m != member and name in m.aliases: - raise WebIDLError("conflicting [%s=%s] definitions" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "conflicting [%s=%s] definitions" % (attributeName, name), + [member.location, m.location], + ) if m.isAttr() and m != member and name in m.bindingAliases: - raise WebIDLError("conflicting [%s=%s] definitions" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "conflicting [%s=%s] definitions" % (attributeName, name), + [member.location, m.location], + ) # We also don't support inheriting from unforgeable interfaces. 
- if self.getExtendedAttribute("Unforgeable") and self.hasChildInterfaces(): - locations = ([self.location] + - list(i.location for i in - self.interfacesBasedOnSelf if i.parent == self)) - raise WebIDLError("%s is an unforgeable ancestor interface" % - self.identifier.name, - locations) + if self.getExtendedAttribute("LegacyUnforgeable") and self.hasChildInterfaces(): + locations = [self.location] + list( + i.location for i in self.interfacesBasedOnSelf if i.parent == self + ) + raise WebIDLError( + "%s is an unforgeable ancestor interface" % self.identifier.name, + locations, + ) ctor = self.ctor() if ctor is not None: ctor.validate() - for namedCtor in self.namedConstructors: + for namedCtor in self.legacyFactoryFunctions: namedCtor.validate() indexedGetter = None @@ -1360,50 +1527,57 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): member.validate() if self.isCallback() and member.getExtendedAttribute("Replaceable"): - raise WebIDLError("[Replaceable] used on an attribute on " - "interface %s which is a callback interface" % - self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "[Replaceable] used on an attribute on " + "interface %s which is a callback interface" % self.identifier.name, + [self.location, member.location], + ) # Check that PutForwards refers to another attribute and that no # cycles exist in forwarded assignments. Also check for a # integer-typed "length" attribute. if member.isAttr(): - if (member.identifier.name == "length" and - member.type.isInteger()): + if member.identifier.name == "length" and member.type.isInteger(): hasLengthAttribute = True iface = self attr = member putForwards = attr.getExtendedAttribute("PutForwards") if putForwards and self.isCallback(): - raise WebIDLError("[PutForwards] used on an attribute " - "on interface %s which is a callback " - "interface" % self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "[PutForwards] used on an attribute " + "on interface %s which is a callback " + "interface" % self.identifier.name, + [self.location, member.location], + ) while putForwards is not None: forwardIface = attr.type.unroll().inner fowardAttr = None for forwardedMember in forwardIface.members: - if (not forwardedMember.isAttr() or - forwardedMember.identifier.name != putForwards[0]): + if ( + not forwardedMember.isAttr() + or forwardedMember.identifier.name != putForwards[0] + ): continue if forwardedMember == member: - raise WebIDLError("Cycle detected in forwarded " - "assignments for attribute %s on " - "%s" % - (member.identifier.name, self), - [member.location]) + raise WebIDLError( + "Cycle detected in forwarded " + "assignments for attribute %s on " + "%s" % (member.identifier.name, self), + [member.location], + ) fowardAttr = forwardedMember break if fowardAttr is None: - raise WebIDLError("Attribute %s on %s forwards to " - "missing attribute %s" % - (attr.identifier.name, iface, putForwards), - [attr.location]) + raise WebIDLError( + "Attribute %s on %s forwards to " + "missing attribute %s" + % (attr.identifier.name, iface, putForwards), + [attr.location], + ) iface = forwardIface attr = fowardAttr @@ -1417,29 +1591,41 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): for alias in member.aliases: if self.isOnGlobalProtoChain(): - raise WebIDLError("[Alias] must not be used on a " - "[Global] interface operation", - [member.location]) - if (member.getExtendedAttribute("Exposed") or - member.getExtendedAttribute("ChromeOnly") or 
- member.getExtendedAttribute("Pref") or - member.getExtendedAttribute("Func") or - member.getExtendedAttribute("SecureContext")): - raise WebIDLError("[Alias] must not be used on a " - "conditionally exposed operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on a " + "[Global] interface operation", + [member.location], + ) + if ( + member.getExtendedAttribute("Exposed") + or member.getExtendedAttribute("ChromeOnly") + or member.getExtendedAttribute("Pref") + or member.getExtendedAttribute("Func") + or member.getExtendedAttribute("Trial") + or member.getExtendedAttribute("SecureContext") + ): + raise WebIDLError( + "[Alias] must not be used on a " + "conditionally exposed operation", + [member.location], + ) if member.isStatic(): - raise WebIDLError("[Alias] must not be used on a " - "static operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on a " "static operation", + [member.location], + ) if member.isIdentifierLess(): - raise WebIDLError("[Alias] must not be used on an " - "identifierless operation", - [member.location]) - if member.isUnforgeable(): - raise WebIDLError("[Alias] must not be used on an " - "[Unforgeable] operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on an " + "identifierless operation", + [member.location], + ) + if member.isLegacyUnforgeable(): + raise WebIDLError( + "[Alias] must not be used on an " + "[LegacyUnforgeable] operation", + [member.location], + ) checkDuplicateNames(member, alias, "Alias") @@ -1449,16 +1635,18 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): for bindingAlias in member.bindingAliases: checkDuplicateNames(member, bindingAlias, "BindingAlias") - # Conditional exposure makes no sense for interfaces with no # interface object. # And SecureContext makes sense for interfaces with no interface object, # since it is also propagated to interface members. 
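
# Illustrative sketch (hypothetical helper, not the parser's API): the
# "exposed conditionally" test used just below reduces to "does the interface
# carry any of the condition attributes, minus an exclusion list".  The list
# mirrors conditionExtendedAttributes later in this patch: Pref, ChromeOnly,
# Func, Trial and SecureContext.
CONDITION_ATTRIBUTES = ["Pref", "ChromeOnly", "Func", "Trial", "SecureContext"]

def is_exposed_conditionally(extended_attrs, exclusions=()):
    return any(name not in exclusions and name in extended_attrs
               for name in CONDITION_ATTRIBUTES)

# SecureContext alone is excluded when validating interfaces with no
# interface object, because it is also propagated to every member.
assert not is_exposed_conditionally({"SecureContext": True},
                                    exclusions=["SecureContext"])
assert is_exposed_conditionally({"Pref": "dom.example.enabled"})
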
- if (self.isExposedConditionally(exclusions=["SecureContext"]) and - not self.hasInterfaceObject()): - raise WebIDLError("Interface with no interface object is " - "exposed conditionally", - [self.location]) + if ( + self.isExposedConditionally(exclusions=["SecureContext"]) + and not self.hasInterfaceObject() + ): + raise WebIDLError( + "Interface with no interface object is " "exposed conditionally", + [self.location], + ) # Value iterators are only allowed on interfaces with indexed getters, # and pair iterators are only allowed on interfaces without indexed @@ -1467,32 +1655,38 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): iterableDecl = self.maplikeOrSetlikeOrIterable if iterableDecl.isValueIterator(): if not indexedGetter: - raise WebIDLError("Interface with value iterator does not " - "support indexed properties", - [self.location, iterableDecl.location]) + raise WebIDLError( + "Interface with value iterator does not " + "support indexed properties", + [self.location, iterableDecl.location], + ) if iterableDecl.valueType != indexedGetter.signatures()[0][0]: - raise WebIDLError("Iterable type does not match indexed " - "getter type", - [iterableDecl.location, - indexedGetter.location]) + raise WebIDLError( + "Iterable type does not match indexed " "getter type", + [iterableDecl.location, indexedGetter.location], + ) if not hasLengthAttribute: - raise WebIDLError('Interface with value iterator does not ' - 'have an integer-typed "length" attribute', - [self.location, iterableDecl.location]) + raise WebIDLError( + "Interface with value iterator does not " + 'have an integer-typed "length" attribute', + [self.location, iterableDecl.location], + ) else: assert iterableDecl.isPairIterator() if indexedGetter: - raise WebIDLError("Interface with pair iterator supports " - "indexed properties", - [self.location, iterableDecl.location, - indexedGetter.location]) + raise WebIDLError( + "Interface with pair iterator supports " "indexed properties", + [self.location, iterableDecl.location, indexedGetter.location], + ) if indexedGetter and not hasLengthAttribute: - raise WebIDLError('Interface with an indexed getter does not have ' - 'an integer-typed "length" attribute', - [self.location, indexedGetter.location]) + raise WebIDLError( + "Interface with an indexed getter does not have " + 'an integer-typed "length" attribute', + [self.location, indexedGetter.location], + ) def setCallback(self, value): self._callback = value @@ -1505,15 +1699,25 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): return ( # JS-implemented things should never need the # this-handling weirdness of single-operation interfaces. 
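
# Illustrative sketch, not the parser's API: the check that follows reduces
# to "no inheritance, no attributes, and every regular (non-static) operation
# shares a single identifier".  The dict-shaped members here are a
# hypothetical stand-in for IDLInterfaceMember.
def is_single_operation_interface(parent, members):
    regular_ops = {m["name"] for m in members
                   if m["kind"] == "method" and not m.get("static")}
    has_attrs = any(m["kind"] == "attr" for m in members)
    return parent is None and not has_attrs and len(regular_ops) == 1

# e.g. callback interface EventListener { undefined handleEvent(Event event); };
assert is_single_operation_interface(
    None, [{"kind": "method", "name": "handleEvent"}])
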
- not self.isJSImplemented() and + not self.isJSImplemented() + and # Not inheriting from another interface - not self.parent and + not self.parent + and # No attributes of any kinds - not any(m.isAttr() for m in self.members) and + not any(m.isAttr() for m in self.members) + and # There is at least one regular operation, and all regular # operations have the same identifier - len(set(m.identifier.name for m in self.members if - m.isMethod() and not m.isStatic())) == 1) + len( + set( + m.identifier.name + for m in self.members + if m.isMethod() and not m.isStatic() + ) + ) + == 1 + ) def inheritanceDepth(self): depth = 0 @@ -1529,14 +1733,18 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): def hasInterfaceObject(self): if self.isCallback(): return self.hasConstants() - return not hasattr(self, "_noInterfaceObject") + return not hasattr(self, "_noInterfaceObject") and not self.isPseudoInterface() def hasInterfacePrototypeObject(self): - return (not self.isCallback() and not self.isNamespace() - and self.getUserData('hasConcreteDescendant', False)) + return ( + not self.isCallback() + and not self.isNamespace() + and self.getUserData("hasConcreteDescendant", False) + and not self.isPseudoInterface() + ) def addIncludedMixin(self, includedMixin): - assert(isinstance(includedMixin, IDLInterfaceMixin)) + assert isinstance(includedMixin, IDLInterfaceMixin) self.includedMixins.add(includedMixin) def getInheritedInterfaces(self): @@ -1545,7 +1753,7 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): (not including this interface itself). The list is in order from most derived to least derived. """ - assert(self._finished) + assert self._finished if not self.parent: return [] parentInterfaces = self.parent.getInheritedInterfaces() @@ -1596,6 +1804,9 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): def isOnGlobalProtoChain(self): return self._isOnGlobalProtoChain + def isPseudoInterface(self): + return self._isPseudo + def _getDependentObjects(self): deps = set(self.members) deps.update(self.includedMixins) @@ -1606,18 +1817,35 @@ class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): def hasMembersInSlots(self): return self._ownMembersInSlots != 0 - conditionExtendedAttributes = [ "Pref", "ChromeOnly", "Func", - "SecureContext" ] + conditionExtendedAttributes = [ + "Pref", + "ChromeOnly", + "Func", + "Trial", + "SecureContext", + ] + def isExposedConditionally(self, exclusions=[]): - return any(((not a in exclusions) and self.getExtendedAttribute(a)) for a in self.conditionExtendedAttributes) + return any( + ((not a in exclusions) and self.getExtendedAttribute(a)) + for a in self.conditionExtendedAttributes + ) + class IDLInterface(IDLInterfaceOrNamespace): - def __init__(self, location, parentScope, name, parent, members, - isKnownNonPartial, classNameOverride=None, - toStringTag=None): - IDLInterfaceOrNamespace.__init__(self, location, parentScope, name, - parent, members, isKnownNonPartial, - toStringTag) + def __init__( + self, + location, + parentScope, + name, + parent, + members, + isKnownNonPartial, + classNameOverride=None, + ): + IDLInterfaceOrNamespace.__init__( + self, location, parentScope, name, parent, members, isKnownNonPartial + ) self.classNameOverride = classNameOverride def __str__(self): @@ -1629,7 +1857,7 @@ class IDLInterface(IDLInterfaceOrNamespace): def getClassName(self): if self.classNameOverride: return self.classNameOverride - return self.identifier.name + return 
IDLInterfaceOrNamespace.getClassName(self) def addExtendedAttributes(self, attrs): for attr in attrs: @@ -1637,22 +1865,29 @@ class IDLInterface(IDLInterfaceOrNamespace): # Special cased attrs if identifier == "TreatNonCallableAsNull": - raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces", - [attr.location, self.location]) - if identifier == "TreatNonObjectAsNull": - raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces", - [attr.location, self.location]) - elif identifier == "NoInterfaceObject": + raise WebIDLError( + "TreatNonCallableAsNull cannot be specified on interfaces", + [attr.location, self.location], + ) + if identifier == "LegacyTreatNonObjectAsNull": + raise WebIDLError( + "LegacyTreatNonObjectAsNull cannot be specified on interfaces", + [attr.location, self.location], + ) + elif identifier == "LegacyNoInterfaceObject": if not attr.noArguments(): - raise WebIDLError("[NoInterfaceObject] must take no arguments", - [attr.location]) + raise WebIDLError( + "[LegacyNoInterfaceObject] must take no arguments", + [attr.location], + ) self._noInterfaceObject = True - elif identifier == "NamedConstructor": + elif identifier == "LegacyFactoryFunction": if not attr.hasValue(): - raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list", - [attr.location]) - + raise WebIDLError( + "LegacyFactoryFunction must either take an identifier or take a named argument list", + [attr.location], + ) args = attr.args() if attr.hasArgs() else [] @@ -1664,37 +1899,43 @@ class IDLInterface(IDLInterfaceOrNamespace): # Named constructors are always assumed to be able to # throw (since there's no way to indicate otherwise). method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Throws",))]) + [IDLExtendedAttribute(self.location, ("Throws",))] + ) - # We need to detect conflicts for NamedConstructors across + # We need to detect conflicts for LegacyFactoryFunctions across # interfaces. We first call resolve on the parentScope, - # which will merge all NamedConstructors with the same + # which will merge all LegacyFactoryFunctions with the same # identifier accross interfaces as overloads. method.resolve(self.parentScope) # Then we look up the identifier on the parentScope. If the # result is the same as the method we're adding then it # hasn't been added as an overload and it's the first time - # we've encountered a NamedConstructor with that identifier. + # we've encountered a LegacyFactoryFunction with that identifier. # If the result is not the same as the method we're adding # then it has been added as an overload and we need to check # whether the result is actually one of our existing - # NamedConstructors. + # LegacyFactoryFunctions. 
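
# Illustrative sketch with a plain dict standing in for IDLScope's overload
# resolution (hypothetical names, not the parser's API): a LegacyFactoryFunction
# identifier may be reused by the same interface (it becomes an overload), but
# reuse by a different interface is a conflict, which is what the lookup just
# below detects.
def add_factory_function(registry, name, interface):
    owner = registry.setdefault(name, interface)
    if owner != interface:
        raise ValueError("LegacyFactoryFunction conflicts with a "
                         "LegacyFactoryFunction of a different interface")

registry = {}
add_factory_function(registry, "Image", "HTMLImageElement")
add_factory_function(registry, "Image", "HTMLImageElement")  # overload: fine
# add_factory_function(registry, "Image", "SomeOtherInterface")  # would raise
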
newMethod = self.parentScope.lookupIdentifier(method.identifier) if newMethod == method: - self.namedConstructors.append(method) - elif newMethod not in self.namedConstructors: - raise WebIDLError("NamedConstructor conflicts with a " - "NamedConstructor of a different interface", - [method.location, newMethod.location]) - elif (identifier == "ExceptionClass"): + self.legacyFactoryFunctions.append(method) + elif newMethod not in self.legacyFactoryFunctions: + raise WebIDLError( + "LegacyFactoryFunction conflicts with a " + "LegacyFactoryFunction of a different interface", + [method.location, newMethod.location], + ) + elif identifier == "ExceptionClass": if not attr.noArguments(): - raise WebIDLError("[ExceptionClass] must take no arguments", - [attr.location]) + raise WebIDLError( + "[ExceptionClass] must take no arguments", [attr.location] + ) if self.parent: - raise WebIDLError("[ExceptionClass] must not be specified on " - "an interface with inherited interfaces", - [attr.location, self.location]) + raise WebIDLError( + "[ExceptionClass] must not be specified on " + "an interface with inherited interfaces", + [attr.location, self.location], + ) elif identifier == "Global": if attr.hasValue(): self.globalNames = [attr.value()] @@ -1702,8 +1943,9 @@ class IDLInterface(IDLInterfaceOrNamespace): self.globalNames = attr.args() else: self.globalNames = [self.identifier.name] - self.parentScope.addIfaceGlobalNames(self.identifier.name, - self.globalNames) + self.parentScope.addIfaceGlobalNames( + self.identifier.name, self.globalNames + ) self._isOnGlobalProtoChain = True elif identifier == "LegacyWindowAlias": if attr.hasValue(): @@ -1711,60 +1953,74 @@ class IDLInterface(IDLInterfaceOrNamespace): elif attr.hasArgs(): self.legacyWindowAliases = attr.args() else: - raise WebIDLError("[%s] must either take an identifier " - "or take an identifier list" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must either take an identifier " + "or take an identifier list" % identifier, + [attr.location], + ) for alias in self.legacyWindowAliases: unresolved = IDLUnresolvedIdentifier(attr.location, alias) IDLObjectWithIdentifier(attr.location, self.parentScope, unresolved) elif identifier == "SecureContext": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) # This gets propagated to all our members. 
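
# Illustrative sketch (dict-shaped members are a hypothetical stand-in for
# IDLInterfaceMember): [SecureContext] on the interface is pushed down onto
# every member, and spelling it out on both the interface and a member is an
# error, mirroring the loop that follows.
def propagate_secure_context(members):
    for member in members:
        if "SecureContext" in member["attrs"]:
            raise ValueError("[SecureContext] specified on both an interface "
                             "member and on the interface itself")
        member["attrs"].append("SecureContext")

members = [{"name": "doThing", "attrs": []}]
propagate_secure_context(members)
assert members[0]["attrs"] == ["SecureContext"]
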
for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both " - "an interface member and on the " - "interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both " + "an interface member and on the " + "interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) - elif (identifier == "NeedResolve" or - identifier == "OverrideBuiltins" or - identifier == "ChromeOnly" or - identifier == "Unforgeable" or - identifier == "LegacyEventInit" or - identifier == "ProbablyShortLivingWrapper" or - identifier == "LegacyUnenumerableNamedProperties" or - identifier == "RunConstructorInCallerCompartment" or - identifier == "WantsEventListenerHooks" or - identifier == "Serializable" or - identifier == "Abstract" or - identifier == "Inline"): + elif ( + identifier == "NeedResolve" + or identifier == "LegacyOverrideBuiltIns" + or identifier == "ChromeOnly" + or identifier == "LegacyUnforgeable" + or identifier == "LegacyEventInit" + or identifier == "ProbablyShortLivingWrapper" + or identifier == "LegacyUnenumerableNamedProperties" + or identifier == "RunConstructorInCallerCompartment" + or identifier == "WantsEventListenerHooks" + or identifier == "Serializable" + or identifier == "Abstract" + or identifier == "Inline" + ): # Known extended attributes that do not take values if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) elif identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) - elif (identifier == "Pref" or - identifier == "JSImplementation" or - identifier == "HeaderFile" or - identifier == "Func" or - identifier == "Deprecated"): + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) + elif ( + identifier == "Pref" + or identifier == "JSImplementation" + or identifier == "HeaderFile" + or identifier == "Func" + or identifier == "Trial" + or identifier == "Deprecated" + ): # Known extended attributes that take a string value if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) elif identifier == "InstrumentedProps": # Known extended attributes that take a list if not attr.hasArgs(): - raise WebIDLError("[%s] must have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have arguments" % identifier, [attr.location] + ) else: - raise WebIDLError("Unknown extended attribute %s on interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on interface" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -1777,7 +2033,8 @@ class IDLInterface(IDLInterfaceOrNamespace): "interface. 
Per spec, that means the object should not be " "serializable, so chances are someone made a mistake here " "somewhere.", - [self.location, self.parent.location]) + [self.location, self.parent.location], + ) def isSerializable(self): return self.getExtendedAttribute("Serializable") @@ -1796,9 +2053,9 @@ class IDLInterface(IDLInterfaceOrNamespace): class IDLNamespace(IDLInterfaceOrNamespace): def __init__(self, location, parentScope, name, members, isKnownNonPartial): - IDLInterfaceOrNamespace.__init__(self, location, parentScope, name, - None, members, isKnownNonPartial, - toStringTag=None) + IDLInterfaceOrNamespace.__init__( + self, location, parentScope, name, None, members, isKnownNonPartial + ) def __str__(self): return "Namespace '%s'" % self.identifier.name @@ -1815,30 +2072,35 @@ class IDLNamespace(IDLInterfaceOrNamespace): identifier = attr.identifier() if identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) elif identifier == "ClassString": # Takes a string value to override the default "Object" if # desired. if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) - elif (identifier == "ProtoObjectHack" or - identifier == "ChromeOnly"): + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) + elif identifier == "ProtoObjectHack" or identifier == "ChromeOnly": if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) - elif (identifier == "Pref" or - identifier == "HeaderFile" or - identifier == "Func"): + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) + elif ( + identifier == "Pref" + or identifier == "HeaderFile" + or identifier == "Func" + or identifier == "Trial" + ): # Known extended attributes that take a string value if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) else: - raise WebIDLError("Unknown extended attribute %s on namespace" % - identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on namespace" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -1874,8 +2136,9 @@ class IDLDictionary(IDLObjectWithScope): Returns true if this dictionary can be empty (that is, it has no required members and neither do any of its ancestors). """ - return (all(member.optional for member in self.members) and - (not self.parent or self.parent.canBeEmpty())) + return all(member.optional for member in self.members) and ( + not self.parent or self.parent.canBeEmpty() + ) def finish(self, scope): if self._finished: @@ -1888,9 +2151,11 @@ class IDLDictionary(IDLObjectWithScope): oldParent = self.parent self.parent = self.parent.finish(scope) if not isinstance(self.parent, IDLDictionary): - raise WebIDLError("Dictionary %s has parent that is not a dictionary" % - self.identifier.name, - [oldParent.location, self.parent.location]) + raise WebIDLError( + "Dictionary %s has parent that is not a dictionary" + % self.identifier.name, + [oldParent.location, self.parent.location], + ) # Make sure the parent resolves all its members before we start # looking at them. 
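
# Illustrative sketch of the dictionary finishing rules handled in the next
# hunk (finish_dictionary and its arguments are hypothetical simplifications):
# members are sorted lexicographically by identifier unless the dictionary
# opts out with [Unsorted], and a dictionary must never appear in its own
# ancestor chain.
def finish_dictionary(name, member_names, parent_chain, unsorted=False):
    if name in parent_chain:
        raise ValueError("Dictionary %s has itself as an ancestor" % name)
    return member_names if unsorted else sorted(member_names)

assert finish_dictionary("Options", ["zoom", "alpha"], []) == ["alpha", "zoom"]
assert finish_dictionary("Options", ["zoom", "alpha"], [],
                         unsorted=True) == ["zoom", "alpha"]
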
@@ -1907,16 +2172,19 @@ class IDLDictionary(IDLObjectWithScope): member.complete(scope) assert member.type.isComplete() - # Members of a dictionary are sorted in lexicographic order - self.members.sort(key=lambda x: x.identifier.name) + # Members of a dictionary are sorted in lexicographic order, + # unless the dictionary opts out. + if not self.getExtendedAttribute("Unsorted"): + self.members.sort(key=lambda x: x.identifier.name) inheritedMembers = [] ancestor = self.parent while ancestor: if ancestor == self: - raise WebIDLError("Dictionary %s has itself as an ancestor" % - self.identifier.name, - [self.identifier.location]) + raise WebIDLError( + "Dictionary %s has itself as an ancestor" % self.identifier.name, + [self.identifier.location], + ) inheritedMembers.extend(ancestor.members) ancestor = ancestor.parent @@ -1924,9 +2192,11 @@ class IDLDictionary(IDLObjectWithScope): for inheritedMember in inheritedMembers: for member in self.members: if member.identifier.name == inheritedMember.identifier.name: - raise WebIDLError("Dictionary %s has two members with name %s" % - (self.identifier.name, member.identifier.name), - [member.location, inheritedMember.location]) + raise WebIDLError( + "Dictionary %s has two members with name %s" + % (self.identifier.name, member.identifier.name), + [member.location, inheritedMember.location], + ) def validate(self): def typeContainsDictionary(memberType, dictionary): @@ -1944,17 +2214,20 @@ class IDLDictionary(IDLObjectWithScope): None, if the boolean value in the first element is False. """ - if (memberType.nullable() or - memberType.isSequence() or - memberType.isRecord()): + if ( + memberType.nullable() + or memberType.isSequence() + or memberType.isRecord() + ): return typeContainsDictionary(memberType.inner, dictionary) if memberType.isDictionary(): if memberType.inner == dictionary: return (True, [memberType.location]) - (contains, locations) = dictionaryContainsDictionary(memberType.inner, - dictionary) + (contains, locations) = dictionaryContainsDictionary( + memberType.inner, dictionary + ) if contains: return (True, [memberType.location] + locations) @@ -1976,7 +2249,9 @@ class IDLDictionary(IDLObjectWithScope): if dictMember.parent == dictionary: return (True, [dictMember.location]) else: - (contains, locations) = dictionaryContainsDictionary(dictMember.parent, dictionary) + (contains, locations) = dictionaryContainsDictionary( + dictMember.parent, dictionary + ) if contains: return (True, [dictMember.location] + locations) @@ -1984,14 +2259,33 @@ class IDLDictionary(IDLObjectWithScope): for member in self.members: if member.type.isDictionary() and member.type.nullable(): - raise WebIDLError("Dictionary %s has member with nullable " - "dictionary type" % self.identifier.name, - [member.location]) + raise WebIDLError( + "Dictionary %s has member with nullable " + "dictionary type" % self.identifier.name, + [member.location], + ) (contains, locations) = typeContainsDictionary(member.type, self) if contains: - raise WebIDLError("Dictionary %s has member with itself as type." % - self.identifier.name, - [member.location] + locations) + raise WebIDLError( + "Dictionary %s has member with itself as type." + % self.identifier.name, + [member.location] + locations, + ) + + if member.type.isUndefined(): + raise WebIDLError( + "Dictionary %s has member with undefined as its type." 
+ % self.identifier.name, + [member.location], + ) + elif member.type.isUnion(): + for unionMember in member.type.unroll().flatMemberTypes: + if unionMember.isUndefined(): + raise WebIDLError( + "Dictionary %s has member with a union containing " + "undefined as a type." % self.identifier.name, + [unionMember.location], + ) def getExtendedAttribute(self, name): return self._extendedAttrDict.get(name, None) @@ -2000,31 +2294,40 @@ class IDLDictionary(IDLObjectWithScope): for attr in attrs: identifier = attr.identifier() - if (identifier == "GenerateInitFromJSON" or - identifier == "GenerateInit"): + if identifier == "GenerateInitFromJSON" or identifier == "GenerateInit": if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) self.needsConversionFromJS = True - elif (identifier == "GenerateConversionToJS" or - identifier == "GenerateToJSON"): + elif ( + identifier == "GenerateConversionToJS" or identifier == "GenerateToJSON" + ): if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) # ToJSON methods require to-JS conversion, because we # implement ToJSON by converting to a JS object and # then using JSON.stringify. self.needsConversionToJS = True + elif identifier == "Unsorted": + if not attr.noArguments(): + raise WebIDLError( + "[Unsorted] must take no arguments", [attr.location] + ) else: - raise WebIDLError("[%s] extended attribute not allowed on " - "dictionaries" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] extended attribute not allowed on " + "dictionaries" % identifier, + [attr.location], + ) self._extendedAttrDict[identifier] = True def _getDependentObjects(self): deps = set(self.members) - if (self.parent): + if self.parent: deps.add(self.parent) return deps @@ -2039,8 +2342,9 @@ class IDLEnum(IDLObjectWithIdentifier): assert isinstance(name, IDLUnresolvedIdentifier) if len(values) != len(set(values)): - raise WebIDLError("Enum %s has multiple identical strings" % name.name, - [location]) + raise WebIDLError( + "Enum %s has multiple identical strings" % name.name, [location] + ) IDLObjectWithIdentifier.__init__(self, location, parentScope, name) self._values = values @@ -2059,9 +2363,10 @@ class IDLEnum(IDLObjectWithIdentifier): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on enums", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " "allowed on enums", + [attrs[0].location, self.location], + ) def _getDependentObjects(self): return set() @@ -2070,56 +2375,72 @@ class IDLEnum(IDLObjectWithIdentifier): class IDLType(IDLObject): Tags = enum( # The integer types - 'int8', - 'uint8', - 'int16', - 'uint16', - 'int32', - 'uint32', - 'int64', - 'uint64', + "int8", + "uint8", + "int16", + "uint16", + "int32", + "uint32", + "int64", + "uint64", # Additional primitive types - 'bool', - 'unrestricted_float', - 'float', - 'unrestricted_double', + "bool", + "unrestricted_float", + "float", + "unrestricted_double", # "double" last primitive type to match IDLBuiltinType - 'double', + "double", # Other types - 'any', - 'domstring', - 'bytestring', - 'usvstring', - 'utf8string', - 'jsstring', - 'object', - 'undefined', + "any", + "undefined", + 
"domstring", + "bytestring", + "usvstring", + "utf8string", + "jsstring", + "object", # Funny stuff - 'interface', - 'dictionary', - 'enum', - 'callback', - 'union', - 'sequence', - 'record', - 'promise', - ) + "interface", + "dictionary", + "enum", + "callback", + "union", + "sequence", + "record", + "promise", + "observablearray", + ) def __init__(self, location, name): IDLObject.__init__(self, location) self.name = name self.builtin = False - self.treatNullAsEmpty = False + self.legacyNullToEmptyString = False self._clamp = False self._enforceRange = False self._allowShared = False self._extendedAttrDict = {} + def __hash__(self): + return ( + hash(self.builtin) + + hash(self.name) + + hash(self._clamp) + + hash(self._enforceRange) + + hash(self.legacyNullToEmptyString) + + hash(self._allowShared) + ) + def __eq__(self, other): - return (other and self.builtin == other.builtin and self.name == other.name and - self._clamp == other.hasClamp() and self._enforceRange == other.hasEnforceRange() and - self.treatNullAsEmpty == other.treatNullAsEmpty and - self._allowShared == other.hasAllowShared()) + return ( + other + and self.builtin == other.builtin + and self.name == other.name + and self._clamp == other.hasClamp() + and self._enforceRange == other.hasEnforceRange() + and self.legacyNullToEmptyString == other.legacyNullToEmptyString + and self._allowShared == other.hasAllowShared() + ) def __ne__(self, other): return not self == other @@ -2169,7 +2490,7 @@ class IDLType(IDLObject): return False def isUndefined(self): - return self.name == "Undefined" + return False def isSequence(self): return False @@ -2199,23 +2520,16 @@ class IDLType(IDLObject): return False def isGeckoInterface(self): - """ Returns a boolean indicating whether this type is an 'interface' - type that is implemented in Gecko. At the moment, this returns - true for all interface types that are not types from the TypedArray - spec.""" + """Returns a boolean indicating whether this type is an 'interface' + type that is implemented in Gecko. At the moment, this returns + true for all interface types that are not types from the TypedArray + spec.""" return self.isInterface() and not self.isSpiderMonkeyInterface() def isSpiderMonkeyInterface(self): - """ Returns a boolean indicating whether this type is an 'interface' - type that is implemented in SpiderMonkey. 
""" - return self.isInterface() and (self.isBufferSource() or - self.isReadableStream()) - - def isDictionary(self): - return False - - def isInterface(self): - return False + """Returns a boolean indicating whether this type is an 'interface' + type that is implemented in SpiderMonkey.""" + return self.isInterface() and (self.isBufferSource() or self.isReadableStream()) def isAny(self): return self.tag() == IDLType.Tags.any @@ -2242,6 +2556,12 @@ class IDLType(IDLObject): def isJSONType(self): return False + def isObservableArray(self): + return False + + def isDictionaryLike(self): + return self.isDictionary() or self.isRecord() or self.isCallbackInterface() + def hasClamp(self): return self._clamp @@ -2264,8 +2584,10 @@ class IDLType(IDLObject): def withExtendedAttributes(self, attrs): if len(attrs) > 0: - raise WebIDLError("Extended attributes on types only supported for builtins", - [attrs[0].location, self.location]) + raise WebIDLError( + "Extended attributes on types only supported for builtins", + [attrs[0].location, self.location], + ) return self def getExtendedAttribute(self, name): @@ -2278,8 +2600,10 @@ class IDLType(IDLObject): return self def isDistinguishableFrom(self, other): - raise TypeError("Can't tell whether a generic type is or is not " - "distinguishable from other things") + raise TypeError( + "Can't tell whether a generic type is or is not " + "distinguishable from other things" + ) def isExposedInAllOf(self, exposureSet): return True @@ -2287,7 +2611,7 @@ class IDLType(IDLObject): class IDLUnresolvedType(IDLType): """ - Unresolved types are interface types + Unresolved types are interface types """ def __init__(self, location, name, attrs=[]): @@ -2302,19 +2626,17 @@ class IDLUnresolvedType(IDLType): try: obj = scope._lookupIdentifier(self.name) except: - raise WebIDLError("Unresolved type '%s'." % self.name, - [self.location]) + raise WebIDLError("Unresolved type '%s'." 
% self.name, [self.location]) assert obj - if obj.isType(): - print(obj) assert not obj.isType() if obj.isTypedef(): assert self.name.name == obj.identifier.name - typedefType = IDLTypedefType(self.location, obj.innerType, - obj.identifier) + typedefType = IDLTypedefType(self.location, obj.innerType, obj.identifier) assert not typedefType.isComplete() - return typedefType.complete(scope).withExtendedAttributes(self.extraTypeAttributes) + return typedefType.complete(scope).withExtendedAttributes( + self.extraTypeAttributes + ) elif obj.isCallback() and not obj.isInterface(): assert self.name.name == obj.identifier.name return IDLCallbackType(obj.location, obj) @@ -2326,8 +2648,10 @@ class IDLUnresolvedType(IDLType): return IDLUnresolvedType(self.location, self.name, attrs) def isDistinguishableFrom(self, other): - raise TypeError("Can't tell whether an unresolved type is or is not " - "distinguishable from other things") + raise TypeError( + "Can't tell whether an unresolved type is or is not " + "distinguishable from other things" + ) class IDLParametrizedType(IDLType): @@ -2355,17 +2679,16 @@ class IDLParametrizedType(IDLType): class IDLNullableType(IDLParametrizedType): def __init__(self, location, innerType): - assert not innerType.isUndefined() assert not innerType == BuiltinTypes[IDLBuiltinType.Types.any] IDLParametrizedType.__init__(self, location, None, innerType) - def __eq__(self, other): - return isinstance(other, IDLNullableType) and self.inner == other.inner - def __hash__(self): return hash(self.inner) + def __eq__(self, other): + return isinstance(other, IDLNullableType) and self.inner == other.inner + def __str__(self): return self.inner.__str__() + "OrNull" @@ -2415,7 +2738,7 @@ class IDLNullableType(IDLParametrizedType): return self.inner.isInteger() def isUndefined(self): - return False + return self.inner.isUndefined() def isSequence(self): return self.inner.isSequence() @@ -2461,6 +2784,9 @@ class IDLNullableType(IDLParametrizedType): def isJSONType(self): return self.inner.isJSONType() + def isObservableArray(self): + return self.inner.isObservableArray() + def hasClamp(self): return self.inner.hasClamp() @@ -2482,27 +2808,41 @@ class IDLNullableType(IDLParametrizedType): assert self.inner.isComplete() if self.inner.nullable(): - raise WebIDLError("The inner type of a nullable type must not be " - "a nullable type", - [self.location, self.inner.location]) + raise WebIDLError( + "The inner type of a nullable type must not be a nullable type", + [self.location, self.inner.location], + ) if self.inner.isUnion(): if self.inner.hasNullableType: - raise WebIDLError("The inner type of a nullable type must not " - "be a union type that itself has a nullable " - "type as a member type", [self.location]) + raise WebIDLError( + "The inner type of a nullable type must not " + "be a union type that itself has a nullable " + "type as a member type", + [self.location], + ) if self.inner.isDOMString(): - if self.inner.treatNullAsEmpty: - raise WebIDLError("[TreatNullAs] not allowed on a nullable DOMString", - [self.location, self.inner.location]) + if self.inner.legacyNullToEmptyString: + raise WebIDLError( + "[LegacyNullToEmptyString] not allowed on a nullable DOMString", + [self.location, self.inner.location], + ) + if self.inner.isObservableArray(): + raise WebIDLError( + "The inner type of a nullable type must not be an ObservableArray type", + [self.location, self.inner.location], + ) self.name = self.inner.name + "OrNull" return self def isDistinguishableFrom(self, other): - if 
(other.nullable() or - other.isDictionary() or - (other.isUnion() and - (other.hasNullableType or other.hasDictionaryType()))): + if ( + other.nullable() + or other.isDictionary() + or ( + other.isUnion() and (other.hasNullableType or other.hasDictionaryType()) + ) + ): # Can't tell which type null should become return False return self.inner.isDistinguishableFrom(other) @@ -2525,57 +2865,21 @@ class IDLSequenceType(IDLParametrizedType): if self.inner.isComplete(): self.name = self.inner.name + "Sequence" - def __eq__(self, other): - return isinstance(other, IDLSequenceType) and self.inner == other.inner - def __hash__(self): return hash(self.inner) + def __eq__(self, other): + return isinstance(other, IDLSequenceType) and self.inner == other.inner + def __str__(self): return self.inner.__str__() + "Sequence" def prettyName(self): return "sequence<%s>" % self.inner.prettyName() - def nullable(self): - return False - - def isPrimitive(self): - return False - - def isString(self): - return False - - def isByteString(self): - return False - - def isDOMString(self): - return False - - def isUSVString(self): - return False - - def isUTF8String(self): - return False - - def isJSString(self): - return False - - def isUndefined(self): - return False - def isSequence(self): return True - def isDictionary(self): - return False - - def isInterface(self): - return False - - def isEnum(self): - return False - def isJSONType(self): return self.inner.isJSONType() @@ -2583,6 +2887,12 @@ class IDLSequenceType(IDLParametrizedType): return IDLType.Tags.sequence def complete(self, scope): + if self.inner.isObservableArray(): + raise WebIDLError( + "The inner type of a sequence type must not be an ObservableArray type", + [self.location, self.inner.location], + ) + self.inner = self.inner.complete(scope) self.name = self.inner.name + "Sequence" return self @@ -2593,9 +2903,16 @@ class IDLSequenceType(IDLParametrizedType): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isInterface() or other.isDictionary() or - other.isCallback() or other.isRecord()) + return ( + other.isUndefined() + or other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isInterface() + or other.isDictionary() + or other.isCallback() + or other.isRecord() + ) class IDLRecordType(IDLParametrizedType): @@ -2612,6 +2929,9 @@ class IDLRecordType(IDLParametrizedType): if self.inner.isComplete(): self.name = self.keyType.name + self.inner.name + "Record" + def __hash__(self): + return hash(self.inner) + def __eq__(self, other): return isinstance(other, IDLRecordType) and self.inner == other.inner @@ -2631,6 +2951,12 @@ class IDLRecordType(IDLParametrizedType): return IDLType.Tags.record def complete(self, scope): + if self.inner.isObservableArray(): + raise WebIDLError( + "The value type of a record type must not be an ObservableArray type", + [self.location, self.inner.location], + ) + self.inner = self.inner.complete(scope) self.name = self.keyType.name + self.inner.name + "Record" return self @@ -2647,13 +2973,84 @@ class IDLRecordType(IDLParametrizedType): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isNonCallbackInterface() or other.isSequence()) + return ( + other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isNonCallbackInterface() + or 
other.isSequence() + ) def isExposedInAllOf(self, exposureSet): return self.inner.unroll().isExposedInAllOf(exposureSet) +class IDLObservableArrayType(IDLParametrizedType): + def __init__(self, location, innerType): + assert not innerType.isUndefined() + IDLParametrizedType.__init__(self, location, None, innerType) + + def __hash__(self): + return hash(self.inner) + + def __eq__(self, other): + return isinstance(other, IDLObservableArrayType) and self.inner == other.inner + + def __str__(self): + return self.inner.__str__() + "ObservableArray" + + def prettyName(self): + return "ObservableArray<%s>" % self.inner.prettyName() + + def isJSONType(self): + return self.inner.isJSONType() + + def isObservableArray(self): + return True + + def isComplete(self): + return self.name is not None + + def tag(self): + return IDLType.Tags.observablearray + + def complete(self, scope): + if not self.inner.isComplete(): + self.inner = self.inner.complete(scope) + assert self.inner.isComplete() + + if self.inner.isDictionary(): + raise WebIDLError( + "The inner type of an ObservableArray type must not " + "be a dictionary type", + [self.location, self.inner.location], + ) + if self.inner.isSequence(): + raise WebIDLError( + "The inner type of an ObservableArray type must not " + "be a sequence type", + [self.location, self.inner.location], + ) + if self.inner.isRecord(): + raise WebIDLError( + "The inner type of an ObservableArray type must not be a record type", + [self.location, self.inner.location], + ) + if self.inner.isObservableArray(): + raise WebIDLError( + "The inner type of an ObservableArray type must not " + "be an ObservableArray type", + [self.location, self.inner.location], + ) + + self.name = self.inner.name + "ObservableArray" + return self + + def isDistinguishableFrom(self, other): + # ObservableArrays are not distinguishable from anything. 
+ return False + + class IDLUnionType(IDLType): def __init__(self, location, memberTypes): IDLType.__init__(self, location, "") @@ -2673,9 +3070,6 @@ class IDLUnionType(IDLType): def prettyName(self): return "(" + " or ".join(m.prettyName() for m in self.memberTypes) + ")" - def isUndefined(self): - return False - def isUnion(self): return True @@ -2727,36 +3121,46 @@ class IDLUnionType(IDLType): while i < len(self.flatMemberTypes): if self.flatMemberTypes[i].nullable(): if self.hasNullableType: - raise WebIDLError("Can't have more than one nullable types in a union", - [nullableType.location, self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have more than one nullable types in a union", + [nullableType.location, self.flatMemberTypes[i].location], + ) if self.hasDictionaryType(): - raise WebIDLError("Can't have a nullable type and a " - "dictionary type in a union", - [self._dictionaryType.location, - self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have a nullable type and a " + "dictionary type in a union", + [ + self._dictionaryType.location, + self.flatMemberTypes[i].location, + ], + ) self.hasNullableType = True nullableType = self.flatMemberTypes[i] self.flatMemberTypes[i] = self.flatMemberTypes[i].inner continue if self.flatMemberTypes[i].isDictionary(): if self.hasNullableType: - raise WebIDLError("Can't have a nullable type and a " - "dictionary type in a union", - [nullableType.location, - self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have a nullable type and a " + "dictionary type in a union", + [nullableType.location, self.flatMemberTypes[i].location], + ) self._dictionaryType = self.flatMemberTypes[i] + self.flatMemberTypes[i].inner.needsConversionFromJS = True elif self.flatMemberTypes[i].isUnion(): - self.flatMemberTypes[i:i + 1] = self.flatMemberTypes[i].memberTypes + self.flatMemberTypes[i : i + 1] = self.flatMemberTypes[i].memberTypes continue i += 1 for (i, t) in enumerate(self.flatMemberTypes[:-1]): - for u in self.flatMemberTypes[i + 1:]: + for u in self.flatMemberTypes[i + 1 :]: if not t.isDistinguishableFrom(u): - raise WebIDLError("Flat member types of a union should be " - "distinguishable, " + str(t) + " is not " - "distinguishable from " + str(u), - [self.location, t.location, u.location]) + raise WebIDLError( + "Flat member types of a union should be " + "distinguishable, " + str(t) + " is not " + "distinguishable from " + str(u), + [self.location, t.location, u.location], + ) return self @@ -2778,8 +3182,10 @@ class IDLUnionType(IDLType): def isExposedInAllOf(self, exposureSet): # We could have different member types in different globals. Just make sure that each thing in exposureSet has one of our member types exposed in it. 
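
# Illustrative sketch (hypothetical names and exposure sets, not the parser's
# types): a union counts as exposed in a set of globals only if, for every
# global in that set, at least one flattened member type is exposed there,
# which is what the loop below checks.
def union_exposed_in_all_of(member_exposure_sets, exposure_set):
    return all(any(global_name in member for member in member_exposure_sets)
               for global_name in exposure_set)

window_and_worker_type = {"Window", "Worker"}
worker_only_type = {"Worker"}
assert union_exposed_in_all_of([window_and_worker_type, worker_only_type],
                               {"Window", "Worker"})
assert not union_exposed_in_all_of([worker_only_type], {"Window", "Worker"})
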
for globalName in exposureSet: - if not any(t.unroll().isExposedInAllOf(set([globalName])) for t - in self.flatMemberTypes): + if not any( + t.unroll().isExposedInAllOf(set([globalName])) + for t in self.flatMemberTypes + ): return False return True @@ -2787,8 +3193,9 @@ class IDLUnionType(IDLType): return self._dictionaryType is not None def hasPossiblyEmptyDictionaryType(self): - return (self._dictionaryType is not None and - self._dictionaryType.inner.canBeEmpty()) + return ( + self._dictionaryType is not None and self._dictionaryType.inner.canBeEmpty() + ) def _getDependentObjects(self): return set(self.memberTypes) @@ -2800,6 +3207,9 @@ class IDLTypedefType(IDLType): self.inner = innerType self.builtin = False + def __hash__(self): + return hash(self.inner) + def __eq__(self, other): return isinstance(other, IDLTypedefType) and self.inner == other.inner @@ -2896,7 +3306,9 @@ class IDLTypedefType(IDLType): return self.inner._getDependentObjects() def withExtendedAttributes(self, attrs): - return IDLTypedefType(self.location, self.inner.withExtendedAttributes(attrs), self.name) + return IDLTypedefType( + self.location, self.inner.withExtendedAttributes(attrs), self.name + ) class IDLTypedef(IDLObjectWithIdentifier): @@ -2922,9 +3334,10 @@ class IDLTypedef(IDLObjectWithIdentifier): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on typedefs", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " "allowed on typedefs", + [attrs[0].location, self.location], + ) def _getDependentObjects(self): return self.innerType._getDependentObjects() @@ -2937,53 +3350,26 @@ class IDLWrapperType(IDLType): self._identifier = inner.identifier self.builtin = False - def __eq__(self, other): - return (isinstance(other, IDLWrapperType) and - self._identifier == other._identifier and - self.builtin == other.builtin) - def __hash__(self): - return hash((self._identifier, self.builtin)) + return hash(self._identifier) + hash(self.builtin) + + def __eq__(self, other): + return ( + isinstance(other, IDLWrapperType) + and self._identifier == other._identifier + and self.builtin == other.builtin + ) def __str__(self): return str(self.name) + " (Wrapper)" - def nullable(self): - return False - - def isPrimitive(self): - return False - - def isString(self): - return False - - def isByteString(self): - return False - - def isDOMString(self): - return False - - def isUSVString(self): - return False - - def isUTF8String(self): - return False - - def isJSString(self): - return False - - def isUndefined(self): - return False - - def isSequence(self): - return False - def isDictionary(self): return isinstance(self.inner, IDLDictionary) def isInterface(self): - return (isinstance(self.inner, IDLInterface) or - isinstance(self.inner, IDLExternalInterface)) + return isinstance(self.inner, IDLInterface) or isinstance( + self.inner, IDLExternalInterface + ) def isCallbackInterface(self): return self.isInterface() and self.inner.isCallback() @@ -3014,8 +3400,11 @@ class IDLWrapperType(IDLType): dictionary = dictionary.parent return True else: - raise WebIDLError("IDLWrapperType wraps type %s that we don't know if " - "is serializable" % type(self.inner), [self.location]) + raise WebIDLError( + "IDLWrapperType wraps type %s that we don't know if " + "is serializable" % type(self.inner), + [self.location], + ) def resolveType(self, parentScope): assert isinstance(parentScope, IDLScope) @@ 
-3042,13 +3431,24 @@ class IDLWrapperType(IDLType): return other.isDistinguishableFrom(self) assert self.isInterface() or self.isEnum() or self.isDictionary() if self.isEnum(): - return (other.isPrimitive() or other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) - if self.isDictionary() and other.nullable(): + return ( + other.isUndefined() + or other.isPrimitive() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) + if self.isDictionary() and (other.nullable() or other.isUndefined()): return False - if (other.isPrimitive() or other.isString() or other.isEnum() or - other.isSequence()): + if ( + other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isSequence() + ): return True if self.isDictionary(): return other.isNonCallbackInterface() @@ -3061,12 +3461,16 @@ class IDLWrapperType(IDLType): assert self.isGeckoInterface() and other.isGeckoInterface() if self.inner.isExternal() or other.unroll().inner.isExternal(): return self != other - return (len(self.inner.interfacesBasedOnSelf & - other.unroll().inner.interfacesBasedOnSelf) == 0 and - (self.isNonCallbackInterface() or - other.isNonCallbackInterface())) - if (other.isDictionary() or other.isCallback() or - other.isRecord()): + return len( + self.inner.interfacesBasedOnSelf + & other.unroll().inner.interfacesBasedOnSelf + ) == 0 and (self.isNonCallbackInterface() or other.isNonCallbackInterface()) + if ( + other.isUndefined() + or other.isDictionary() + or other.isCallback() + or other.isRecord() + ): return self.isNonCallbackInterface() # Not much else |other| can be @@ -3113,9 +3517,14 @@ class IDLPromiseType(IDLParametrizedType): def __init__(self, location, innerType): IDLParametrizedType.__init__(self, location, "Promise", innerType) + def __hash__(self): + return hash(self.promiseInnerType()) + def __eq__(self, other): - return (isinstance(other, IDLPromiseType) and - self.promiseInnerType() == other.promiseInnerType()) + return ( + isinstance(other, IDLPromiseType) + and self.promiseInnerType() == other.promiseInnerType() + ) def __str__(self): return self.inner.__str__() + "Promise" @@ -3133,6 +3542,12 @@ class IDLPromiseType(IDLParametrizedType): return IDLType.Tags.promise def complete(self, scope): + if self.inner.isObservableArray(): + raise WebIDLError( + "The inner type of a promise type must not be an ObservableArray type", + [self.location, self.inner.location], + ) + self.inner = self.promiseInnerType().complete(scope) return self @@ -3155,44 +3570,44 @@ class IDLBuiltinType(IDLType): Types = enum( # The integer types - 'byte', - 'octet', - 'short', - 'unsigned_short', - 'long', - 'unsigned_long', - 'long_long', - 'unsigned_long_long', + "byte", + "octet", + "short", + "unsigned_short", + "long", + "unsigned_long", + "long_long", + "unsigned_long_long", # Additional primitive types - 'boolean', - 'unrestricted_float', - 'float', - 'unrestricted_double', + "boolean", + "unrestricted_float", + "float", + "unrestricted_double", # IMPORTANT: "double" must be the last primitive type listed - 'double', + "double", # Other types - 'any', - 'domstring', - 'bytestring', - 'usvstring', - 'utf8string', - 'jsstring', - 'object', - 'undefined', + "any", + "undefined", + "domstring", + "bytestring", + "usvstring", + "utf8string", + "jsstring", + "object", # Funny stuff - 'ArrayBuffer', - 'ArrayBufferView', - 'Int8Array', - 'Uint8Array', - 
'Uint8ClampedArray', - 'Int16Array', - 'Uint16Array', - 'Int32Array', - 'Uint32Array', - 'Float32Array', - 'Float64Array', - 'ReadableStream', - ) + "ArrayBuffer", + "ArrayBufferView", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "ReadableStream", + ) TagLookup = { Types.byte: IDLType.Tags.int8, @@ -3209,13 +3624,13 @@ class IDLBuiltinType(IDLType): Types.unrestricted_double: IDLType.Tags.unrestricted_double, Types.double: IDLType.Tags.double, Types.any: IDLType.Tags.any, + Types.undefined: IDLType.Tags.undefined, Types.domstring: IDLType.Tags.domstring, Types.bytestring: IDLType.Tags.bytestring, Types.usvstring: IDLType.Tags.usvstring, Types.utf8string: IDLType.Tags.utf8string, Types.jsstring: IDLType.Tags.jsstring, Types.object: IDLType.Tags.object, - Types.undefined: IDLType.Tags.undefined, Types.ArrayBuffer: IDLType.Tags.interface, Types.ArrayBufferView: IDLType.Tags.interface, Types.Int8Array: IDLType.Tags.interface, @@ -3245,13 +3660,13 @@ class IDLBuiltinType(IDLType): Types.unrestricted_double: "unrestricted double", Types.double: "double", Types.any: "any", + Types.undefined: "undefined", Types.domstring: "DOMString", Types.bytestring: "ByteString", Types.usvstring: "USVString", - Types.utf8string: "USVString", # That's what it is in spec terms - Types.jsstring: "USVString", # Again, that's what it is in spec terms + Types.utf8string: "USVString", # That's what it is in spec terms + Types.jsstring: "USVString", # Again, that's what it is in spec terms Types.object: "object", - Types.undefined: "undefined", Types.ArrayBuffer: "ArrayBuffer", Types.ArrayBufferView: "ArrayBufferView", Types.Int8Array: "Int8Array", @@ -3266,12 +3681,21 @@ class IDLBuiltinType(IDLType): Types.ReadableStream: "ReadableStream", } - def __init__(self, location, name, type, clamp=False, enforceRange=False, treatNullAsEmpty=False, - allowShared=False, attrLocation=[]): + def __init__( + self, + location, + name, + type, + clamp=False, + enforceRange=False, + legacyNullToEmptyString=False, + allowShared=False, + attrLocation=[], + ): """ - The mutually exclusive clamp/enforceRange/treatNullAsEmpty/allowShared arguments are used + The mutually exclusive clamp/enforceRange/legacyNullToEmptyString/allowShared arguments are used to create instances of this type with the appropriate attributes attached. Use .clamped(), - .rangeEnforced(), .withTreatNullAs() and .withAllowShared(). + .rangeEnforced(), .withLegacyNullToEmptyString() and .withAllowShared(). attrLocation is an array of source locations of these attributes for error reporting. 
""" @@ -3280,8 +3704,8 @@ class IDLBuiltinType(IDLType): self._typeTag = type self._clamped = None self._rangeEnforced = None - self._withTreatNullAs = None - self._withAllowShared = None; + self._withLegacyNullToEmptyString = None + self._withAllowShared = None if self.isInteger(): if clamp: self._clamp = True @@ -3292,20 +3716,27 @@ class IDLBuiltinType(IDLType): self.name = "RangeEnforced" + self.name self._extendedAttrDict["EnforceRange"] = True elif clamp or enforceRange: - raise WebIDLError("Non-integer types cannot be [Clamp] or [EnforceRange]", attrLocation) + raise WebIDLError( + "Non-integer types cannot be [Clamp] or [EnforceRange]", attrLocation + ) if self.isDOMString() or self.isUTF8String(): - if treatNullAsEmpty: - self.treatNullAsEmpty = True + if legacyNullToEmptyString: + self.legacyNullToEmptyString = True self.name = "NullIsEmpty" + self.name - self._extendedAttrDict["TreatNullAs"] = ["EmptyString"] - elif treatNullAsEmpty: - raise WebIDLError("Non-string types cannot be [TreatNullAs]", attrLocation) + self._extendedAttrDict["LegacyNullToEmptyString"] = True + elif legacyNullToEmptyString: + raise WebIDLError( + "Non-string types cannot be [LegacyNullToEmptyString]", attrLocation + ) if self.isBufferSource(): if allowShared: self._allowShared = True self._extendedAttrDict["AllowShared"] = True elif allowShared: - raise WebIDLError("Types that are not buffer source types cannot be [AllowShared]", attrLocation) + raise WebIDLError( + "Types that are not buffer source types cannot be [AllowShared]", + attrLocation, + ) def __str__(self): if self._allowShared: @@ -3313,41 +3744,51 @@ class IDLBuiltinType(IDLType): return "MaybeShared" + str(self.name) return str(self.name) - def __eq__(self, other): - return other and self.location == other.location and self.name == other.name and self._typeTag == other._typeTag - - def __hash__(self): - return hash((self.location, self.name, self._typeTag)) - def prettyName(self): return IDLBuiltinType.PrettyNames[self._typeTag] def clamped(self, attrLocation): if not self._clamped: - self._clamped = IDLBuiltinType(self.location, self.name, - self._typeTag, clamp=True, - attrLocation=attrLocation) + self._clamped = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + clamp=True, + attrLocation=attrLocation, + ) return self._clamped def rangeEnforced(self, attrLocation): if not self._rangeEnforced: - self._rangeEnforced = IDLBuiltinType(self.location, self.name, - self._typeTag, enforceRange=True, - attrLocation=attrLocation) + self._rangeEnforced = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + enforceRange=True, + attrLocation=attrLocation, + ) return self._rangeEnforced - def withTreatNullAs(self, attrLocation): - if not self._withTreatNullAs: - self._withTreatNullAs = IDLBuiltinType(self.location, self.name, - self._typeTag, treatNullAsEmpty=True, - attrLocation=attrLocation) - return self._withTreatNullAs + def withLegacyNullToEmptyString(self, attrLocation): + if not self._withLegacyNullToEmptyString: + self._withLegacyNullToEmptyString = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + legacyNullToEmptyString=True, + attrLocation=attrLocation, + ) + return self._withLegacyNullToEmptyString def withAllowShared(self, attrLocation): if not self._withAllowShared: - self._withAllowShared = IDLBuiltinType(self.location, self.name, - self._typeTag, allowShared=True, - attrLocation=attrLocation) + self._withAllowShared = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + 
allowShared=True, + attrLocation=attrLocation, + ) return self._withAllowShared def isPrimitive(self): @@ -3356,15 +3797,20 @@ class IDLBuiltinType(IDLType): def isBoolean(self): return self._typeTag == IDLBuiltinType.Types.boolean + def isUndefined(self): + return self._typeTag == IDLBuiltinType.Types.undefined + def isNumeric(self): return self.isPrimitive() and not self.isBoolean() def isString(self): - return (self._typeTag == IDLBuiltinType.Types.domstring or - self._typeTag == IDLBuiltinType.Types.bytestring or - self._typeTag == IDLBuiltinType.Types.usvstring or - self._typeTag == IDLBuiltinType.Types.utf8string or - self._typeTag == IDLBuiltinType.Types.jsstring) + return ( + self._typeTag == IDLBuiltinType.Types.domstring + or self._typeTag == IDLBuiltinType.Types.bytestring + or self._typeTag == IDLBuiltinType.Types.usvstring + or self._typeTag == IDLBuiltinType.Types.utf8string + or self._typeTag == IDLBuiltinType.Types.jsstring + ) def isByteString(self): return self._typeTag == IDLBuiltinType.Types.bytestring @@ -3391,8 +3837,10 @@ class IDLBuiltinType(IDLType): return self._typeTag == IDLBuiltinType.Types.ArrayBufferView def isTypedArray(self): - return (self._typeTag >= IDLBuiltinType.Types.Int8Array and - self._typeTag <= IDLBuiltinType.Types.Float64Array) + return ( + self._typeTag >= IDLBuiltinType.Types.Int8Array + and self._typeTag <= IDLBuiltinType.Types.Float64Array + ) def isReadableStream(self): return self._typeTag == IDLBuiltinType.Types.ReadableStream @@ -3401,25 +3849,31 @@ class IDLBuiltinType(IDLType): # TypedArray things are interface types per the TypedArray spec, # but we handle them as builtins because SpiderMonkey implements # all of it internally. - return (self.isArrayBuffer() or - self.isArrayBufferView() or - self.isTypedArray() or - self.isReadableStream()) + return ( + self.isArrayBuffer() + or self.isArrayBufferView() + or self.isTypedArray() + or self.isReadableStream() + ) def isNonCallbackInterface(self): # All the interfaces we can be are non-callback return self.isInterface() def isFloat(self): - return (self._typeTag == IDLBuiltinType.Types.float or - self._typeTag == IDLBuiltinType.Types.double or - self._typeTag == IDLBuiltinType.Types.unrestricted_float or - self._typeTag == IDLBuiltinType.Types.unrestricted_double) + return ( + self._typeTag == IDLBuiltinType.Types.float + or self._typeTag == IDLBuiltinType.Types.double + or self._typeTag == IDLBuiltinType.Types.unrestricted_float + or self._typeTag == IDLBuiltinType.Types.unrestricted_double + ) def isUnrestricted(self): assert self.isFloat() - return (self._typeTag == IDLBuiltinType.Types.unrestricted_float or - self._typeTag == IDLBuiltinType.Types.unrestricted_double) + return ( + self._typeTag == IDLBuiltinType.Types.unrestricted_float + or self._typeTag == IDLBuiltinType.Types.unrestricted_double + ) def isJSONType(self): return self.isPrimitive() or self.isString() or self.isObject() @@ -3436,48 +3890,85 @@ class IDLBuiltinType(IDLType): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - if self.isBoolean(): - return (other.isNumeric() or other.isString() or other.isEnum() or - other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) - if self.isNumeric(): - return (other.isBoolean() or other.isString() or other.isEnum() or - other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) 
+ if self.isUndefined(): + return not (other.isUndefined() or other.isDictionaryLike()) + if self.isPrimitive(): + if ( + other.isUndefined() + or other.isString() + or other.isEnum() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ): + return True + if self.isBoolean(): + return other.isNumeric() + assert self.isNumeric() + return other.isBoolean() if self.isString(): - return (other.isPrimitive() or other.isInterface() or - other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) + return ( + other.isUndefined() + or other.isPrimitive() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) if self.isAny(): # Can't tell "any" apart from anything return False if self.isObject(): - return other.isPrimitive() or other.isString() or other.isEnum() - if self.isUndefined(): - return not other.isUndefined() + return ( + other.isUndefined() + or other.isPrimitive() + or other.isString() + or other.isEnum() + ) # Not much else we could be! assert self.isSpiderMonkeyInterface() # Like interfaces, but we know we're not a callback - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord() or - (other.isInterface() and ( - # ArrayBuffer is distinguishable from everything - # that's not an ArrayBuffer or a callback interface - (self.isArrayBuffer() and not other.isArrayBuffer()) or - (self.isReadableStream() and not other.isReadableStream()) or - # ArrayBufferView is distinguishable from everything - # that's not an ArrayBufferView or typed array. - (self.isArrayBufferView() and not other.isArrayBufferView() and - not other.isTypedArray()) or - # Typed arrays are distinguishable from everything - # except ArrayBufferView and the same type of typed - # array - (self.isTypedArray() and not other.isArrayBufferView() and not - (other.isTypedArray() and other.name == self.name))))) + return ( + other.isUndefined() + or other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + or ( + other.isInterface() + and ( + # ArrayBuffer is distinguishable from everything + # that's not an ArrayBuffer or a callback interface + (self.isArrayBuffer() and not other.isArrayBuffer()) + or (self.isReadableStream() and not other.isReadableStream()) + or + # ArrayBufferView is distinguishable from everything + # that's not an ArrayBufferView or typed array. 
+ ( + self.isArrayBufferView() + and not other.isArrayBufferView() + and not other.isTypedArray() + ) + or + # Typed arrays are distinguishable from everything + # except ArrayBufferView and the same type of typed + # array + ( + self.isTypedArray() + and not other.isArrayBufferView() + and not (other.isTypedArray() and other.name == self.name) + ) + ) + ) + ) def _getDependentObjects(self): return set() @@ -3488,177 +3979,218 @@ class IDLBuiltinType(IDLType): identifier = attribute.identifier() if identifier == "Clamp": if not attribute.noArguments(): - raise WebIDLError("[Clamp] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[Clamp] must take no arguments", [attribute.location] + ) if ret.hasEnforceRange() or self._enforceRange: - raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive", - [self.location, attribute.location]) + raise WebIDLError( + "[EnforceRange] and [Clamp] are mutually exclusive", + [self.location, attribute.location], + ) ret = self.clamped([self.location, attribute.location]) elif identifier == "EnforceRange": if not attribute.noArguments(): - raise WebIDLError("[EnforceRange] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[EnforceRange] must take no arguments", [attribute.location] + ) if ret.hasClamp() or self._clamp: - raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive", - [self.location, attribute.location]) + raise WebIDLError( + "[EnforceRange] and [Clamp] are mutually exclusive", + [self.location, attribute.location], + ) ret = self.rangeEnforced([self.location, attribute.location]) - elif identifier == "TreatNullAs": + elif identifier == "LegacyNullToEmptyString": if not (self.isDOMString() or self.isUTF8String()): - raise WebIDLError("[TreatNullAs] only allowed on DOMStrings and UTF8Strings", - [self.location, attribute.location]) + raise WebIDLError( + "[LegacyNullToEmptyString] only allowed on DOMStrings and UTF8Strings", + [self.location, attribute.location], + ) assert not self.nullable() - if not attribute.hasValue(): - raise WebIDLError("[TreatNullAs] must take an identifier argument", - [attribute.location]) - value = attribute.value() - if value != 'EmptyString': - raise WebIDLError("[TreatNullAs] must take the identifier " - "'EmptyString', not '%s'" % value, - [attribute.location]) - ret = self.withTreatNullAs([self.location, attribute.location]) + if attribute.hasValue(): + raise WebIDLError( + "[LegacyNullToEmptyString] must take no identifier argument", + [attribute.location], + ) + ret = self.withLegacyNullToEmptyString( + [self.location, attribute.location] + ) elif identifier == "AllowShared": if not attribute.noArguments(): - raise WebIDLError("[AllowShared] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[AllowShared] must take no arguments", [attribute.location] + ) if not self.isBufferSource(): - raise WebIDLError("[AllowShared] only allowed on buffer source types", - [self.location, attribute.location]) + raise WebIDLError( + "[AllowShared] only allowed on buffer source types", + [self.location, attribute.location], + ) ret = self.withAllowShared([self.location, attribute.location]) else: - raise WebIDLError("Unhandled extended attribute on type", - [self.location, attribute.location]) + raise WebIDLError( + "Unhandled extended attribute on type", + [self.location, attribute.location], + ) return ret + BuiltinTypes = { - IDLBuiltinType.Types.byte: - IDLBuiltinType(BuiltinLocation(""), "Byte", - IDLBuiltinType.Types.byte), - 
IDLBuiltinType.Types.octet: - IDLBuiltinType(BuiltinLocation(""), "Octet", - IDLBuiltinType.Types.octet), - IDLBuiltinType.Types.short: - IDLBuiltinType(BuiltinLocation(""), "Short", - IDLBuiltinType.Types.short), - IDLBuiltinType.Types.unsigned_short: - IDLBuiltinType(BuiltinLocation(""), "UnsignedShort", - IDLBuiltinType.Types.unsigned_short), - IDLBuiltinType.Types.long: - IDLBuiltinType(BuiltinLocation(""), "Long", - IDLBuiltinType.Types.long), - IDLBuiltinType.Types.unsigned_long: - IDLBuiltinType(BuiltinLocation(""), "UnsignedLong", - IDLBuiltinType.Types.unsigned_long), - IDLBuiltinType.Types.long_long: - IDLBuiltinType(BuiltinLocation(""), "LongLong", - IDLBuiltinType.Types.long_long), - IDLBuiltinType.Types.unsigned_long_long: - IDLBuiltinType(BuiltinLocation(""), "UnsignedLongLong", - IDLBuiltinType.Types.unsigned_long_long), - IDLBuiltinType.Types.boolean: - IDLBuiltinType(BuiltinLocation(""), "Boolean", - IDLBuiltinType.Types.boolean), - IDLBuiltinType.Types.float: - IDLBuiltinType(BuiltinLocation(""), "Float", - IDLBuiltinType.Types.float), - IDLBuiltinType.Types.unrestricted_float: - IDLBuiltinType(BuiltinLocation(""), "UnrestrictedFloat", - IDLBuiltinType.Types.unrestricted_float), - IDLBuiltinType.Types.double: - IDLBuiltinType(BuiltinLocation(""), "Double", - IDLBuiltinType.Types.double), - IDLBuiltinType.Types.unrestricted_double: - IDLBuiltinType(BuiltinLocation(""), "UnrestrictedDouble", - IDLBuiltinType.Types.unrestricted_double), - IDLBuiltinType.Types.any: - IDLBuiltinType(BuiltinLocation(""), "Any", - IDLBuiltinType.Types.any), - IDLBuiltinType.Types.domstring: - IDLBuiltinType(BuiltinLocation(""), "String", - IDLBuiltinType.Types.domstring), - IDLBuiltinType.Types.bytestring: - IDLBuiltinType(BuiltinLocation(""), "ByteString", - IDLBuiltinType.Types.bytestring), - IDLBuiltinType.Types.usvstring: - IDLBuiltinType(BuiltinLocation(""), "USVString", - IDLBuiltinType.Types.usvstring), - IDLBuiltinType.Types.utf8string: - IDLBuiltinType(BuiltinLocation(""), "UTF8String", - IDLBuiltinType.Types.utf8string), - IDLBuiltinType.Types.jsstring: - IDLBuiltinType(BuiltinLocation(""), "JSString", - IDLBuiltinType.Types.jsstring), - IDLBuiltinType.Types.object: - IDLBuiltinType(BuiltinLocation(""), "Object", - IDLBuiltinType.Types.object), - IDLBuiltinType.Types.undefined: - IDLBuiltinType(BuiltinLocation(""), "Undefined", - IDLBuiltinType.Types.undefined), - IDLBuiltinType.Types.ArrayBuffer: - IDLBuiltinType(BuiltinLocation(""), "ArrayBuffer", - IDLBuiltinType.Types.ArrayBuffer), - IDLBuiltinType.Types.ArrayBufferView: - IDLBuiltinType(BuiltinLocation(""), "ArrayBufferView", - IDLBuiltinType.Types.ArrayBufferView), - IDLBuiltinType.Types.Int8Array: - IDLBuiltinType(BuiltinLocation(""), "Int8Array", - IDLBuiltinType.Types.Int8Array), - IDLBuiltinType.Types.Uint8Array: - IDLBuiltinType(BuiltinLocation(""), "Uint8Array", - IDLBuiltinType.Types.Uint8Array), - IDLBuiltinType.Types.Uint8ClampedArray: - IDLBuiltinType(BuiltinLocation(""), "Uint8ClampedArray", - IDLBuiltinType.Types.Uint8ClampedArray), - IDLBuiltinType.Types.Int16Array: - IDLBuiltinType(BuiltinLocation(""), "Int16Array", - IDLBuiltinType.Types.Int16Array), - IDLBuiltinType.Types.Uint16Array: - IDLBuiltinType(BuiltinLocation(""), "Uint16Array", - IDLBuiltinType.Types.Uint16Array), - IDLBuiltinType.Types.Int32Array: - IDLBuiltinType(BuiltinLocation(""), "Int32Array", - IDLBuiltinType.Types.Int32Array), - IDLBuiltinType.Types.Uint32Array: - IDLBuiltinType(BuiltinLocation(""), "Uint32Array", - 
IDLBuiltinType.Types.Uint32Array), - IDLBuiltinType.Types.Float32Array: - IDLBuiltinType(BuiltinLocation(""), "Float32Array", - IDLBuiltinType.Types.Float32Array), - IDLBuiltinType.Types.Float64Array: - IDLBuiltinType(BuiltinLocation(""), "Float64Array", - IDLBuiltinType.Types.Float64Array), - IDLBuiltinType.Types.ReadableStream: - IDLBuiltinType(BuiltinLocation(""), "ReadableStream", - IDLBuiltinType.Types.ReadableStream), + IDLBuiltinType.Types.byte: IDLBuiltinType( + BuiltinLocation(""), "Byte", IDLBuiltinType.Types.byte + ), + IDLBuiltinType.Types.octet: IDLBuiltinType( + BuiltinLocation(""), "Octet", IDLBuiltinType.Types.octet + ), + IDLBuiltinType.Types.short: IDLBuiltinType( + BuiltinLocation(""), "Short", IDLBuiltinType.Types.short + ), + IDLBuiltinType.Types.unsigned_short: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedShort", + IDLBuiltinType.Types.unsigned_short, + ), + IDLBuiltinType.Types.long: IDLBuiltinType( + BuiltinLocation(""), "Long", IDLBuiltinType.Types.long + ), + IDLBuiltinType.Types.unsigned_long: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedLong", + IDLBuiltinType.Types.unsigned_long, + ), + IDLBuiltinType.Types.long_long: IDLBuiltinType( + BuiltinLocation(""), "LongLong", IDLBuiltinType.Types.long_long + ), + IDLBuiltinType.Types.unsigned_long_long: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedLongLong", + IDLBuiltinType.Types.unsigned_long_long, + ), + IDLBuiltinType.Types.undefined: IDLBuiltinType( + BuiltinLocation(""), "Undefined", IDLBuiltinType.Types.undefined + ), + IDLBuiltinType.Types.boolean: IDLBuiltinType( + BuiltinLocation(""), "Boolean", IDLBuiltinType.Types.boolean + ), + IDLBuiltinType.Types.float: IDLBuiltinType( + BuiltinLocation(""), "Float", IDLBuiltinType.Types.float + ), + IDLBuiltinType.Types.unrestricted_float: IDLBuiltinType( + BuiltinLocation(""), + "UnrestrictedFloat", + IDLBuiltinType.Types.unrestricted_float, + ), + IDLBuiltinType.Types.double: IDLBuiltinType( + BuiltinLocation(""), "Double", IDLBuiltinType.Types.double + ), + IDLBuiltinType.Types.unrestricted_double: IDLBuiltinType( + BuiltinLocation(""), + "UnrestrictedDouble", + IDLBuiltinType.Types.unrestricted_double, + ), + IDLBuiltinType.Types.any: IDLBuiltinType( + BuiltinLocation(""), "Any", IDLBuiltinType.Types.any + ), + IDLBuiltinType.Types.domstring: IDLBuiltinType( + BuiltinLocation(""), "String", IDLBuiltinType.Types.domstring + ), + IDLBuiltinType.Types.bytestring: IDLBuiltinType( + BuiltinLocation(""), "ByteString", IDLBuiltinType.Types.bytestring + ), + IDLBuiltinType.Types.usvstring: IDLBuiltinType( + BuiltinLocation(""), "USVString", IDLBuiltinType.Types.usvstring + ), + IDLBuiltinType.Types.utf8string: IDLBuiltinType( + BuiltinLocation(""), "UTF8String", IDLBuiltinType.Types.utf8string + ), + IDLBuiltinType.Types.jsstring: IDLBuiltinType( + BuiltinLocation(""), "JSString", IDLBuiltinType.Types.jsstring + ), + IDLBuiltinType.Types.object: IDLBuiltinType( + BuiltinLocation(""), "Object", IDLBuiltinType.Types.object + ), + IDLBuiltinType.Types.ArrayBuffer: IDLBuiltinType( + BuiltinLocation(""), + "ArrayBuffer", + IDLBuiltinType.Types.ArrayBuffer, + ), + IDLBuiltinType.Types.ArrayBufferView: IDLBuiltinType( + BuiltinLocation(""), + "ArrayBufferView", + IDLBuiltinType.Types.ArrayBufferView, + ), + IDLBuiltinType.Types.Int8Array: IDLBuiltinType( + BuiltinLocation(""), "Int8Array", IDLBuiltinType.Types.Int8Array + ), + IDLBuiltinType.Types.Uint8Array: IDLBuiltinType( + BuiltinLocation(""), "Uint8Array", IDLBuiltinType.Types.Uint8Array + ), + 
IDLBuiltinType.Types.Uint8ClampedArray: IDLBuiltinType( + BuiltinLocation(""), + "Uint8ClampedArray", + IDLBuiltinType.Types.Uint8ClampedArray, + ), + IDLBuiltinType.Types.Int16Array: IDLBuiltinType( + BuiltinLocation(""), "Int16Array", IDLBuiltinType.Types.Int16Array + ), + IDLBuiltinType.Types.Uint16Array: IDLBuiltinType( + BuiltinLocation(""), + "Uint16Array", + IDLBuiltinType.Types.Uint16Array, + ), + IDLBuiltinType.Types.Int32Array: IDLBuiltinType( + BuiltinLocation(""), "Int32Array", IDLBuiltinType.Types.Int32Array + ), + IDLBuiltinType.Types.Uint32Array: IDLBuiltinType( + BuiltinLocation(""), + "Uint32Array", + IDLBuiltinType.Types.Uint32Array, + ), + IDLBuiltinType.Types.Float32Array: IDLBuiltinType( + BuiltinLocation(""), + "Float32Array", + IDLBuiltinType.Types.Float32Array, + ), + IDLBuiltinType.Types.Float64Array: IDLBuiltinType( + BuiltinLocation(""), + "Float64Array", + IDLBuiltinType.Types.Float64Array, + ), + IDLBuiltinType.Types.ReadableStream: IDLBuiltinType( + BuiltinLocation(""), + "ReadableStream", + IDLBuiltinType.Types.ReadableStream, + ), } integerTypeSizes = { IDLBuiltinType.Types.byte: (-128, 127), - IDLBuiltinType.Types.octet: (0, 255), + IDLBuiltinType.Types.octet: (0, 255), IDLBuiltinType.Types.short: (-32768, 32767), IDLBuiltinType.Types.unsigned_short: (0, 65535), IDLBuiltinType.Types.long: (-2147483648, 2147483647), IDLBuiltinType.Types.unsigned_long: (0, 4294967295), IDLBuiltinType.Types.long_long: (-9223372036854775808, 9223372036854775807), - IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615) + IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615), } def matchIntegerValueToType(value): - for type, extremes in list(integerTypeSizes.items()): + for type, extremes in integerTypeSizes.items(): (min, max) = extremes if value <= max and value >= min: return BuiltinTypes[type] return None + class NoCoercionFoundError(WebIDLError): """ A class we use to indicate generic coercion failures because none of the types worked out in IDLValue.coerceToType. """ + class IDLValue(IDLObject): def __init__(self, location, type, value): IDLObject.__init__(self, location) @@ -3693,8 +4225,9 @@ class IDLValue(IDLObject): # non-WebIDLErrors here, because those can just happen if # "type" is not something that can have a default value at # all. - if (isinstance(e, WebIDLError) and - not isinstance(e, NoCoercionFoundError)): + if isinstance(e, WebIDLError) and not isinstance( + e, NoCoercionFoundError + ): raise e # If the type allows null, rerun this matching on the inner type, except @@ -3713,29 +4246,41 @@ class IDLValue(IDLObject): # Promote return IDLValue(self.location, type, self.value) else: - raise WebIDLError("Value %s is out of range for type %s." % - (self.value, type), [location]) + raise WebIDLError( + "Value %s is out of range for type %s." % (self.value, type), + [location], + ) elif self.type.isInteger() and type.isFloat(): # Convert an integer literal into float - if -2**24 <= self.value <= 2**24: + if -(2 ** 24) <= self.value <= 2 ** 24: return IDLValue(self.location, type, float(self.value)) else: - raise WebIDLError("Converting value %s to %s will lose precision." % - (self.value, type), [location]) + raise WebIDLError( + "Converting value %s to %s will lose precision." 
+ % (self.value, type), + [location], + ) elif self.type.isString() and type.isEnum(): # Just keep our string, but make sure it's a valid value for this enum enum = type.unroll().inner - if self.value not in list(enum.values()): - raise WebIDLError("'%s' is not a valid default value for enum %s" - % (self.value, enum.identifier.name), - [location, enum.location]) + if self.value not in enum.values(): + raise WebIDLError( + "'%s' is not a valid default value for enum %s" + % (self.value, enum.identifier.name), + [location, enum.location], + ) return self elif self.type.isFloat() and type.isFloat(): - if (not type.isUnrestricted() and - (self.value == float("inf") or self.value == float("-inf") or - math.isnan(self.value))): - raise WebIDLError("Trying to convert unrestricted value %s to non-unrestricted" - % self.value, [location]) + if not type.isUnrestricted() and ( + self.value == float("inf") + or self.value == float("-inf") + or math.isnan(self.value) + ): + raise WebIDLError( + "Trying to convert unrestricted value %s to non-unrestricted" + % self.value, + [location], + ) return IDLValue(self.location, type, self.value) elif self.type.isString() and type.isUSVString(): # Allow USVStrings to use default value just like @@ -3744,27 +4289,35 @@ class IDLValue(IDLObject): # extra normalization step. assert self.type.isDOMString() return self - elif self.type.isDOMString() and type.treatNullAsEmpty: - # TreatNullAsEmpty is a different type for resolution reasons, - # however once you have a value it doesn't matter - return self - elif self.type.isString() and (type.isByteString() or type.isJSString() or type.isUTF8String()): + elif self.type.isString() and ( + type.isByteString() or type.isJSString() or type.isUTF8String() + ): # Allow ByteStrings, UTF8String, and JSStrings to use a default # value like DOMString. # No coercion is required as Codegen.py will handle the # extra steps. We want to make sure that our string contains # only valid characters, so we check that here. - valid_ascii_lit = " " + string.ascii_letters + string.digits + string.punctuation + valid_ascii_lit = ( + " " + string.ascii_letters + string.digits + string.punctuation + ) for idx, c in enumerate(self.value): if c not in valid_ascii_lit: - raise WebIDLError("Coercing this string literal %s to a ByteString is not supported yet. " - "Coercion failed due to an unsupported byte %d at index %d." - % (self.value.__repr__(), ord(c), idx), [location]) + raise WebIDLError( + "Coercing this string literal %s to a ByteString is not supported yet. " + "Coercion failed due to an unsupported byte %d at index %d." + % (self.value.__repr__(), ord(c), idx), + [location], + ) return IDLValue(self.location, type, self.value) + elif self.type.isDOMString() and type.legacyNullToEmptyString: + # LegacyNullToEmptyString is a different type for resolution reasons, + # however once you have a value it doesn't matter + return self - raise NoCoercionFoundError("Cannot coerce type %s to type %s." % - (self.type, type), [location]) + raise NoCoercionFoundError( + "Cannot coerce type %s to type %s." % (self.type, type), [location] + ) def _getDependentObjects(self): return set() @@ -3777,11 +4330,12 @@ class IDLNullValue(IDLObject): self.value = None def coerceToType(self, type, location): - if (not isinstance(type, IDLNullableType) and - not (type.isUnion() and type.hasNullableType) and - not type.isAny()): - raise WebIDLError("Cannot coerce null value to type %s." 
% type, - [location]) + if ( + not isinstance(type, IDLNullableType) + and not (type.isUnion() and type.hasNullableType) + and not type.isAny() + ): + raise WebIDLError("Cannot coerce null value to type %s." % type, [location]) nullValue = IDLNullValue(self.location) if type.isUnion() and not type.nullable() and type.hasDictionaryType(): @@ -3816,8 +4370,9 @@ class IDLEmptySequenceValue(IDLObject): pass if not type.isSequence(): - raise WebIDLError("Cannot coerce empty sequence value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce empty sequence value to type %s." % type, [location] + ) emptySequenceValue = IDLEmptySequenceValue(self.location) emptySequenceValue.type = type @@ -3845,8 +4400,9 @@ class IDLDefaultDictionaryValue(IDLObject): pass if not type.isDictionary(): - raise WebIDLError("Cannot coerce default dictionary value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce default dictionary value to type %s." % type, [location] + ) defaultDictionaryValue = IDLDefaultDictionaryValue(self.location) defaultDictionaryValue.type = type @@ -3864,8 +4420,9 @@ class IDLUndefinedValue(IDLObject): def coerceToType(self, type, location): if not type.isAny(): - raise WebIDLError("Cannot coerce undefined value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce undefined value to type %s." % type, [location] + ) undefinedValue = IDLUndefinedValue(self.location) undefinedValue.type = type @@ -3878,17 +4435,10 @@ class IDLUndefinedValue(IDLObject): class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins): Tags = enum( - 'Const', - 'Attr', - 'Method', - 'MaplikeOrSetlike', - 'Iterable' + "Const", "Attr", "Method", "MaplikeOrSetlike", "AsyncIterable", "Iterable" ) - Special = enum( - 'Static', - 'Stringifier' - ) + Special = enum("Static", "Stringifier") AffectsValues = ("Nothing", "Everything") DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything") @@ -3912,8 +4462,11 @@ class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins): return self.tag == IDLInterfaceMember.Tags.Const def isMaplikeOrSetlikeOrIterable(self): - return (self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike or - self.tag == IDLInterfaceMember.Tags.Iterable) + return ( + self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike + or self.tag == IDLInterfaceMember.Tags.AsyncIterable + or self.tag == IDLInterfaceMember.Tags.Iterable + ) def isMaplikeOrSetlike(self): return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike @@ -3922,7 +4475,9 @@ class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins): for attr in attrs: self.handleExtendedAttribute(attr) attrlist = attr.listValue() - self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True + self._extendedAttrDict[attr.identifier()] = ( + attrlist if len(attrlist) else True + ) def handleExtendedAttribute(self, attr): pass @@ -3936,66 +4491,84 @@ class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins): def validate(self): if self.isAttr() or self.isMethod(): if self.affects == "Everything" and self.dependsOn != "Everything": - raise WebIDLError("Interface member is flagged as affecting " - "everything but not depending on everything. " - "That seems rather unlikely.", - [self.location]) + raise WebIDLError( + "Interface member is flagged as affecting " + "everything but not depending on everything. 
" + "That seems rather unlikely.", + [self.location], + ) if self.getExtendedAttribute("NewObject"): if self.dependsOn == "Nothing" or self.dependsOn == "DOMState": - raise WebIDLError("A [NewObject] method is not idempotent, " - "so it has to depend on something other than DOM state.", - [self.location]) - if (self.getExtendedAttribute("Cached") or - self.getExtendedAttribute("StoreInSlot")): - raise WebIDLError("A [NewObject] attribute shouldnt be " - "[Cached] or [StoreInSlot], since the point " - "of those is to keep returning the same " - "thing across multiple calls, which is not " - "what [NewObject] does.", - [self.location]) + raise WebIDLError( + "A [NewObject] method is not idempotent, " + "so it has to depend on something other than DOM state.", + [self.location], + ) + if self.getExtendedAttribute("Cached") or self.getExtendedAttribute( + "StoreInSlot" + ): + raise WebIDLError( + "A [NewObject] attribute shouldnt be " + "[Cached] or [StoreInSlot], since the point " + "of those is to keep returning the same " + "thing across multiple calls, which is not " + "what [NewObject] does.", + [self.location], + ) def _setDependsOn(self, dependsOn): if self.dependsOn != "Everything": - raise WebIDLError("Trying to specify multiple different DependsOn, " - "Pure, or Constant extended attributes for " - "attribute", [self.location]) + raise WebIDLError( + "Trying to specify multiple different DependsOn, " + "Pure, or Constant extended attributes for " + "attribute", + [self.location], + ) if dependsOn not in IDLInterfaceMember.DependsOnValues: - raise WebIDLError("Invalid [DependsOn=%s] on attribute" % dependsOn, - [self.location]) + raise WebIDLError( + "Invalid [DependsOn=%s] on attribute" % dependsOn, [self.location] + ) self.dependsOn = dependsOn def _setAffects(self, affects): if self.affects != "Everything": - raise WebIDLError("Trying to specify multiple different Affects, " - "Pure, or Constant extended attributes for " - "attribute", [self.location]) + raise WebIDLError( + "Trying to specify multiple different Affects, " + "Pure, or Constant extended attributes for " + "attribute", + [self.location], + ) if affects not in IDLInterfaceMember.AffectsValues: - raise WebIDLError("Invalid [Affects=%s] on attribute" % dependsOn, - [self.location]) + raise WebIDLError( + "Invalid [Affects=%s] on attribute" % dependsOn, [self.location] + ) self.affects = affects def _addAlias(self, alias): if alias in self.aliases: - raise WebIDLError("Duplicate [Alias=%s] on attribute" % alias, - [self.location]) + raise WebIDLError( + "Duplicate [Alias=%s] on attribute" % alias, [self.location] + ) self.aliases.append(alias) def _addBindingAlias(self, bindingAlias): if bindingAlias in self.bindingAliases: - raise WebIDLError("Duplicate [BindingAlias=%s] on attribute" % bindingAlias, - [self.location]) + raise WebIDLError( + "Duplicate [BindingAlias=%s] on attribute" % bindingAlias, + [self.location], + ) self.bindingAliases.append(bindingAlias) -class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): +class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): def __init__(self, location, identifier, ifaceType, keyType, valueType, ifaceKind): IDLInterfaceMember.__init__(self, location, identifier, ifaceKind) if keyType is not None: assert isinstance(keyType, IDLType) else: assert valueType is not None - assert ifaceType in ['maplike', 'setlike', 'iterable'] + assert ifaceType in ["maplike", "setlike", "iterable", "asynciterable"] if valueType is not None: assert isinstance(valueType, IDLType) 
self.keyType = keyType @@ -4013,6 +4586,9 @@ class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): def isIterable(self): return self.maplikeOrSetlikeOrIterableType == "iterable" + def isAsyncIterable(self): + return self.maplikeOrSetlikeOrIterableType == "asynciterable" + def hasKeyType(self): return self.keyType is not None @@ -4022,28 +4598,42 @@ class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): def checkCollisions(self, members, isAncestor): for member in members: # Check that there are no disallowed members - if (member.identifier.name in self.disallowedMemberNames and - not ((member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod()) or - (member.isAttr() and member.isMaplikeOrSetlikeAttr()))): - raise WebIDLError("Member '%s' conflicts " - "with reserved %s name." % - (member.identifier.name, - self.maplikeOrSetlikeOrIterableType), - [self.location, member.location]) + if member.identifier.name in self.disallowedMemberNames and not ( + (member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod()) + or (member.isAttr() and member.isMaplikeOrSetlikeAttr()) + ): + raise WebIDLError( + "Member '%s' conflicts " + "with reserved %s name." + % (member.identifier.name, self.maplikeOrSetlikeOrIterableType), + [self.location, member.location], + ) # Check that there are no disallowed non-method members. # Ancestor members are always disallowed here; own members # are disallowed only if they're non-methods. - if ((isAncestor or member.isAttr() or member.isConst()) and - member.identifier.name in self.disallowedNonMethodNames): - raise WebIDLError("Member '%s' conflicts " - "with reserved %s method." % - (member.identifier.name, - self.maplikeOrSetlikeOrIterableType), - [self.location, member.location]) + if ( + isAncestor or member.isAttr() or member.isConst() + ) and member.identifier.name in self.disallowedNonMethodNames: + raise WebIDLError( + "Member '%s' conflicts " + "with reserved %s method." + % (member.identifier.name, self.maplikeOrSetlikeOrIterableType), + [self.location, member.location], + ) - def addMethod(self, name, members, allowExistingOperations, returnType, args=[], - chromeOnly=False, isPure=False, affectsNothing=False, newObject=False, - isIteratorAlias=False): + def addMethod( + self, + name, + members, + allowExistingOperations, + returnType, + args=[], + chromeOnly=False, + isPure=False, + affectsNothing=False, + newObject=False, + isIteratorAlias=False, + ): """ Create an IDLMethod based on the parameters passed in. 
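For orientation: the addMethod() helper whose docstring closes the hunk above attaches a fixed set of extended attributes to every operation synthesized for maplike/setlike/iterable/asynciterable declarations, and the next hunk shows those branches. The following is a minimal, self-contained Python sketch of that decision table as a reading aid; the function name synthesized_extended_attributes and its keyword arguments are illustrative stand-ins, not the parser's real IDLMethod/IDLExtendedAttribute API.

    def synthesized_extended_attributes(chrome_only=False, is_pure=False,
                                        affects_nothing=False, new_object=False,
                                        is_iterator_alias=False, is_async=False):
        # Sketch of the branches in addMethod(): every synthesized method may
        # throw; keys/values/entries return a fresh object per call, so they
        # depend on everything but affect nothing; the iterator alias is
        # @@iterator for iterable and @@asyncIterator for async iterable.
        attrs = [("Throws",)]
        if chrome_only:
            attrs.append(("ChromeOnly",))
        if is_pure:
            attrs.append(("Pure",))
        if affects_nothing:
            attrs.append(("DependsOn", "Everything"))
            attrs.append(("Affects", "Nothing"))
        if new_object:
            attrs.append(("NewObject",))
        if is_iterator_alias:
            attrs.append(("Alias", "@@asyncIterator" if is_async else "@@iterator"))
        return attrs

    # e.g. the entries() synthesized for a pair iterable:
    # [('Throws',), ('DependsOn', 'Everything'), ('Affects', 'Nothing'),
    #  ('NewObject',), ('Alias', '@@iterator')]
    print(synthesized_extended_attributes(affects_nothing=True, new_object=True,
                                          is_iterator_alias=True))

Note that the new code no longer marks maplike/setlike methods [NonEnumerable]; that branch is removed in the hunk below.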
@@ -4082,35 +4672,47 @@ class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): for m in members: if m.identifier.name == name and m.isMethod() and not m.isStatic(): return - method = IDLMethod(self.location, - IDLUnresolvedIdentifier(self.location, name, allowDoubleUnderscore=chromeOnly), - returnType, args, maplikeOrSetlikeOrIterable=self) + method = IDLMethod( + self.location, + IDLUnresolvedIdentifier( + self.location, name, allowDoubleUnderscore=chromeOnly + ), + returnType, + args, + maplikeOrSetlikeOrIterable=self, + ) # We need to be able to throw from declaration methods - method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Throws",))]) + method.addExtendedAttributes([IDLExtendedAttribute(self.location, ("Throws",))]) if chromeOnly: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("ChromeOnly",))]) + [IDLExtendedAttribute(self.location, ("ChromeOnly",))] + ) if isPure: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Pure",))]) + [IDLExtendedAttribute(self.location, ("Pure",))] + ) # Following attributes are used for keys/values/entries. Can't mark # them pure, since they return a new object each time they are run. if affectsNothing: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("DependsOn", "Everything")), - IDLExtendedAttribute(self.location, ("Affects", "Nothing"))]) + [ + IDLExtendedAttribute(self.location, ("DependsOn", "Everything")), + IDLExtendedAttribute(self.location, ("Affects", "Nothing")), + ] + ) if newObject: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NewObject",))]) + [IDLExtendedAttribute(self.location, ("NewObject",))] + ) if isIteratorAlias: - method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))]) - # Methods generated for iterables should be enumerable, but the ones for - # maplike/setlike should not be. - if not self.isIterable(): - method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NonEnumerable",))]) + if not self.isAsyncIterable(): + method.addExtendedAttributes( + [IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))] + ) + else: + method.addExtendedAttributes( + [IDLExtendedAttribute(self.location, ("Alias", "@@asyncIterator"))] + ) members.append(method) def resolve(self, parentScope): @@ -4151,30 +4753,47 @@ class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): return deps def getForEachArguments(self): - return [IDLArgument(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), - "callback"), - BuiltinTypes[IDLBuiltinType.Types.object]), - IDLArgument(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), - "thisArg"), - BuiltinTypes[IDLBuiltinType.Types.any], - optional=True)] + return [ + IDLArgument( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "callback" + ), + BuiltinTypes[IDLBuiltinType.Types.object], + ), + IDLArgument( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "thisArg" + ), + BuiltinTypes[IDLBuiltinType.Types.any], + optional=True, + ), + ] + # Iterable adds ES6 iterator style functions and traits # (keys/values/entries/@@iterator) to an interface. 
class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase): - - def __init__(self, location, identifier, keyType, valueType=None, scope=None): - IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier, - "iterable", keyType, valueType, - IDLInterfaceMember.Tags.Iterable) + def __init__(self, location, identifier, keyType, valueType, scope): + IDLMaplikeOrSetlikeOrIterableBase.__init__( + self, + location, + identifier, + "iterable", + keyType, + valueType, + IDLInterfaceMember.Tags.Iterable, + ) self.iteratorType = None def __str__(self): - return "declared iterable with key '%s' and value '%s'" % (self.keyType, self.valueType) + return "declared iterable with key '%s' and value '%s'" % ( + self.keyType, + self.valueType, + ) - def expand(self, members, isJSImplemented): + def expand(self, members): """ In order to take advantage of all of the method machinery in Codegen, we generate our functions as if they were part of the interface @@ -4186,20 +4805,42 @@ class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase): return # object entries() - self.addMethod("entries", members, False, self.iteratorType, - affectsNothing=True, newObject=True, - isIteratorAlias=True) + self.addMethod( + "entries", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + isIteratorAlias=True, + ) # object keys() - self.addMethod("keys", members, False, self.iteratorType, - affectsNothing=True, newObject=True) + self.addMethod( + "keys", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + ) # object values() - self.addMethod("values", members, False, self.iteratorType, - affectsNothing=True, newObject=True) + self.addMethod( + "values", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + ) # undefined forEach(callback(valueType, keyType), optional any thisArg) - self.addMethod("forEach", members, False, - BuiltinTypes[IDLBuiltinType.Types.undefined], - self.getForEachArguments()) + self.addMethod( + "forEach", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.undefined], + self.getForEachArguments(), + ) def isValueIterator(self): return not self.isPairIterator() @@ -4207,98 +4848,220 @@ class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase): def isPairIterator(self): return self.hasKeyType() + +class IDLAsyncIterable(IDLMaplikeOrSetlikeOrIterableBase): + def __init__(self, location, identifier, keyType, valueType, argList, scope): + for arg in argList: + if not arg.optional: + raise WebIDLError( + "The arguments of the asynchronously iterable declaration on " + "%s must all be optional arguments." % identifier, + [arg.location], + ) + + IDLMaplikeOrSetlikeOrIterableBase.__init__( + self, + location, + identifier, + "asynciterable", + keyType, + valueType, + IDLInterfaceMember.Tags.AsyncIterable, + ) + self.iteratorType = None + self.argList = argList + + def __str__(self): + return "declared async iterable with key '%s' and value '%s'" % ( + self.keyType, + self.valueType, + ) + + def expand(self, members): + """ + In order to take advantage of all of the method machinery in Codegen, + we generate our functions as if they were part of the interface + specification during parsing. + """ + # object values() + self.addMethod( + "values", + members, + False, + self.iteratorType, + self.argList, + affectsNothing=True, + newObject=True, + isIteratorAlias=(not self.isPairIterator()), + ) + + # We only need to add entries/keys here if we're a pair iterator. 
+ if not self.isPairIterator(): + return + + # Methods can't share their IDLArguments, so we need to make copies here. + def copyArgList(argList): + return map(copy.copy, argList) + + # object entries() + self.addMethod( + "entries", + members, + False, + self.iteratorType, + copyArgList(self.argList), + affectsNothing=True, + newObject=True, + isIteratorAlias=True, + ) + # object keys() + self.addMethod( + "keys", + members, + False, + self.iteratorType, + copyArgList(self.argList), + affectsNothing=True, + newObject=True, + ) + + def isValueIterator(self): + return not self.isPairIterator() + + def isPairIterator(self): + return self.hasKeyType() + + # MaplikeOrSetlike adds ES6 map-or-set-like traits to an interface. class IDLMaplikeOrSetlike(IDLMaplikeOrSetlikeOrIterableBase): - - def __init__(self, location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType): - IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier, maplikeOrSetlikeType, - keyType, valueType, IDLInterfaceMember.Tags.MaplikeOrSetlike) + def __init__( + self, location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ): + IDLMaplikeOrSetlikeOrIterableBase.__init__( + self, + location, + identifier, + maplikeOrSetlikeType, + keyType, + valueType, + IDLInterfaceMember.Tags.MaplikeOrSetlike, + ) self.readonly = readonly self.slotIndices = None # When generating JSAPI access code, we need to know the backing object # type prefix to create the correct function. Generate here for reuse. if self.isMaplike(): - self.prefix = 'Map' + self.prefix = "Map" elif self.isSetlike(): - self.prefix = 'Set' + self.prefix = "Set" def __str__(self): - return "declared '%s' with key '%s'" % (self.maplikeOrSetlikeOrIterableType, self.keyType) + return "declared '%s' with key '%s'" % ( + self.maplikeOrSetlikeOrIterableType, + self.keyType, + ) - def expand(self, members, isJSImplemented): + def expand(self, members): """ In order to take advantage of all of the method machinery in Codegen, we generate our functions as if they were part of the interface specification during parsing. """ # Both maplike and setlike have a size attribute - sizeAttr = IDLAttribute(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), "size"), - BuiltinTypes[IDLBuiltinType.Types.unsigned_long], - True, - maplikeOrSetlike=self) - # This should be non-enumerable. 
- sizeAttr.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NonEnumerable",))]) - members.append(sizeAttr) + members.append( + IDLAttribute( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "size" + ), + BuiltinTypes[IDLBuiltinType.Types.unsigned_long], + True, + maplikeOrSetlike=self, + ) + ) self.reserved_ro_names = ["size"] self.disallowedMemberNames.append("size") # object entries() - self.addMethod("entries", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True, isIteratorAlias=self.isMaplike()) + self.addMethod( + "entries", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + isIteratorAlias=self.isMaplike(), + ) # object keys() - self.addMethod("keys", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True) + self.addMethod( + "keys", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + ) # object values() - self.addMethod("values", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True, isIteratorAlias=self.isSetlike()) + self.addMethod( + "values", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + isIteratorAlias=self.isSetlike(), + ) # undefined forEach(callback(valueType, keyType), thisVal) - self.addMethod("forEach", members, False, BuiltinTypes[IDLBuiltinType.Types.undefined], - self.getForEachArguments()) + self.addMethod( + "forEach", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.undefined], + self.getForEachArguments(), + ) def getKeyArg(): - return IDLArgument(self.location, - IDLUnresolvedIdentifier(self.location, "key"), - self.keyType) + return IDLArgument( + self.location, + IDLUnresolvedIdentifier(self.location, "key"), + self.keyType, + ) # boolean has(keyType key) - self.addMethod("has", members, False, BuiltinTypes[IDLBuiltinType.Types.boolean], - [getKeyArg()], isPure=True) + self.addMethod( + "has", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.boolean], + [getKeyArg()], + isPure=True, + ) if not self.readonly: # undefined clear() - self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.undefined], - []) + self.addMethod( + "clear", members, True, BuiltinTypes[IDLBuiltinType.Types.undefined], [] + ) # boolean delete(keyType key) - self.addMethod("delete", members, True, - BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()]) - - # Always generate underscored functions (e.g. __add, __clear) for js - # implemented interfaces as convenience functions. - if isJSImplemented: - # undefined clear() - self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.undefined], - [], chromeOnly=True) - # boolean delete(keyType key) - self.addMethod("delete", members, True, - BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()], - chromeOnly=True) + self.addMethod( + "delete", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.boolean], + [getKeyArg()], + ) if self.isSetlike(): if not self.readonly: # Add returns the set object it just added to. # object add(keyType key) - self.addMethod("add", members, True, - BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()]) - if isJSImplemented: - self.addMethod("add", members, True, - BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()], - chromeOnly=True) + self.addMethod( + "add", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg()], + ) return # If we get this far, we're a maplike declaration. 
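To summarize the maplike/setlike expansion in the hunk above together with the maplike-specific get()/set() additions in the next hunk, here is a minimal, self-contained sketch of which members end up synthesized for each declaration kind. maplike_setlike_members is an illustrative, name-level helper only (the real expand() appends IDLAttribute/IDLMethod objects), and it omits the chrome-only __clear/__delete/__add/__set convenience methods that the removed isJSImplemented branches used to generate.

    def maplike_setlike_members(kind, readonly):
        # Name-level sketch of what IDLMaplikeOrSetlike.expand() synthesizes.
        assert kind in ("maplike", "setlike")
        members = ["size",                       # readonly unsigned long attribute
                   "entries", "keys", "values",  # iterator-style methods
                   "forEach", "has"]
        if not readonly:
            members += ["clear", "delete"]
        if kind == "setlike":
            if not readonly:
                members.append("add")            # add() returns the set object
            return members
        members.append("get")                    # maplike-only, returns any
        if not readonly:
            members.append("set")
        return members

    print(maplike_setlike_members("maplike", readonly=False))
    # ['size', 'entries', 'keys', 'values', 'forEach', 'has',
    #  'clear', 'delete', 'get', 'set']
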
@@ -4311,39 +5074,52 @@ class IDLMaplikeOrSetlike(IDLMaplikeOrSetlikeOrIterableBase): # # TODO: Bug 1155340 may change this to use specific type to provide # more info to JIT. - self.addMethod("get", members, False, BuiltinTypes[IDLBuiltinType.Types.any], - [getKeyArg()], isPure=True) + self.addMethod( + "get", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.any], + [getKeyArg()], + isPure=True, + ) def getValueArg(): - return IDLArgument(self.location, - IDLUnresolvedIdentifier(self.location, "value"), - self.valueType) + return IDLArgument( + self.location, + IDLUnresolvedIdentifier(self.location, "value"), + self.valueType, + ) if not self.readonly: - self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object], - [getKeyArg(), getValueArg()]) - if isJSImplemented: - self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object], - [getKeyArg(), getValueArg()], chromeOnly=True) + self.addMethod( + "set", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg(), getValueArg()], + ) + class IDLConst(IDLInterfaceMember): def __init__(self, location, identifier, type, value): - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Const) + IDLInterfaceMember.__init__( + self, location, identifier, IDLInterfaceMember.Tags.Const + ) assert isinstance(type, IDLType) if type.isDictionary(): - raise WebIDLError("A constant cannot be of a dictionary type", - [self.location]) + raise WebIDLError( + "A constant cannot be of a dictionary type", [self.location] + ) if type.isRecord(): - raise WebIDLError("A constant cannot be of a record type", - [self.location]) + raise WebIDLError("A constant cannot be of a record type", [self.location]) self.type = type self.value = value if identifier.name == "prototype": - raise WebIDLError("The identifier of a constant must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a constant must not be 'prototype'", [location] + ) def __str__(self): return "'%s' const '%s'" % (self.type, self.identifier) @@ -4375,17 +5151,21 @@ class IDLConst(IDLInterfaceMember): identifier = attr.identifier() if identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) - elif (identifier == "Pref" or - identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "NonEnumerable" or - identifier == "NeedsWindowsUndef"): + elif ( + identifier == "Pref" + or identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "Trial" + or identifier == "SecureContext" + or identifier == "NonEnumerable" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on constant" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on constant" % identifier, + [attr.location], + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def _getDependentObjects(self): @@ -4393,35 +5173,53 @@ class IDLConst(IDLInterfaceMember): class IDLAttribute(IDLInterfaceMember): - def __init__(self, location, identifier, type, readonly, inherit=False, - static=False, stringifier=False, maplikeOrSetlike=None, - extendedAttrDict=None): - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Attr, - extendedAttrDict=extendedAttrDict) + def __init__( + self, + location, + identifier, + type, + readonly, + inherit=False, + static=False, + stringifier=False, + maplikeOrSetlike=None, + 
extendedAttrDict=None, + ): + IDLInterfaceMember.__init__( + self, + location, + identifier, + IDLInterfaceMember.Tags.Attr, + extendedAttrDict=extendedAttrDict, + ) assert isinstance(type, IDLType) self.type = type self.readonly = readonly self.inherit = inherit self._static = static - self.lenientThis = False - self._unforgeable = False + self.legacyLenientThis = False + self._legacyUnforgeable = False self.stringifier = stringifier self.slotIndices = None - assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike) + assert maplikeOrSetlike is None or isinstance( + maplikeOrSetlike, IDLMaplikeOrSetlike + ) self.maplikeOrSetlike = maplikeOrSetlike self.dependsOn = "Everything" self.affects = "Everything" self.bindingAliases = [] if static and identifier.name == "prototype": - raise WebIDLError("The identifier of a static attribute must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a static attribute must not be 'prototype'", + [location], + ) if readonly and inherit: - raise WebIDLError("An attribute cannot be both 'readonly' and 'inherit'", - [self.location]) + raise WebIDLError( + "An attribute cannot be both 'readonly' and 'inherit'", [self.location] + ) def isStatic(self): return self._static @@ -4443,69 +5241,111 @@ class IDLAttribute(IDLInterfaceMember): assert not isinstance(t.name, IDLUnresolvedIdentifier) self.type = t - if self.readonly and (self.type.hasClamp() or self.type.hasEnforceRange() or - self.type.hasAllowShared() or self.type.treatNullAsEmpty): - raise WebIDLError("A readonly attribute cannot be [Clamp] or [EnforceRange] or [AllowShared]", - [self.location]) + if self.readonly and ( + self.type.hasClamp() + or self.type.hasEnforceRange() + or self.type.hasAllowShared() + or self.type.legacyNullToEmptyString + ): + raise WebIDLError( + "A readonly attribute cannot be [Clamp] or [EnforceRange] or [AllowShared]", + [self.location], + ) if self.type.isDictionary() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("An attribute cannot be of a dictionary type", - [self.location]) + raise WebIDLError( + "An attribute cannot be of a dictionary type", [self.location] + ) if self.type.isSequence() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("A non-cached attribute cannot be of a sequence " - "type", [self.location]) + raise WebIDLError( + "A non-cached attribute cannot be of a sequence " "type", + [self.location], + ) if self.type.isRecord() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("A non-cached attribute cannot be of a record " - "type", [self.location]) + raise WebIDLError( + "A non-cached attribute cannot be of a record " "type", [self.location] + ) if self.type.isUnion(): for f in self.type.unroll().flatMemberTypes: if f.isDictionary(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a dictionary " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type if one of its member types (or " + "one of its member types's member " + "types, and so on) is a dictionary " + "type", + [self.location, f.location], + ) if f.isSequence(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a sequence " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type 
if one of its member types (or " + "one of its member types's member " + "types, and so on) is a sequence " + "type", + [self.location, f.location], + ) if f.isRecord(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a record " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type if one of its member types (or " + "one of its member types's member " + "types, and so on) is a record " + "type", + [self.location, f.location], + ) if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"): - raise WebIDLError("An attribute with [PutForwards] must have an " - "interface type as its type", [self.location]) + raise WebIDLError( + "An attribute with [PutForwards] must have an " + "interface type as its type", + [self.location], + ) - if (not self.type.isInterface() and - self.getExtendedAttribute("SameObject")): - raise WebIDLError("An attribute with [SameObject] must have an " - "interface type as its type", [self.location]) + if not self.type.isInterface() and self.getExtendedAttribute("SameObject"): + raise WebIDLError( + "An attribute with [SameObject] must have an " + "interface type as its type", + [self.location], + ) if self.type.isPromise() and not self.readonly: - raise WebIDLError("Promise-returning attributes must be readonly", - [self.location]) + raise WebIDLError( + "Promise-returning attributes must be readonly", [self.location] + ) + + if self.type.isObservableArray(): + if self.isStatic(): + raise WebIDLError( + "A static attribute cannot have an ObservableArray type", + [self.location], + ) + if self.getExtendedAttribute("Cached") or self.getExtendedAttribute( + "StoreInSlot" + ): + raise WebIDLError( + "[Cached] and [StoreInSlot] must not be used " + "on an attribute whose type is ObservableArray", + [self.location], + ) def validate(self): def typeContainsChromeOnlyDictionaryMember(type): - if (type.nullable() or - type.isSequence() or - type.isRecord()): + if type.nullable() or type.isSequence() or type.isRecord(): return typeContainsChromeOnlyDictionaryMember(type.inner) if type.isUnion(): for memberType in type.flatMemberTypes: - (contains, location) = typeContainsChromeOnlyDictionaryMember(memberType) + (contains, location) = typeContainsChromeOnlyDictionaryMember( + memberType + ) if contains: return (True, location) if type.isDictionary(): dictionary = type.inner while dictionary: - (contains, location) = dictionaryContainsChromeOnlyMember(dictionary) + (contains, location) = dictionaryContainsChromeOnlyMember( + dictionary + ) if contains: return (True, location) dictionary = dictionary.parent @@ -4516,254 +5356,345 @@ class IDLAttribute(IDLInterfaceMember): for member in dictionary.members: if member.getExtendedAttribute("ChromeOnly"): return (True, member.location) - (contains, location) = typeContainsChromeOnlyDictionaryMember(member.type) + (contains, location) = typeContainsChromeOnlyDictionaryMember( + member.type + ) if contains: return (True, location) return (False, None) IDLInterfaceMember.validate(self) - if (self.getExtendedAttribute("Cached") or - self.getExtendedAttribute("StoreInSlot")): + if self.getExtendedAttribute("Cached") or self.getExtendedAttribute( + "StoreInSlot" + ): if not self.affects == "Nothing": - raise WebIDLError("Cached attributes and attributes stored in " - "slots must be Constant or Pure or " - "Affects=Nothing, since the getter won't always " - "be 
called.", - [self.location]) + raise WebIDLError( + "Cached attributes and attributes stored in " + "slots must be Constant or Pure or " + "Affects=Nothing, since the getter won't always " + "be called.", + [self.location], + ) (contains, location) = typeContainsChromeOnlyDictionaryMember(self.type) if contains: - raise WebIDLError("[Cached] and [StoreInSlot] must not be used " - "on an attribute whose type contains a " - "[ChromeOnly] dictionary member", - [self.location, location]) + raise WebIDLError( + "[Cached] and [StoreInSlot] must not be used " + "on an attribute whose type contains a " + "[ChromeOnly] dictionary member", + [self.location, location], + ) if self.getExtendedAttribute("Frozen"): - if (not self.type.isSequence() and not self.type.isDictionary() and - not self.type.isRecord()): - raise WebIDLError("[Frozen] is only allowed on " - "sequence-valued, dictionary-valued, and " - "record-valued attributes", - [self.location]) + if ( + not self.type.isSequence() + and not self.type.isDictionary() + and not self.type.isRecord() + ): + raise WebIDLError( + "[Frozen] is only allowed on " + "sequence-valued, dictionary-valued, and " + "record-valued attributes", + [self.location], + ) if not self.type.unroll().isExposedInAllOf(self.exposureSet): - raise WebIDLError("Attribute returns a type that is not exposed " - "everywhere where the attribute is exposed", - [self.location]) + raise WebIDLError( + "Attribute returns a type that is not exposed " + "everywhere where the attribute is exposed", + [self.location], + ) if self.getExtendedAttribute("CEReactions"): if self.readonly: - raise WebIDLError("[CEReactions] is not allowed on " - "readonly attributes", - [self.location]) + raise WebIDLError( + "[CEReactions] is not allowed on " "readonly attributes", + [self.location], + ) def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if ((identifier == "SetterThrows" or identifier == "SetterCanOOM" or - identifier == "SetterNeedsSubjectPrincipal") - and self.readonly): - raise WebIDLError("Readonly attributes must not be flagged as " - "[%s]" % identifier, - [self.location]) + if ( + identifier == "SetterThrows" + or identifier == "SetterCanOOM" + or identifier == "SetterNeedsSubjectPrincipal" + ) and self.readonly: + raise WebIDLError( + "Readonly attributes must not be flagged as " "[%s]" % identifier, + [self.location], + ) elif identifier == "BindingAlias": if not attr.hasValue(): - raise WebIDLError("[BindingAlias] takes an identifier or string", - [attr.location]) + raise WebIDLError( + "[BindingAlias] takes an identifier or string", [attr.location] + ) self._addBindingAlias(attr.value()) - elif (((identifier == "Throws" or identifier == "GetterThrows" or - identifier == "CanOOM" or identifier == "GetterCanOOM") and - self.getExtendedAttribute("StoreInSlot")) or - (identifier == "StoreInSlot" and - (self.getExtendedAttribute("Throws") or - self.getExtendedAttribute("GetterThrows") or - self.getExtendedAttribute("CanOOM") or - self.getExtendedAttribute("GetterCanOOM")))): - raise WebIDLError("Throwing things can't be [StoreInSlot]", - [attr.location]) - elif identifier == "LenientThis": + elif ( + ( + identifier == "Throws" + or identifier == "GetterThrows" + or identifier == "CanOOM" + or identifier == "GetterCanOOM" + ) + and self.getExtendedAttribute("StoreInSlot") + ) or ( + identifier == "StoreInSlot" + and ( + self.getExtendedAttribute("Throws") + or self.getExtendedAttribute("GetterThrows") + or self.getExtendedAttribute("CanOOM") + or 
self.getExtendedAttribute("GetterCanOOM") + ) + ): + raise WebIDLError("Throwing things can't be [StoreInSlot]", [attr.location]) + elif identifier == "LegacyLenientThis": if not attr.noArguments(): - raise WebIDLError("[LenientThis] must take no arguments", - [attr.location]) + raise WebIDLError( + "[LegacyLenientThis] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[LenientThis] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientThis] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("CrossOriginReadable"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [CrossOriginReadable]", - [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientThis] is not allowed in combination " + "with [CrossOriginReadable]", + [attr.location, self.location], + ) if self.getExtendedAttribute("CrossOriginWritable"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [CrossOriginWritable]", - [attr.location, self.location]) - self.lenientThis = True - elif identifier == "Unforgeable": + raise WebIDLError( + "[LegacyLenientThis] is not allowed in combination " + "with [CrossOriginWritable]", + [attr.location, self.location], + ) + self.legacyLenientThis = True + elif identifier == "LegacyUnforgeable": if self.isStatic(): - raise WebIDLError("[Unforgeable] is only allowed on non-static " - "attributes", [attr.location, self.location]) - self._unforgeable = True + raise WebIDLError( + "[LegacyUnforgeable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) + self._legacyUnforgeable = True elif identifier == "SameObject" and not self.readonly: - raise WebIDLError("[SameObject] only allowed on readonly attributes", - [attr.location, self.location]) + raise WebIDLError( + "[SameObject] only allowed on readonly attributes", + [attr.location, self.location], + ) elif identifier == "Constant" and not self.readonly: - raise WebIDLError("[Constant] only allowed on readonly attributes", - [attr.location, self.location]) + raise WebIDLError( + "[Constant] only allowed on readonly attributes", + [attr.location, self.location], + ) elif identifier == "PutForwards": if not self.readonly: - raise WebIDLError("[PutForwards] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[PutForwards] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is not allowed on " "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[PutForwards] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("Replaceable") is not None: - raise WebIDLError("[PutForwards] and [Replaceable] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) if not attr.hasValue(): - raise WebIDLError("[PutForwards] takes an identifier", - [attr.location, 
self.location]) + raise WebIDLError( + "[PutForwards] takes an identifier", [attr.location, self.location] + ) elif identifier == "Replaceable": if not attr.noArguments(): - raise WebIDLError("[Replaceable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Replaceable] must take no arguments", [attr.location] + ) if not self.readonly: - raise WebIDLError("[Replaceable] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[Replaceable] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is not allowed on " "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[Replaceable] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("PutForwards") is not None: - raise WebIDLError("[PutForwards] and [Replaceable] can't both " - "appear on the same attribute", - [attr.location, self.location]) - elif identifier == "LenientSetter": + raise WebIDLError( + "[PutForwards] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) + elif identifier == "LegacyLenientSetter": if not attr.noArguments(): - raise WebIDLError("[LenientSetter] must take no arguments", - [attr.location]) + raise WebIDLError( + "[LegacyLenientSetter] must take no arguments", [attr.location] + ) if not self.readonly: - raise WebIDLError("[LenientSetter] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientSetter] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[LenientSetter] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientSetter] is not allowed on " + "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[LenientSetter] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientSetter] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("PutForwards") is not None: - raise WebIDLError("[LenientSetter] and [PutForwards] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientSetter] and [PutForwards] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) if self.getExtendedAttribute("Replaceable") is not None: - raise WebIDLError("[LenientSetter] and [Replaceable] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[LegacyLenientSetter] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) elif identifier == "LenientFloat": if self.readonly: - raise WebIDLError("[LenientFloat] used on a readonly attribute", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on a readonly attribute", + [attr.location, self.location], + ) if not self.type.includesRestrictedFloat(): - raise 
WebIDLError("[LenientFloat] used on an attribute with a " - "non-restricted-float type", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on an attribute with a " + "non-restricted-float type", + [attr.location, self.location], + ) elif identifier == "StoreInSlot": if self.getExtendedAttribute("Cached"): - raise WebIDLError("[StoreInSlot] and [Cached] must not be " - "specified on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[StoreInSlot] and [Cached] must not be " + "specified on the same attribute", + [attr.location, self.location], + ) elif identifier == "Cached": if self.getExtendedAttribute("StoreInSlot"): - raise WebIDLError("[Cached] and [StoreInSlot] must not be " - "specified on the same attribute", - [attr.location, self.location]) - elif (identifier == "CrossOriginReadable" or - identifier == "CrossOriginWritable"): + raise WebIDLError( + "[Cached] and [StoreInSlot] must not be " + "specified on the same attribute", + [attr.location, self.location], + ) + elif identifier == "CrossOriginReadable" or identifier == "CrossOriginWritable": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) if self.isStatic(): - raise WebIDLError("[%s] is only allowed on non-static " - "attributes" % identifier, - [attr.location, self.location]) - if self.getExtendedAttribute("LenientThis"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [%s]" % identifier, - [attr.location, self.location]) + raise WebIDLError( + "[%s] is only allowed on non-static " "attributes" % identifier, + [attr.location, self.location], + ) + if self.getExtendedAttribute("LegacyLenientThis"): + raise WebIDLError( + "[LegacyLenientThis] is not allowed in combination " + "with [%s]" % identifier, + [attr.location, self.location], + ) elif identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) elif identifier == "Pure": if not attr.noArguments(): - raise WebIDLError("[Pure] must take no arguments", - [attr.location]) + raise WebIDLError("[Pure] must take no arguments", [attr.location]) self._setDependsOn("DOMState") self._setAffects("Nothing") elif identifier == "Constant" or identifier == "SameObject": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) self._setDependsOn("Nothing") self._setAffects("Nothing") elif identifier == "Affects": if not attr.hasValue(): - raise WebIDLError("[Affects] takes an identifier", - [attr.location]) + raise WebIDLError("[Affects] takes an identifier", [attr.location]) self._setAffects(attr.value()) elif identifier == "DependsOn": if not attr.hasValue(): - raise WebIDLError("[DependsOn] takes an identifier", - [attr.location]) - if (attr.value() != "Everything" and attr.value() != "DOMState" and - not self.readonly): - raise WebIDLError("[DependsOn=%s] only allowed on " - "readonly attributes" % attr.value(), - [attr.location, self.location]) + raise WebIDLError("[DependsOn] takes an identifier", [attr.location]) + if ( + attr.value() != "Everything" + and attr.value() != "DOMState" + and not self.readonly + ): + raise WebIDLError( + "[DependsOn=%s] only allowed on " + "readonly attributes" % attr.value(), + [attr.location, self.location], + ) self._setDependsOn(attr.value()) elif 
identifier == "UseCounter": if self.stringifier: - raise WebIDLError("[UseCounter] must not be used on a " - "stringifier attribute", - [attr.location, self.location]) + raise WebIDLError( + "[UseCounter] must not be used on a " "stringifier attribute", + [attr.location, self.location], + ) elif identifier == "Unscopable": if not attr.noArguments(): - raise WebIDLError("[Unscopable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Unscopable] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[Unscopable] is only allowed on non-static " - "attributes and operations", - [attr.location, self.location]) + raise WebIDLError( + "[Unscopable] is only allowed on non-static " + "attributes and operations", + [attr.location, self.location], + ) elif identifier == "CEReactions": if not attr.noArguments(): - raise WebIDLError("[CEReactions] must take no arguments", - [attr.location]) - elif (identifier == "Pref" or - identifier == "Deprecated" or - identifier == "SetterThrows" or - identifier == "Throws" or - identifier == "GetterThrows" or - identifier == "SetterCanOOM" or - identifier == "CanOOM" or - identifier == "GetterCanOOM" or - identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "Frozen" or - identifier == "NewObject" or - identifier == "NeedsSubjectPrincipal" or - identifier == "SetterNeedsSubjectPrincipal" or - identifier == "GetterNeedsSubjectPrincipal" or - identifier == "NeedsCallerType" or - identifier == "ReturnValueNeedsContainsHack" or - identifier == "BinaryName" or - identifier == "NonEnumerable"): + raise WebIDLError( + "[CEReactions] must take no arguments", [attr.location] + ) + elif ( + identifier == "Pref" + or identifier == "Deprecated" + or identifier == "SetterThrows" + or identifier == "Throws" + or identifier == "GetterThrows" + or identifier == "SetterCanOOM" + or identifier == "CanOOM" + or identifier == "GetterCanOOM" + or identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "Trial" + or identifier == "SecureContext" + or identifier == "Frozen" + or identifier == "NewObject" + or identifier == "NeedsSubjectPrincipal" + or identifier == "SetterNeedsSubjectPrincipal" + or identifier == "GetterNeedsSubjectPrincipal" + or identifier == "NeedsCallerType" + or identifier == "ReturnValueNeedsContainsHack" + or identifier == "BinaryName" + or identifier == "NonEnumerable" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on attribute" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on attribute" % identifier, + [attr.location], + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def resolve(self, parentScope): @@ -4771,8 +5702,8 @@ class IDLAttribute(IDLInterfaceMember): self.type.resolveType(parentScope) IDLObjectWithIdentifier.resolve(self, parentScope) - def hasLenientThis(self): - return self.lenientThis + def hasLegacyLenientThis(self): + return self.legacyLenientThis def isMaplikeOrSetlikeAttr(self): """ @@ -4782,24 +5713,35 @@ class IDLAttribute(IDLInterfaceMember): """ return self.maplikeOrSetlike is not None - def isUnforgeable(self): - return self._unforgeable + def isLegacyUnforgeable(self): + return self._legacyUnforgeable def _getDependentObjects(self): return set([self.type]) def expand(self, members): assert self.stringifier - if not self.type.isDOMString() and not self.type.isUSVString(): - raise 
WebIDLError("The type of a stringifer attribute must be " - "either DOMString or USVString", - [self.location]) - identifier = IDLUnresolvedIdentifier(self.location, "__stringifier", - allowDoubleUnderscore=True) - method = IDLMethod(self.location, - identifier, - returnType=self.type, arguments=[], - stringifier=True, underlyingAttr=self) + if ( + not self.type.isDOMString() + and not self.type.isUSVString() + and not self.type.isUTF8String() + ): + raise WebIDLError( + "The type of a stringifer attribute must be " + "either DOMString, USVString or UTF8String", + [self.location], + ) + identifier = IDLUnresolvedIdentifier( + self.location, "__stringifier", allowDoubleUnderscore=True + ) + method = IDLMethod( + self.location, + identifier, + returnType=self.type, + arguments=[], + stringifier=True, + underlyingAttr=self, + ) allowedExtAttrs = ["Throws", "NeedsSubjectPrincipal", "Pure"] # Safe to ignore these as they are only meaningful for attributes attributeOnlyExtAttrs = [ @@ -4807,24 +5749,40 @@ class IDLAttribute(IDLInterfaceMember): "CrossOriginWritable", "SetterThrows", ] - for (key, value) in list(self._extendedAttrDict.items()): + for (key, value) in self._extendedAttrDict.items(): if key in allowedExtAttrs: if value is not True: - raise WebIDLError("[%s] with a value is currently " - "unsupported in stringifier attributes, " - "please file a bug to add support" % key, - [self.location]) - method.addExtendedAttributes([IDLExtendedAttribute(self.location, (key,))]) + raise WebIDLError( + "[%s] with a value is currently " + "unsupported in stringifier attributes, " + "please file a bug to add support" % key, + [self.location], + ) + method.addExtendedAttributes( + [IDLExtendedAttribute(self.location, (key,))] + ) elif not key in attributeOnlyExtAttrs: - raise WebIDLError("[%s] is currently unsupported in " - "stringifier attributes, please file a bug " - "to add support" % key, - [self.location]) + raise WebIDLError( + "[%s] is currently unsupported in " + "stringifier attributes, please file a bug " + "to add support" % key, + [self.location], + ) members.append(method) class IDLArgument(IDLObjectWithIdentifier): - def __init__(self, location, identifier, type, optional=False, defaultValue=None, variadic=False, dictionaryMember=False, allowTypeAttributes=False): + def __init__( + self, + location, + identifier, + type, + optional=False, + defaultValue=None, + variadic=False, + dictionaryMember=False, + allowTypeAttributes=False, + ): IDLObjectWithIdentifier.__init__(self, location, None, identifier) assert isinstance(type, IDLType) @@ -4845,24 +5803,37 @@ class IDLArgument(IDLObjectWithIdentifier): def addExtendedAttributes(self, attrs): for attribute in attrs: identifier = attribute.identifier() - if self.allowTypeAttributes and (identifier == "EnforceRange" or identifier == "Clamp" or - identifier == "TreatNullAs" or identifier == "AllowShared"): + if self.allowTypeAttributes and ( + identifier == "EnforceRange" + or identifier == "Clamp" + or identifier == "LegacyNullToEmptyString" + or identifier == "AllowShared" + ): self.type = self.type.withExtendedAttributes([attribute]) elif identifier == "TreatNonCallableAsNull": self._allowTreatNonCallableAsNull = True - elif (self.dictionaryMember and - (identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "Pref")): + elif self.dictionaryMember and ( + identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "Trial" + or identifier == "Pref" + ): if not self.optional: - raise WebIDLError("[%s] must not 
be used on a required " - "dictionary member" % identifier, - [attribute.location]) + raise WebIDLError( + "[%s] must not be used on a required " + "dictionary member" % identifier, + [attribute.location], + ) else: - raise WebIDLError("Unhandled extended attribute on %s" % - ("a dictionary member" if self.dictionaryMember else - "an argument"), - [attribute.location]) + raise WebIDLError( + "Unhandled extended attribute on %s" + % ( + "a dictionary member" + if self.dictionaryMember + else "an argument" + ), + [attribute.location], + ) attrlist = attribute.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -4885,22 +5856,37 @@ class IDLArgument(IDLObjectWithIdentifier): assert not isinstance(type.name, IDLUnresolvedIdentifier) self.type = type + if self.type.isUndefined(): + raise WebIDLError( + "undefined must not be used as the type of an argument in any circumstance", + [self.location], + ) + if self.type.isAny(): - assert (self.defaultValue is None or - isinstance(self.defaultValue, IDLNullValue)) + assert self.defaultValue is None or isinstance( + self.defaultValue, IDLNullValue + ) # optional 'any' values always have a default value if self.optional and not self.defaultValue and not self.variadic: # Set the default value to undefined, for simplicity, so the # codegen doesn't have to special-case this. self.defaultValue = IDLUndefinedValue(self.location) - if self.dictionaryMember and self.type.treatNullAsEmpty: - raise WebIDLError("Dictionary members cannot be [TreatNullAs]", [self.location]) + if self.dictionaryMember and self.type.legacyNullToEmptyString: + raise WebIDLError( + "Dictionary members cannot be [LegacyNullToEmptyString]", + [self.location], + ) + if self.type.isObservableArray(): + raise WebIDLError( + "%s cannot have an ObservableArray type" + % ("Dictionary members" if self.dictionaryMember else "Arguments"), + [self.location], + ) # Now do the coercing thing; this needs to happen after the # above creation of a default value. 
if self.defaultValue: - self.defaultValue = self.defaultValue.coerceToType(self.type, - self.location) + self.defaultValue = self.defaultValue.coerceToType(self.type, self.location) assert self.defaultValue def allowTreatNonCallableAsNull(self): @@ -4917,7 +5903,9 @@ class IDLArgument(IDLObjectWithIdentifier): class IDLCallback(IDLObjectWithScope): - def __init__(self, location, parentScope, identifier, returnType, arguments, isConstructor): + def __init__( + self, location, parentScope, identifier, returnType, arguments, isConstructor + ): assert isinstance(returnType, IDLType) self._returnType = returnType @@ -4965,29 +5953,42 @@ class IDLCallback(IDLObjectWithScope): argument.type = type def validate(self): - pass + for argument in self._arguments: + if argument.type.isUndefined(): + raise WebIDLError( + "undefined must not be used as the type of an argument in any circumstance", + [self.location], + ) def addExtendedAttributes(self, attrs): unhandledAttrs = [] for attr in attrs: if attr.identifier() == "TreatNonCallableAsNull": self._treatNonCallableAsNull = True - elif attr.identifier() == "TreatNonObjectAsNull": + elif attr.identifier() == "LegacyTreatNonObjectAsNull": if self._isConstructor: - raise WebIDLError("[TreatNonObjectAsNull] is not supported " - "on constructors", [self.location]) + raise WebIDLError( + "[LegacyTreatNonObjectAsNull] is not supported " + "on constructors", + [self.location], + ) self._treatNonObjectAsNull = True elif attr.identifier() == "MOZ_CAN_RUN_SCRIPT_BOUNDARY": if self._isConstructor: - raise WebIDLError("[MOZ_CAN_RUN_SCRIPT_BOUNDARY] is not " - "permitted on constructors", - [self.location]) + raise WebIDLError( + "[MOZ_CAN_RUN_SCRIPT_BOUNDARY] is not " + "permitted on constructors", + [self.location], + ) self._isRunScriptBoundary = True else: unhandledAttrs.append(attr) if self._treatNonCallableAsNull and self._treatNonObjectAsNull: - raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] " - "and [TreatNonObjectAsNull]", [self.location]) + raise WebIDLError( + "Cannot specify both [TreatNonCallableAsNull] " + "and [LegacyTreatNonObjectAsNull]", + [self.location], + ) if len(unhandledAttrs) != 0: IDLType.addExtendedAttributes(self, unhandledAttrs) @@ -4995,7 +5996,7 @@ class IDLCallback(IDLObjectWithScope): return set([self._returnType] + self._arguments) def isRunScriptBoundary(self): - return self._isRunScriptBoundary; + return self._isRunScriptBoundary class IDLCallbackType(IDLType): @@ -5015,8 +6016,14 @@ class IDLCallbackType(IDLType): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isNonCallbackInterface() or other.isSequence()) + return ( + other.isUndefined() + or other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isNonCallbackInterface() + or other.isSequence() + ) def _getDependentObjects(self): return self.callback._getDependentObjects() @@ -5031,6 +6038,7 @@ class IDLMethodOverload: IDLMethodOverload for each one, all hanging off an IDLMethod representing the full set of overloads. 
""" + def __init__(self, returnType, arguments, location): self.returnType = returnType # Clone the list of arguments, just in case @@ -5049,28 +6057,31 @@ class IDLMethodOverload: class IDLMethod(IDLInterfaceMember, IDLScope): Special = enum( - 'Getter', - 'Setter', - 'Deleter', - 'LegacyCaller', - base=IDLInterfaceMember.Special + "Getter", "Setter", "Deleter", "LegacyCaller", base=IDLInterfaceMember.Special ) - NamedOrIndexed = enum( - 'Neither', - 'Named', - 'Indexed' - ) + NamedOrIndexed = enum("Neither", "Named", "Indexed") - def __init__(self, location, identifier, returnType, arguments, - static=False, getter=False, setter=False, - deleter=False, specialType=NamedOrIndexed.Neither, - legacycaller=False, stringifier=False, - maplikeOrSetlikeOrIterable=None, - underlyingAttr=None): + def __init__( + self, + location, + identifier, + returnType, + arguments, + static=False, + getter=False, + setter=False, + deleter=False, + specialType=NamedOrIndexed.Neither, + legacycaller=False, + stringifier=False, + maplikeOrSetlikeOrIterable=None, + underlyingAttr=None, + ): # REVIEW: specialType is NamedOrIndexed -- wow, this is messed up. - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Method) + IDLInterfaceMember.__init__( + self, location, identifier, IDLInterfaceMember.Tags.Method + ) self._hasOverloads = False @@ -5091,19 +6102,23 @@ class IDLMethod(IDLInterfaceMember, IDLScope): self._legacycaller = legacycaller assert isinstance(stringifier, bool) self._stringifier = stringifier - assert maplikeOrSetlikeOrIterable is None or isinstance(maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase) + assert maplikeOrSetlikeOrIterable is None or isinstance( + maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase + ) self.maplikeOrSetlikeOrIterable = maplikeOrSetlikeOrIterable self._htmlConstructor = False self.underlyingAttr = underlyingAttr self._specialType = specialType - self._unforgeable = False + self._legacyUnforgeable = False self.dependsOn = "Everything" self.affects = "Everything" self.aliases = [] if static and identifier.name == "prototype": - raise WebIDLError("The identifier of a static operation must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a static operation must not be 'prototype'", + [location], + ) self.assertSignatureConstraints() @@ -5116,8 +6131,10 @@ class IDLMethod(IDLInterfaceMember, IDLScope): overload = self._overloads[0] arguments = overload.arguments assert len(arguments) == 1 - assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or - arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]) + assert ( + arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] + or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long] + ) assert not arguments[0].optional and not arguments[0].variadic assert not self._getter or not overload.returnType.isUndefined() @@ -5125,8 +6142,10 @@ class IDLMethod(IDLInterfaceMember, IDLScope): assert len(self._overloads) == 1 arguments = self._overloads[0].arguments assert len(arguments) == 2 - assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or - arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]) + assert ( + arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] + or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long] + ) assert not arguments[0].optional and not arguments[0].variadic assert not arguments[1].optional and not 
arguments[1].variadic @@ -5135,7 +6154,9 @@ class IDLMethod(IDLInterfaceMember, IDLScope): overload = self._overloads[0] assert len(overload.arguments) == 0 if not self.underlyingAttr: - assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring] + assert ( + overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring] + ) def isStatic(self): return self._static @@ -5153,13 +6174,17 @@ class IDLMethod(IDLInterfaceMember, IDLScope): return self._deleter def isNamed(self): - assert (self._specialType == IDLMethod.NamedOrIndexed.Named or - self._specialType == IDLMethod.NamedOrIndexed.Indexed) + assert ( + self._specialType == IDLMethod.NamedOrIndexed.Named + or self._specialType == IDLMethod.NamedOrIndexed.Indexed + ) return self._specialType == IDLMethod.NamedOrIndexed.Named def isIndexed(self): - assert (self._specialType == IDLMethod.NamedOrIndexed.Named or - self._specialType == IDLMethod.NamedOrIndexed.Indexed) + assert ( + self._specialType == IDLMethod.NamedOrIndexed.Named + or self._specialType == IDLMethod.NamedOrIndexed.Indexed + ) return self._specialType == IDLMethod.NamedOrIndexed.Indexed def isLegacycaller(self): @@ -5182,11 +6207,13 @@ class IDLMethod(IDLInterfaceMember, IDLScope): return self.maplikeOrSetlikeOrIterable is not None def isSpecial(self): - return (self.isGetter() or - self.isSetter() or - self.isDeleter() or - self.isLegacycaller() or - self.isStringifier()) + return ( + self.isGetter() + or self.isSetter() + or self.isDeleter() + or self.isLegacycaller() + or self.isStringifier() + ) def isHTMLConstructor(self): return self._htmlConstructor @@ -5202,8 +6229,10 @@ class IDLMethod(IDLInterfaceMember, IDLScope): implemented interfaces, so while these functions use what is considered an non-identifier name, they actually DO have an identifier. 
""" - return (self.identifier.name[:2] == "__" and - not self.isMaplikeOrSetlikeOrIterableMethod()) + return ( + self.identifier.name[:2] == "__" + and not self.isMaplikeOrSetlikeOrIterableMethod() + ) def resolve(self, parentScope): assert isinstance(parentScope, IDLScope) @@ -5217,36 +6246,52 @@ class IDLMethod(IDLInterfaceMember, IDLScope): assert len(method._overloads) == 1 if self._extendedAttrDict != method._extendedAttrDict: - extendedAttrDiff = set(self._extendedAttrDict.keys()) ^ set(method._extendedAttrDict.keys()) + extendedAttrDiff = set(self._extendedAttrDict.keys()) ^ set( + method._extendedAttrDict.keys() + ) - if extendedAttrDiff == { "LenientFloat" }: + if extendedAttrDiff == {"LenientFloat"}: if "LenientFloat" not in self._extendedAttrDict: for overload in self._overloads: if overload.includesRestrictedFloatArgument(): - raise WebIDLError("Restricted float behavior differs on different " - "overloads of %s" % method.identifier, - [overload.location, method.location]) - self._extendedAttrDict["LenientFloat"] = method._extendedAttrDict["LenientFloat"] - elif method._overloads[0].includesRestrictedFloatArgument(): - raise WebIDLError("Restricted float behavior differs on different " - "overloads of %s" % method.identifier, - [self.location, method.location]) - else: - raise WebIDLError("Extended attributes differ on different " + raise WebIDLError( + "Restricted float behavior differs on different " "overloads of %s" % method.identifier, - [self.location, method.location]) + [overload.location, method.location], + ) + self._extendedAttrDict["LenientFloat"] = method._extendedAttrDict[ + "LenientFloat" + ] + elif method._overloads[0].includesRestrictedFloatArgument(): + raise WebIDLError( + "Restricted float behavior differs on different " + "overloads of %s" % method.identifier, + [self.location, method.location], + ) + else: + raise WebIDLError( + "Extended attributes differ on different " + "overloads of %s" % method.identifier, + [self.location, method.location], + ) self._overloads.extend(method._overloads) self._hasOverloads = True if self.isStatic() != method.isStatic(): - raise WebIDLError("Overloaded identifier %s appears with different values of the 'static' attribute" % method.identifier, - [method.location]) + raise WebIDLError( + "Overloaded identifier %s appears with different values of the 'static' attribute" + % method.identifier, + [method.location], + ) if self.isLegacycaller() != method.isLegacycaller(): - raise WebIDLError("Overloaded identifier %s appears with different values of the 'legacycaller' attribute" % method.identifier, - [method.location]) + raise WebIDLError( + "Overloaded identifier %s appears with different values of the 'legacycaller' attribute" + % method.identifier, + [method.location], + ) # Can't overload special things! assert not self.isGetter() @@ -5263,8 +6308,9 @@ class IDLMethod(IDLInterfaceMember, IDLScope): return self def signatures(self): - return [(overload.returnType, overload.arguments) for overload in - self._overloads] + return [ + (overload.returnType, overload.arguments) for overload in self._overloads + ] def finish(self, scope): IDLInterfaceMember.finish(self, scope) @@ -5286,8 +6332,11 @@ class IDLMethod(IDLInterfaceMember, IDLScope): # Now compute various information that will be used by the # WebIDL overload resolution algorithm. 
self.maxArgCount = max(len(s[1]) for s in self.signatures()) - self.allowedArgCounts = [i for i in range(self.maxArgCount+1) - if len(self.signaturesForArgCount(i)) != 0] + self.allowedArgCounts = [ + i + for i in range(self.maxArgCount + 1) + if len(self.signaturesForArgCount(i)) != 0 + ] def validate(self): IDLInterfaceMember.validate(self) @@ -5306,19 +6355,26 @@ class IDLMethod(IDLInterfaceMember, IDLScope): raise WebIDLError( "Signatures for method '%s' with %d arguments have " "different types of arguments at index %d, which " - "is before distinguishing index %d" % - (self.identifier.name, argCount, idx, - distinguishingIndex), - [self.location, overload.location]) + "is before distinguishing index %d" + % ( + self.identifier.name, + argCount, + idx, + distinguishingIndex, + ), + [self.location, overload.location], + ) overloadWithPromiseReturnType = None overloadWithoutPromiseReturnType = None for overload in self._overloads: returnType = overload.returnType if not returnType.unroll().isExposedInAllOf(self.exposureSet): - raise WebIDLError("Overload returns a type that is not exposed " - "everywhere where the method is exposed", - [overload.location]) + raise WebIDLError( + "Overload returns a type that is not exposed " + "everywhere where the method is exposed", + [overload.location], + ) variadicArgument = None @@ -5326,46 +6382,62 @@ class IDLMethod(IDLInterfaceMember, IDLScope): for (idx, argument) in enumerate(arguments): assert argument.type.isComplete() - if ((argument.type.isDictionary() and - argument.type.unroll().inner.canBeEmpty()) or - (argument.type.isUnion() and - argument.type.unroll().hasPossiblyEmptyDictionaryType())): + if ( + argument.type.isDictionary() + and argument.type.unroll().inner.canBeEmpty() + ) or ( + argument.type.isUnion() + and argument.type.unroll().hasPossiblyEmptyDictionaryType() + ): # Optional dictionaries and unions containing optional # dictionaries at the end of the list or followed by # optional arguments must be optional. - if (not argument.optional and - all(arg.optional for arg in arguments[idx+1:])): - raise WebIDLError("Dictionary argument without any " - "required fields or union argument " - "containing such dictionary not " - "followed by a required argument " - "must be optional", - [argument.location]) + if not argument.optional and all( + arg.optional for arg in arguments[idx + 1 :] + ): + raise WebIDLError( + "Dictionary argument without any " + "required fields or union argument " + "containing such dictionary not " + "followed by a required argument " + "must be optional", + [argument.location], + ) - if (not argument.defaultValue and - all(arg.optional for arg in arguments[idx+1:])): - raise WebIDLError("Dictionary argument without any " - "required fields or union argument " - "containing such dictionary not " - "followed by a required argument " - "must have a default value", - [argument.location]) + if not argument.defaultValue and all( + arg.optional for arg in arguments[idx + 1 :] + ): + raise WebIDLError( + "Dictionary argument without any " + "required fields or union argument " + "containing such dictionary not " + "followed by a required argument " + "must have a default value", + [argument.location], + ) # An argument cannot be a nullable dictionary or a # nullable union containing a dictionary. 
- if (argument.type.nullable() and - (argument.type.isDictionary() or - (argument.type.isUnion() and - argument.type.unroll().hasDictionaryType()))): - raise WebIDLError("An argument cannot be a nullable " - "dictionary or nullable union " - "containing a dictionary", - [argument.location]) + if argument.type.nullable() and ( + argument.type.isDictionary() + or ( + argument.type.isUnion() + and argument.type.unroll().hasDictionaryType() + ) + ): + raise WebIDLError( + "An argument cannot be a nullable " + "dictionary or nullable union " + "containing a dictionary", + [argument.location], + ) # Only the last argument can be variadic if variadicArgument: - raise WebIDLError("Variadic argument is not last argument", - [variadicArgument.location]) + raise WebIDLError( + "Variadic argument is not last argument", + [variadicArgument.location], + ) if argument.variadic: variadicArgument = argument @@ -5376,47 +6448,64 @@ class IDLMethod(IDLInterfaceMember, IDLScope): # Make sure either all our overloads return Promises or none do if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType: - raise WebIDLError("We have overloads with both Promise and " - "non-Promise return types", - [overloadWithPromiseReturnType.location, - overloadWithoutPromiseReturnType.location]) + raise WebIDLError( + "We have overloads with both Promise and " "non-Promise return types", + [ + overloadWithPromiseReturnType.location, + overloadWithoutPromiseReturnType.location, + ], + ) if overloadWithPromiseReturnType and self._legacycaller: - raise WebIDLError("May not have a Promise return type for a " - "legacycaller.", - [overloadWithPromiseReturnType.location]) + raise WebIDLError( + "May not have a Promise return type for a " "legacycaller.", + [overloadWithPromiseReturnType.location], + ) - if self.getExtendedAttribute("StaticClassOverride") and not \ - (self.identifier.scope.isJSImplemented() and self.isStatic()): - raise WebIDLError("StaticClassOverride can be applied to static" - " methods on JS-implemented classes only.", - [self.location]) + if self.getExtendedAttribute("StaticClassOverride") and not ( + self.identifier.scope.isJSImplemented() and self.isStatic() + ): + raise WebIDLError( + "StaticClassOverride can be applied to static" + " methods on JS-implemented classes only.", + [self.location], + ) # Ensure that toJSON methods satisfy the spec constraints on them. 
if self.identifier.name == "toJSON": if len(self.signatures()) != 1: - raise WebIDLError("toJSON method has multiple overloads", - [self._overloads[0].location, - self._overloads[1].location]) + raise WebIDLError( + "toJSON method has multiple overloads", + [self._overloads[0].location, self._overloads[1].location], + ) if len(self.signatures()[0][1]) != 0: - raise WebIDLError("toJSON method has arguments", - [self.location]) + raise WebIDLError("toJSON method has arguments", [self.location]) if not self.signatures()[0][0].isJSONType(): - raise WebIDLError("toJSON method has non-JSON return type", - [self.location]) + raise WebIDLError( + "toJSON method has non-JSON return type", [self.location] + ) def overloadsForArgCount(self, argc): - return [overload for overload in self._overloads if - len(overload.arguments) == argc or - (len(overload.arguments) > argc and - all(arg.optional for arg in overload.arguments[argc:])) or - (len(overload.arguments) < argc and - len(overload.arguments) > 0 and - overload.arguments[-1].variadic)] + return [ + overload + for overload in self._overloads + if len(overload.arguments) == argc + or ( + len(overload.arguments) > argc + and all(arg.optional for arg in overload.arguments[argc:]) + ) + or ( + len(overload.arguments) < argc + and len(overload.arguments) > 0 + and overload.arguments[-1].variadic + ) + ] def signaturesForArgCount(self, argc): - return [(overload.returnType, overload.arguments) for overload - in self.overloadsForArgCount(argc)] + return [ + (overload.returnType, overload.arguments) + for overload in self.overloadsForArgCount(argc) + ] def locationsForArgCount(self, argc): return [overload.location for overload in self.overloadsForArgCount(argc)] @@ -5424,163 +6513,199 @@ class IDLMethod(IDLInterfaceMember, IDLScope): def distinguishingIndexForArgCount(self, argc): def isValidDistinguishingIndex(idx, signatures): for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]): - for (secondRetval, secondArgs) in signatures[firstSigIndex+1:]: + for (secondRetval, secondArgs) in signatures[firstSigIndex + 1 :]: if idx < len(firstArgs): firstType = firstArgs[idx].type else: - assert(firstArgs[-1].variadic) + assert firstArgs[-1].variadic firstType = firstArgs[-1].type if idx < len(secondArgs): secondType = secondArgs[idx].type else: - assert(secondArgs[-1].variadic) + assert secondArgs[-1].variadic secondType = secondArgs[-1].type if not firstType.isDistinguishableFrom(secondType): return False return True + signatures = self.signaturesForArgCount(argc) for idx in range(argc): if isValidDistinguishingIndex(idx, signatures): return idx # No valid distinguishing index. 
Time to throw locations = self.locationsForArgCount(argc) - raise WebIDLError("Signatures with %d arguments for method '%s' are not " - "distinguishable" % (argc, self.identifier.name), - locations) + raise WebIDLError( + "Signatures with %d arguments for method '%s' are not " + "distinguishable" % (argc, self.identifier.name), + locations, + ) def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if (identifier == "GetterThrows" or - identifier == "SetterThrows" or - identifier == "GetterCanOOM" or - identifier == "SetterCanOOM" or - identifier == "SetterNeedsSubjectPrincipal" or - identifier == "GetterNeedsSubjectPrincipal"): - raise WebIDLError("Methods must not be flagged as " - "[%s]" % identifier, - [attr.location, self.location]) - elif identifier == "Unforgeable": + if ( + identifier == "GetterThrows" + or identifier == "SetterThrows" + or identifier == "GetterCanOOM" + or identifier == "SetterCanOOM" + or identifier == "SetterNeedsSubjectPrincipal" + or identifier == "GetterNeedsSubjectPrincipal" + ): + raise WebIDLError( + "Methods must not be flagged as " "[%s]" % identifier, + [attr.location, self.location], + ) + elif identifier == "LegacyUnforgeable": if self.isStatic(): - raise WebIDLError("[Unforgeable] is only allowed on non-static " - "methods", [attr.location, self.location]) - self._unforgeable = True + raise WebIDLError( + "[LegacyUnforgeable] is only allowed on non-static " "methods", + [attr.location, self.location], + ) + self._legacyUnforgeable = True elif identifier == "SameObject": - raise WebIDLError("Methods must not be flagged as [SameObject]", - [attr.location, self.location]) + raise WebIDLError( + "Methods must not be flagged as [SameObject]", + [attr.location, self.location], + ) elif identifier == "Constant": - raise WebIDLError("Methods must not be flagged as [Constant]", - [attr.location, self.location]) + raise WebIDLError( + "Methods must not be flagged as [Constant]", + [attr.location, self.location], + ) elif identifier == "PutForwards": - raise WebIDLError("Only attributes support [PutForwards]", - [attr.location, self.location]) - elif identifier == "LenientSetter": - raise WebIDLError("Only attributes support [LenientSetter]", - [attr.location, self.location]) + raise WebIDLError( + "Only attributes support [PutForwards]", [attr.location, self.location] + ) + elif identifier == "LegacyLenientSetter": + raise WebIDLError( + "Only attributes support [LegacyLenientSetter]", + [attr.location, self.location], + ) elif identifier == "LenientFloat": # This is called before we've done overload resolution overloads = self._overloads assert len(overloads) == 1 if not overloads[0].returnType.isUndefined(): - raise WebIDLError("[LenientFloat] used on a non-undefined returning method", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on a non-undefined method", + [attr.location, self.location], + ) if not overloads[0].includesRestrictedFloatArgument(): - raise WebIDLError("[LenientFloat] used on an operation with no " - "restricted float type arguments", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on an operation with no " + "restricted float type arguments", + [attr.location, self.location], + ) elif identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) - elif (identifier == "CrossOriginCallable" or - identifier == "WebGLHandlesContextLoss"): + elif ( + identifier == "CrossOriginCallable" + or identifier == "WebGLHandlesContextLoss" + ): # 
Known no-argument attributes. if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) if identifier == "CrossOriginCallable" and self.isStatic(): - raise WebIDLError("[CrossOriginCallable] is only allowed on non-static " - "attributes", - [attr.location, self.location]) + raise WebIDLError( + "[CrossOriginCallable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) elif identifier == "Pure": if not attr.noArguments(): - raise WebIDLError("[Pure] must take no arguments", - [attr.location]) + raise WebIDLError("[Pure] must take no arguments", [attr.location]) self._setDependsOn("DOMState") self._setAffects("Nothing") elif identifier == "Affects": if not attr.hasValue(): - raise WebIDLError("[Affects] takes an identifier", - [attr.location]) + raise WebIDLError("[Affects] takes an identifier", [attr.location]) self._setAffects(attr.value()) elif identifier == "DependsOn": if not attr.hasValue(): - raise WebIDLError("[DependsOn] takes an identifier", - [attr.location]) + raise WebIDLError("[DependsOn] takes an identifier", [attr.location]) self._setDependsOn(attr.value()) elif identifier == "Alias": if not attr.hasValue(): - raise WebIDLError("[Alias] takes an identifier or string", - [attr.location]) + raise WebIDLError( + "[Alias] takes an identifier or string", [attr.location] + ) self._addAlias(attr.value()) elif identifier == "UseCounter": if self.isSpecial(): - raise WebIDLError("[UseCounter] must not be used on a special " - "operation", - [attr.location, self.location]) + raise WebIDLError( + "[UseCounter] must not be used on a special " "operation", + [attr.location, self.location], + ) elif identifier == "Unscopable": if not attr.noArguments(): - raise WebIDLError("[Unscopable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Unscopable] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[Unscopable] is only allowed on non-static " - "attributes and operations", - [attr.location, self.location]) + raise WebIDLError( + "[Unscopable] is only allowed on non-static " + "attributes and operations", + [attr.location, self.location], + ) elif identifier == "CEReactions": if not attr.noArguments(): - raise WebIDLError("[CEReactions] must take no arguments", - [attr.location]) + raise WebIDLError( + "[CEReactions] must take no arguments", [attr.location] + ) if self.isSpecial() and not self.isSetter() and not self.isDeleter(): - raise WebIDLError("[CEReactions] is only allowed on operation, " - "attribute, setter, and deleter", - [attr.location, self.location]) + raise WebIDLError( + "[CEReactions] is only allowed on operation, " + "attribute, setter, and deleter", + [attr.location, self.location], + ) elif identifier == "Default": if not attr.noArguments(): - raise WebIDLError("[Default] must take no arguments", - [attr.location]) + raise WebIDLError("[Default] must take no arguments", [attr.location]) if not self.isToJSON(): - raise WebIDLError("[Default] is only allowed on toJSON operations", - [attr.location, self.location]) + raise WebIDLError( + "[Default] is only allowed on toJSON operations", + [attr.location, self.location], + ) if self.signatures()[0][0] != BuiltinTypes[IDLBuiltinType.Types.object]: - raise WebIDLError("The return type of the default toJSON " - "operation must be 'object'", - [attr.location, self.location]) - elif (identifier == "Throws" or 
- identifier == "CanOOM" or - identifier == "NewObject" or - identifier == "ChromeOnly" or - identifier == "Pref" or - identifier == "Deprecated" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "BinaryName" or - identifier == "NeedsSubjectPrincipal" or - identifier == "NeedsCallerType" or - identifier == "StaticClassOverride" or - identifier == "NonEnumerable" or - identifier == "Unexposed"): + raise WebIDLError( + "The return type of the default toJSON " + "operation must be 'object'", + [attr.location, self.location], + ) + elif ( + identifier == "Throws" + or identifier == "CanOOM" + or identifier == "NewObject" + or identifier == "ChromeOnly" + or identifier == "Pref" + or identifier == "Deprecated" + or identifier == "Func" + or identifier == "Trial" + or identifier == "SecureContext" + or identifier == "BinaryName" + or identifier == "NeedsSubjectPrincipal" + or identifier == "NeedsCallerType" + or identifier == "StaticClassOverride" + or identifier == "NonEnumerable" + or identifier == "Unexposed" + or identifier == "WebExtensionStub" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on method" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on method" % identifier, [attr.location] + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def returnsPromise(self): return self._overloads[0].returnType.isPromise() - def isUnforgeable(self): - return self._unforgeable + def isLegacyUnforgeable(self): + return self._legacyUnforgeable def _getDependentObjects(self): deps = set() @@ -5607,45 +6732,55 @@ class IDLConstructor(IDLMethod): def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if (identifier == "BinaryName" or - identifier == "ChromeOnly" or - identifier == "NewObject" or - identifier == "SecureContext" or - identifier == "Throws" or - identifier == "Func" or - identifier == "Pref"): + if ( + identifier == "BinaryName" + or identifier == "ChromeOnly" + or identifier == "NewObject" + or identifier == "SecureContext" + or identifier == "Throws" + or identifier == "Func" + or identifier == "Trial" + or identifier == "Pref" + or identifier == "UseCounter" + ): IDLMethod.handleExtendedAttribute(self, attr) elif identifier == "HTMLConstructor": if not attr.noArguments(): - raise WebIDLError("[HTMLConstructor] must take no arguments", - [attr.location]) - # We shouldn't end up here for named constructors. - assert(self.identifier.name == "constructor") + raise WebIDLError( + "[HTMLConstructor] must take no arguments", [attr.location] + ) + # We shouldn't end up here for legacy factory functions. 
+ assert self.identifier.name == "constructor" if any(len(sig[1]) != 0 for sig in self.signatures()): - raise WebIDLError("[HTMLConstructor] must not be applied to a " - "constructor operation that has arguments.", - [attr.location]) + raise WebIDLError( + "[HTMLConstructor] must not be applied to a " + "constructor operation that has arguments.", + [attr.location], + ) self._htmlConstructor = True else: - raise WebIDLError("Unknown extended attribute %s on method" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on method" % identifier, [attr.location] + ) def reallyInit(self, parentInterface): name = self._initName location = self._initLocation identifier = IDLUnresolvedIdentifier(location, name, allowForbidden=True) retType = IDLWrapperType(parentInterface.location, parentInterface) - IDLMethod.__init__(self, location, identifier, retType, self._initArgs, - static=True) - self._inited = True; + IDLMethod.__init__( + self, location, identifier, retType, self._initArgs, static=True + ) + self._inited = True # Propagate through whatever extended attributes we already had self.addExtendedAttributes(self._initExtendedAttrs) self._initExtendedAttrs = [] # Constructors are always NewObject. Whether they throw or not is # indicated by [Throws] annotations in the usual way. self.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NewObject",))]) + [IDLExtendedAttribute(self.location, ("NewObject",))] + ) class IDLIncludesStatement(IDLObject): @@ -5659,25 +6794,28 @@ class IDLIncludesStatement(IDLObject): if self._finished: return self._finished = True - assert(isinstance(self.interface, IDLIdentifierPlaceholder)) - assert(isinstance(self.mixin, IDLIdentifierPlaceholder)) + assert isinstance(self.interface, IDLIdentifierPlaceholder) + assert isinstance(self.mixin, IDLIdentifierPlaceholder) interface = self.interface.finish(scope) mixin = self.mixin.finish(scope) # NOTE: we depend on not setting self.interface and # self.mixin here to keep track of the original # locations. 
if not isinstance(interface, IDLInterface): - raise WebIDLError("Left-hand side of 'includes' is not an " - "interface", - [self.interface.location, interface.location]) + raise WebIDLError( + "Left-hand side of 'includes' is not an " "interface", + [self.interface.location, interface.location], + ) if interface.isCallback(): - raise WebIDLError("Left-hand side of 'includes' is a callback " - "interface", - [self.interface.location, interface.location]) + raise WebIDLError( + "Left-hand side of 'includes' is a callback " "interface", + [self.interface.location, interface.location], + ) if not isinstance(mixin, IDLInterfaceMixin): - raise WebIDLError("Right-hand side of 'includes' is not an " - "interface mixin", - [self.mixin.location, mixin.location]) + raise WebIDLError( + "Right-hand side of 'includes' is not an " "interface mixin", + [self.mixin.location, mixin.location], + ) mixin.actualExposureGlobalNames.update(interface._exposureGlobalNames) @@ -5690,14 +6828,18 @@ class IDLIncludesStatement(IDLObject): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on includes statements", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " + "allowed on includes statements", + [attrs[0].location, self.location], + ) + class IDLExtendedAttribute(IDLObject): """ A class to represent IDL extended attributes so we can give them locations """ + def __init__(self, location, tuple): IDLObject.__init__(self, location) self._tuple = tuple @@ -5712,15 +6854,18 @@ class IDLExtendedAttribute(IDLObject): return len(self._tuple) >= 2 and isinstance(self._tuple[1], str) def value(self): - assert(self.hasValue()) + assert self.hasValue() return self._tuple[1] def hasArgs(self): - return (len(self._tuple) == 2 and isinstance(self._tuple[1], list) or - len(self._tuple) == 3) + return ( + len(self._tuple) == 2 + and isinstance(self._tuple[1], list) + or len(self._tuple) == 3 + ) def args(self): - assert(self.hasArgs()) + assert self.hasArgs() # Our args are our last element return self._tuple[-1] @@ -5730,41 +6875,43 @@ class IDLExtendedAttribute(IDLObject): """ return list(self._tuple)[1:] + # Parser class Tokenizer(object): - tokens = [ - "INTEGER", - "FLOATLITERAL", - "IDENTIFIER", - "STRING", - "COMMENTS", - "WHITESPACE", - "OTHER" - ] + tokens = ["INTEGER", "FLOATLITERAL", "IDENTIFIER", "STRING", "WHITESPACE", "OTHER"] def t_FLOATLITERAL(self, t): - r'(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN' + r"(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN" t.value = float(t.value) return t def t_INTEGER(self, t): - r'-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)' + r"-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)" try: # Can't use int(), because that doesn't handle octal properly. 
t.value = parseInt(t.value) except: - raise WebIDLError("Invalid integer literal", - [Location(lexer=self.lexer, - lineno=self.lexer.lineno, - lexpos=self.lexer.lexpos, - filename=self._filename)]) + raise WebIDLError( + "Invalid integer literal", + [ + Location( + lexer=self.lexer, + lineno=self.lexer.lineno, + lexpos=self.lexer.lexpos, + filename=self._filename, + ) + ], + ) return t def t_IDENTIFIER(self, t): - r'[_-]?[A-Za-z][0-9A-Z_a-z-]*' - t.type = self.keywords.get(t.value, 'IDENTIFIER') + r"[_-]?[A-Za-z][0-9A-Z_a-z-]*" + t.type = self.keywords.get(t.value, "IDENTIFIER") + # If Builtin readable streams are disabled, mark ReadableStream as an identifier. + if t.type == "READABLESTREAM" and not self._use_builtin_readable_streams: + t.type = "IDENTIFIER" return t def t_STRING(self, t): @@ -5772,22 +6919,18 @@ class Tokenizer(object): t.value = t.value[1:-1] return t - def t_COMMENTS(self, t): - r'(\/\*(.|\n)*?\*\/)|(\/\/.*)' - pass - def t_WHITESPACE(self, t): - r'[\t\n\r ]+' + r"[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+" pass def t_ELLIPSIS(self, t): - r'\.\.\.' + r"\.\.\." t.type = self.keywords.get(t.value) return t def t_OTHER(self, t): - r'[^\t\n\r 0-9A-Z_a-z]' - t.type = self.keywords.get(t.value, 'OTHER') + r"[^\t\n\r 0-9A-Z_a-z]" + t.type = self.keywords.get(t.value, "OTHER") return t keywords = { @@ -5830,6 +6973,7 @@ class Tokenizer(object): "float": "FLOAT", "long": "LONG", "object": "OBJECT", + "ObservableArray": "OBSERVABLEARRAY", "octet": "OCTET", "Promise": "PROMISE", "required": "REQUIRED", @@ -5847,6 +6991,7 @@ class Tokenizer(object): "[": "LBRACKET", "]": "RBRACKET", "?": "QUESTIONMARK", + "*": "ASTERISK", ",": "COMMA", "=": "EQUALS", "<": "LT", @@ -5861,32 +7006,38 @@ class Tokenizer(object): "constructor": "CONSTRUCTOR", "symbol": "SYMBOL", "async": "ASYNC", - } + } - tokens.extend(list(keywords.values())) + tokens.extend(keywords.values()) def t_error(self, t): - raise WebIDLError("Unrecognized Input", - [Location(lexer=self.lexer, - lineno=self.lexer.lineno, - lexpos=self.lexer.lexpos, - filename=self.filename)]) + raise WebIDLError( + "Unrecognized Input", + [ + Location( + lexer=self.lexer, + lineno=self.lexer.lineno, + lexpos=self.lexer.lexpos, + filename=self.filename, + ) + ], + ) - def __init__(self, lexer=None): + def __init__(self, outputdir, lexer=None, use_builtin_readable_streams=True): + self._use_builtin_readable_streams = use_builtin_readable_streams if lexer: self.lexer = lexer else: - self.lexer = lex.lex(object=self) + self.lexer = lex.lex(object=self, reflags=re.DOTALL) class SqueakyCleanLogger(object): errorWhitelist = [ - # Web IDL defines the WHITESPACE and COMMENTS token, but doesn't actually + # Web IDL defines the WHITESPACE token, but doesn't actually # use it ... so far. "Token 'WHITESPACE' defined, but not used", - "Token 'COMMENTS' defined, but not used", - # And that means we have unused tokens - "There are 2 unused tokens", + # And that means we have an unused token + "There is 1 unused token", # Web IDL defines a OtherOrComma rule that's only used in # ExtendedAttributeInner, which we don't use yet. "Rule 'OtherOrComma' defined, but not used", @@ -5896,18 +7047,21 @@ class SqueakyCleanLogger(object): "Symbol 'OtherOrComma' is unreachable", # Which means the Other symbol is unreachable. 
"Symbol 'Other' is unreachable", - ] + ] def __init__(self): self.errors = [] def debug(self, msg, *args, **kwargs): pass + info = debug def warning(self, msg, *args, **kwargs): - if msg == "%s:%d: Rule %r defined, but not used" or \ - msg == "%s:%d: Rule '%s' defined, but not used": + if ( + msg == "%s:%d: Rule %r defined, but not used" + or msg == "%s:%d: Rule '%s' defined, but not used" + ): # Munge things so we don't have to hardcode filenames and # line numbers in our whitelist. whitelistmsg = "Rule %r defined, but not used" @@ -5917,6 +7071,7 @@ class SqueakyCleanLogger(object): whitelistargs = args if (whitelistmsg % whitelistargs) not in SqueakyCleanLogger.errorWhitelist: self.errors.append(msg % args) + error = warning def reportGrammarErrors(self): @@ -5935,7 +7090,7 @@ class Parser(Tokenizer): # It's acceptable to split things at '|' boundaries. def p_Definitions(self, p): """ - Definitions : ExtendedAttributeList Definition Definitions + Definitions : ExtendedAttributeList Definition Definitions """ if p[2]: p[0] = [p[2]] @@ -5948,27 +7103,27 @@ class Parser(Tokenizer): def p_DefinitionsEmpty(self, p): """ - Definitions : + Definitions : """ p[0] = [] def p_Definition(self, p): """ - Definition : CallbackOrInterfaceOrMixin - | Namespace - | Partial - | Dictionary - | Exception - | Enum - | Typedef - | IncludesStatement + Definition : CallbackOrInterfaceOrMixin + | Namespace + | Partial + | Dictionary + | Exception + | Enum + | Typedef + | IncludesStatement """ p[0] = p[1] assert p[1] # We might not have implemented something ... def p_CallbackOrInterfaceOrMixinCallback(self, p): """ - CallbackOrInterfaceOrMixin : CALLBACK CallbackRestOrInterface + CallbackOrInterfaceOrMixin : CALLBACK CallbackRestOrInterface """ if p[2].isInterface(): assert isinstance(p[2], IDLInterface) @@ -5978,21 +7133,22 @@ class Parser(Tokenizer): def p_CallbackOrInterfaceOrMixinInterfaceOrMixin(self, p): """ - CallbackOrInterfaceOrMixin : INTERFACE InterfaceOrMixin + CallbackOrInterfaceOrMixin : INTERFACE InterfaceOrMixin """ p[0] = p[2] def p_CallbackRestOrInterface(self, p): """ - CallbackRestOrInterface : CallbackRest - | CallbackConstructorRest - | CallbackInterface + CallbackRestOrInterface : CallbackRest + | CallbackConstructorRest + | CallbackInterface """ assert p[1] p[0] = p[1] - def handleNonPartialObject(self, location, identifier, constructor, - constructorArgs, nonPartialArgs): + def handleNonPartialObject( + self, location, identifier, constructor, constructorArgs, nonPartialArgs + ): """ This handles non-partial objects (interfaces, namespaces and dictionaries) by checking for an existing partial object, and promoting @@ -6013,10 +7169,11 @@ class Parser(Tokenizer): existingObj = self.globalScope()._lookupIdentifier(identifier) if existingObj: if not isinstance(existingObj, constructor): - raise WebIDLError("%s has the same name as " - "non-%s object" % - (prettyname.capitalize(), prettyname), - [location, existingObj.location]) + raise WebIDLError( + "%s has the same name as " + "non-%s object" % (prettyname.capitalize(), prettyname), + [location, existingObj.location], + ) existingObj.setNonPartial(*nonPartialArgs) return existingObj except Exception as ex: @@ -6029,20 +7186,20 @@ class Parser(Tokenizer): def p_InterfaceOrMixin(self, p): """ - InterfaceOrMixin : InterfaceRest - | MixinRest + InterfaceOrMixin : InterfaceRest + | MixinRest """ p[0] = p[1] def p_CallbackInterface(self, p): """ - CallbackInterface : INTERFACE InterfaceRest + CallbackInterface : INTERFACE InterfaceRest """ 
p[0] = p[2] def p_InterfaceRest(self, p): """ - InterfaceRest : IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON + InterfaceRest : IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) @@ -6050,13 +7207,16 @@ class Parser(Tokenizer): parent = p[2] p[0] = self.handleNonPartialObject( - location, identifier, IDLInterface, + location, + identifier, + IDLInterface, [location, self.globalScope(), identifier, parent, members], - [location, parent, members]) + [location, parent, members], + ) def p_InterfaceForwardDecl(self, p): """ - InterfaceRest : IDENTIFIER SEMICOLON + InterfaceRest : IDENTIFIER SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) @@ -6065,10 +7225,12 @@ class Parser(Tokenizer): if self.globalScope()._lookupIdentifier(identifier): p[0] = self.globalScope()._lookupIdentifier(identifier) if not isinstance(p[0], IDLExternalInterface): - raise WebIDLError("Name collision between external " - "interface declaration for identifier " - "%s and %s" % (identifier.name, p[0]), - [location, p[0].location]) + raise WebIDLError( + "Name collision between external " + "interface declaration for identifier " + "%s and %s" % (identifier.name, p[0]), + [location, p[0].location], + ) return except Exception as ex: if isinstance(ex, WebIDLError): @@ -6079,52 +7241,63 @@ class Parser(Tokenizer): def p_MixinRest(self, p): """ - MixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON + MixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handleNonPartialObject( - location, identifier, IDLInterfaceMixin, + location, + identifier, + IDLInterfaceMixin, [location, self.globalScope(), identifier, members], - [location, members]) + [location, members], + ) def p_Namespace(self, p): """ - Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON + Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handleNonPartialObject( - location, identifier, IDLNamespace, + location, + identifier, + IDLNamespace, [location, self.globalScope(), identifier, members], - [location, None, members]) + [location, None, members], + ) def p_Partial(self, p): """ - Partial : PARTIAL PartialDefinition + Partial : PARTIAL PartialDefinition """ p[0] = p[2] def p_PartialDefinitionInterface(self, p): """ - PartialDefinition : INTERFACE PartialInterfaceOrPartialMixin + PartialDefinition : INTERFACE PartialInterfaceOrPartialMixin """ p[0] = p[2] def p_PartialDefinition(self, p): """ - PartialDefinition : PartialNamespace - | PartialDictionary + PartialDefinition : PartialNamespace + | PartialDictionary """ p[0] = p[1] - def handlePartialObject(self, location, identifier, nonPartialConstructor, - nonPartialConstructorArgs, - partialConstructorArgs): + def handlePartialObject( + self, + location, + identifier, + nonPartialConstructor, + nonPartialConstructorArgs, + partialConstructorArgs, + ): """ This handles partial objects (interfaces, namespaces and dictionaries) by checking for an existing non-partial object, and adding ourselves to @@ -6148,10 +7321,11 @@ class Parser(Tokenizer): nonPartialObject = self.globalScope()._lookupIdentifier(identifier) if 
nonPartialObject: if not isinstance(nonPartialObject, nonPartialConstructor): - raise WebIDLError("Partial %s has the same name as " - "non-%s object" % - (prettyname, prettyname), - [location, nonPartialObject.location]) + raise WebIDLError( + "Partial %s has the same name as " + "non-%s object" % (prettyname, prettyname), + [location, nonPartialObject.location], + ) except Exception as ex: if isinstance(ex, WebIDLError): raise ex @@ -6160,96 +7334,115 @@ class Parser(Tokenizer): if not nonPartialObject: nonPartialObject = nonPartialConstructor( # No members, False for isKnownNonPartial - *(nonPartialConstructorArgs), members=[], isKnownNonPartial=False) + *(nonPartialConstructorArgs), + members=[], + isKnownNonPartial=False + ) partialObject = None if isinstance(nonPartialObject, IDLDictionary): partialObject = IDLPartialDictionary( - *(partialConstructorArgs + [nonPartialObject])) - elif isinstance(nonPartialObject, (IDLInterface, IDLInterfaceMixin, IDLNamespace)): + *(partialConstructorArgs + [nonPartialObject]) + ) + elif isinstance( + nonPartialObject, (IDLInterface, IDLInterfaceMixin, IDLNamespace) + ): partialObject = IDLPartialInterfaceOrNamespace( - *(partialConstructorArgs + [nonPartialObject])) + *(partialConstructorArgs + [nonPartialObject]) + ) else: - raise WebIDLError("Unknown partial object type %s" % - type(partialObject), - [location]) + raise WebIDLError( + "Unknown partial object type %s" % type(partialObject), [location] + ) return partialObject def p_PartialInterfaceOrPartialMixin(self, p): """ - PartialInterfaceOrPartialMixin : PartialInterfaceRest - | PartialMixinRest + PartialInterfaceOrPartialMixin : PartialInterfaceRest + | PartialMixinRest """ p[0] = p[1] def p_PartialInterfaceRest(self, p): """ - PartialInterfaceRest : IDENTIFIER LBRACE PartialInterfaceMembers RBRACE SEMICOLON + PartialInterfaceRest : IDENTIFIER LBRACE PartialInterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) members = p[3] p[0] = self.handlePartialObject( - location, identifier, IDLInterface, + location, + identifier, + IDLInterface, [location, self.globalScope(), identifier, None], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialMixinRest(self, p): """ - PartialMixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON + PartialMixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLInterfaceMixin, + location, + identifier, + IDLInterfaceMixin, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialNamespace(self, p): """ - PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON + PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLNamespace, + location, + identifier, + IDLNamespace, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialDictionary(self, p): """ - PartialDictionary : DICTIONARY IDENTIFIER LBRACE DictionaryMembers RBRACE SEMICOLON + PartialDictionary : DICTIONARY IDENTIFIER LBRACE 
DictionaryMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLDictionary, + location, + identifier, + IDLDictionary, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_Inheritance(self, p): """ - Inheritance : COLON ScopedName + Inheritance : COLON ScopedName """ p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2]) def p_InheritanceEmpty(self, p): """ - Inheritance : + Inheritance : """ pass def p_InterfaceMembers(self, p): """ - InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers + InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers """ p[0] = [p[2]] @@ -6260,26 +7453,26 @@ class Parser(Tokenizer): def p_InterfaceMembersEmpty(self, p): """ - InterfaceMembers : + InterfaceMembers : """ p[0] = [] def p_InterfaceMember(self, p): """ - InterfaceMember : PartialInterfaceMember - | Constructor + InterfaceMember : PartialInterfaceMember + | Constructor """ p[0] = p[1] def p_Constructor(self, p): """ - Constructor : CONSTRUCTOR LPAREN ArgumentList RPAREN SEMICOLON + Constructor : CONSTRUCTOR LPAREN ArgumentList RPAREN SEMICOLON """ p[0] = IDLConstructor(self.getLocation(p, 1), p[3], "constructor") def p_PartialInterfaceMembers(self, p): """ - PartialInterfaceMembers : ExtendedAttributeList PartialInterfaceMember PartialInterfaceMembers + PartialInterfaceMembers : ExtendedAttributeList PartialInterfaceMember PartialInterfaceMembers """ p[0] = [p[2]] @@ -6290,27 +7483,26 @@ class Parser(Tokenizer): def p_PartialInterfaceMembersEmpty(self, p): """ - PartialInterfaceMembers : + PartialInterfaceMembers : """ p[0] = [] def p_PartialInterfaceMember(self, p): """ - PartialInterfaceMember : Const - | AttributeOrOperationOrMaplikeOrSetlikeOrIterable + PartialInterfaceMember : Const + | AttributeOrOperationOrMaplikeOrSetlikeOrIterable """ p[0] = p[1] - def p_MixinMembersEmpty(self, p): """ - MixinMembers : + MixinMembers : """ p[0] = [] def p_MixinMembers(self, p): """ - MixinMembers : ExtendedAttributeList MixinMember MixinMembers + MixinMembers : ExtendedAttributeList MixinMember MixinMembers """ p[0] = [p[2]] @@ -6321,15 +7513,15 @@ class Parser(Tokenizer): def p_MixinMember(self, p): """ - MixinMember : Const - | Attribute - | Operation + MixinMember : Const + | Attribute + | Operation """ p[0] = p[1] def p_Dictionary(self, p): """ - Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON + Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) @@ -6338,8 +7530,8 @@ class Parser(Tokenizer): def p_DictionaryMembers(self, p): """ - DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers - | + DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers + | """ if len(p) == 1: # We're at the end of the list @@ -6351,21 +7543,26 @@ class Parser(Tokenizer): def p_DictionaryMemberRequired(self, p): """ - DictionaryMember : REQUIRED TypeWithExtendedAttributes IDENTIFIER SEMICOLON + DictionaryMember : REQUIRED TypeWithExtendedAttributes IDENTIFIER SEMICOLON """ # These quack a lot like required arguments, so just treat them that way. 
t = p[2] assert isinstance(t, IDLType) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3]) - p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, - optional=False, - defaultValue=None, variadic=False, - dictionaryMember=True) + p[0] = IDLArgument( + self.getLocation(p, 3), + identifier, + t, + optional=False, + defaultValue=None, + variadic=False, + dictionaryMember=True, + ) def p_DictionaryMember(self, p): """ - DictionaryMember : Type IDENTIFIER Default SEMICOLON + DictionaryMember : Type IDENTIFIER Default SEMICOLON """ # These quack a lot like optional arguments, so just treat them that way. t = p[1] @@ -6376,15 +7573,21 @@ class Parser(Tokenizer): # Any attributes that precede this may apply to the type, so # we configure the argument to forward type attributes down instead of producing # a parse error - p[0] = IDLArgument(self.getLocation(p, 2), identifier, t, - optional=True, - defaultValue=defaultValue, variadic=False, - dictionaryMember=True, allowTypeAttributes=True) + p[0] = IDLArgument( + self.getLocation(p, 2), + identifier, + t, + optional=True, + defaultValue=defaultValue, + variadic=False, + dictionaryMember=True, + allowTypeAttributes=True, + ) def p_Default(self, p): """ - Default : EQUALS DefaultValue - | + Default : EQUALS DefaultValue + | """ if len(p) > 1: p[0] = p[2] @@ -6393,9 +7596,9 @@ class Parser(Tokenizer): def p_DefaultValue(self, p): """ - DefaultValue : ConstValue - | LBRACKET RBRACKET - | LBRACE RBRACE + DefaultValue : ConstValue + | LBRACKET RBRACKET + | LBRACE RBRACE """ if len(p) == 2: p[0] = p[1] @@ -6409,19 +7612,25 @@ class Parser(Tokenizer): def p_DefaultValueNull(self, p): """ - DefaultValue : NULL + DefaultValue : NULL """ p[0] = IDLNullValue(self.getLocation(p, 1)) + def p_DefaultValueUndefined(self, p): + """ + DefaultValue : UNDEFINED + """ + p[0] = IDLUndefinedValue(self.getLocation(p, 1)) + def p_Exception(self, p): """ - Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON + Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON """ pass def p_Enum(self, p): """ - Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON + Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) @@ -6432,79 +7641,90 @@ class Parser(Tokenizer): def p_EnumValueList(self, p): """ - EnumValueList : STRING EnumValueListComma + EnumValueList : STRING EnumValueListComma """ p[0] = [p[1]] p[0].extend(p[2]) def p_EnumValueListComma(self, p): """ - EnumValueListComma : COMMA EnumValueListString + EnumValueListComma : COMMA EnumValueListString """ p[0] = p[2] def p_EnumValueListCommaEmpty(self, p): """ - EnumValueListComma : + EnumValueListComma : """ p[0] = [] def p_EnumValueListString(self, p): """ - EnumValueListString : STRING EnumValueListComma + EnumValueListString : STRING EnumValueListComma """ p[0] = [p[1]] p[0].extend(p[2]) def p_EnumValueListStringEmpty(self, p): """ - EnumValueListString : + EnumValueListString : """ p[0] = [] def p_CallbackRest(self, p): """ - CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON + CallbackRest : IDENTIFIER EQUALS Type LPAREN ArgumentList RPAREN SEMICOLON """ identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) - p[0] = IDLCallback(self.getLocation(p, 1), self.globalScope(), - identifier, p[3], p[5], isConstructor=False) + p[0] = IDLCallback( + self.getLocation(p, 1), + self.globalScope(), 
+ identifier, + p[3], + p[5], + isConstructor=False, + ) def p_CallbackConstructorRest(self, p): """ - CallbackConstructorRest : CONSTRUCTOR IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON + CallbackConstructorRest : CONSTRUCTOR IDENTIFIER EQUALS Type LPAREN ArgumentList RPAREN SEMICOLON """ identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) - p[0] = IDLCallback(self.getLocation(p, 2), self.globalScope(), - identifier, p[4], p[6], isConstructor=True) + p[0] = IDLCallback( + self.getLocation(p, 2), + self.globalScope(), + identifier, + p[4], + p[6], + isConstructor=True, + ) def p_ExceptionMembers(self, p): """ - ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers - | + ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers + | """ pass def p_Typedef(self, p): """ - Typedef : TYPEDEF TypeWithExtendedAttributes IDENTIFIER SEMICOLON + Typedef : TYPEDEF TypeWithExtendedAttributes IDENTIFIER SEMICOLON """ - typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(), - p[2], p[3]) + typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(), p[2], p[3]) p[0] = typedef def p_IncludesStatement(self, p): """ - IncludesStatement : ScopedName INCLUDES ScopedName SEMICOLON + IncludesStatement : ScopedName INCLUDES ScopedName SEMICOLON """ - assert(p[2] == "includes") + assert p[2] == "includes" interface = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1]) mixin = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3]) p[0] = IDLIncludesStatement(self.getLocation(p, 1), interface, mixin) def p_Const(self, p): """ - Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON + Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON """ location = self.getLocation(p, 1) type = p[2] @@ -6514,7 +7734,7 @@ class Parser(Tokenizer): def p_ConstValueBoolean(self, p): """ - ConstValue : BooleanLiteral + ConstValue : BooleanLiteral """ location = self.getLocation(p, 1) booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean] @@ -6522,7 +7742,7 @@ class Parser(Tokenizer): def p_ConstValueInteger(self, p): """ - ConstValue : INTEGER + ConstValue : INTEGER """ location = self.getLocation(p, 1) @@ -6536,14 +7756,16 @@ class Parser(Tokenizer): def p_ConstValueFloat(self, p): """ - ConstValue : FLOATLITERAL + ConstValue : FLOATLITERAL """ location = self.getLocation(p, 1) - p[0] = IDLValue(location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1]) + p[0] = IDLValue( + location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1] + ) def p_ConstValueString(self, p): """ - ConstValue : STRING + ConstValue : STRING """ location = self.getLocation(p, 1) stringType = BuiltinTypes[IDLBuiltinType.Types.domstring] @@ -6551,35 +7773,37 @@ class Parser(Tokenizer): def p_BooleanLiteralTrue(self, p): """ - BooleanLiteral : TRUE + BooleanLiteral : TRUE """ p[0] = True def p_BooleanLiteralFalse(self, p): """ - BooleanLiteral : FALSE + BooleanLiteral : FALSE """ p[0] = False def p_AttributeOrOperationOrMaplikeOrSetlikeOrIterable(self, p): """ - AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute - | Maplike - | Setlike - | Iterable - | Operation + AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute + | Maplike + | Setlike + | Iterable + | AsyncIterable + | Operation """ p[0] = p[1] def p_Iterable(self, p): """ - Iterable : ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON - | ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON + Iterable : ITERABLE LT 
TypeWithExtendedAttributes GT SEMICOLON + | ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON """ location = self.getLocation(p, 2) - identifier = IDLUnresolvedIdentifier(location, "__iterable", - allowDoubleUnderscore=True) - if (len(p) > 6): + identifier = IDLUnresolvedIdentifier( + location, "__iterable", allowDoubleUnderscore=True + ) + if len(p) > 6: keyType = p[3] valueType = p[5] else: @@ -6588,61 +7812,98 @@ class Parser(Tokenizer): p[0] = IDLIterable(location, identifier, keyType, valueType, self.globalScope()) + def p_AsyncIterable(self, p): + """ + AsyncIterable : ASYNC ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON + | ASYNC ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON + | ASYNC ITERABLE LT TypeWithExtendedAttributes GT LPAREN ArgumentList RPAREN SEMICOLON + | ASYNC ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT LPAREN ArgumentList RPAREN SEMICOLON + """ + location = self.getLocation(p, 2) + identifier = IDLUnresolvedIdentifier( + location, "__iterable", allowDoubleUnderscore=True + ) + if len(p) == 12: + keyType = p[4] + valueType = p[6] + argList = p[9] + elif len(p) == 10: + keyType = None + valueType = p[4] + argList = p[7] + elif len(p) == 9: + keyType = p[4] + valueType = p[6] + argList = [] + else: + keyType = None + valueType = p[4] + argList = [] + + p[0] = IDLAsyncIterable( + location, identifier, keyType, valueType, argList, self.globalScope() + ) + def p_Setlike(self, p): """ - Setlike : ReadOnly SETLIKE LT TypeWithExtendedAttributes GT SEMICOLON + Setlike : ReadOnly SETLIKE LT TypeWithExtendedAttributes GT SEMICOLON """ readonly = p[1] maplikeOrSetlikeType = p[2] location = self.getLocation(p, 2) - identifier = IDLUnresolvedIdentifier(location, "__setlike", - allowDoubleUnderscore=True) + identifier = IDLUnresolvedIdentifier( + location, "__setlike", allowDoubleUnderscore=True + ) keyType = p[4] valueType = keyType - p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType) + p[0] = IDLMaplikeOrSetlike( + location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ) def p_Maplike(self, p): """ - Maplike : ReadOnly MAPLIKE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON + Maplike : ReadOnly MAPLIKE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON """ readonly = p[1] maplikeOrSetlikeType = p[2] location = self.getLocation(p, 2) - identifier = IDLUnresolvedIdentifier(location, "__maplike", - allowDoubleUnderscore=True) + identifier = IDLUnresolvedIdentifier( + location, "__maplike", allowDoubleUnderscore=True + ) keyType = p[4] valueType = p[6] - p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType) + p[0] = IDLMaplikeOrSetlike( + location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ) def p_AttributeWithQualifier(self, p): """ - Attribute : Qualifier AttributeRest + Attribute : Qualifier AttributeRest """ static = IDLInterfaceMember.Special.Static in p[1] stringifier = IDLInterfaceMember.Special.Stringifier in p[1] (location, identifier, type, readonly) = p[2] - p[0] = IDLAttribute(location, identifier, type, readonly, - static=static, stringifier=stringifier) + p[0] = IDLAttribute( + location, identifier, type, readonly, static=static, stringifier=stringifier + ) def p_AttributeInherited(self, p): """ - Attribute : INHERIT AttributeRest + Attribute : INHERIT AttributeRest """ 
(location, identifier, type, readonly) = p[2] p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True) def p_Attribute(self, p): """ - Attribute : AttributeRest + Attribute : AttributeRest """ (location, identifier, type, readonly) = p[1] p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False) def p_AttributeRest(self, p): """ - AttributeRest : ReadOnly ATTRIBUTE TypeWithExtendedAttributes AttributeName SEMICOLON + AttributeRest : ReadOnly ATTRIBUTE TypeWithExtendedAttributes AttributeName SEMICOLON """ location = self.getLocation(p, 2) readonly = p[1] @@ -6652,26 +7913,27 @@ class Parser(Tokenizer): def p_ReadOnly(self, p): """ - ReadOnly : READONLY + ReadOnly : READONLY """ p[0] = True def p_ReadOnlyEmpty(self, p): """ - ReadOnly : + ReadOnly : """ p[0] = False def p_Operation(self, p): """ - Operation : Qualifiers OperationRest + Operation : Qualifiers OperationRest """ qualifiers = p[1] # Disallow duplicates in the qualifier set if not len(set(qualifiers)) == len(qualifiers): - raise WebIDLError("Duplicate qualifiers are not allowed", - [self.getLocation(p, 1)]) + raise WebIDLError( + "Duplicate qualifiers are not allowed", [self.getLocation(p, 1)] + ) static = IDLInterfaceMember.Special.Static in p[1] # If static is there that's all that's allowed. This is disallowed @@ -6690,8 +7952,10 @@ class Parser(Tokenizer): if getter or deleter: if setter: - raise WebIDLError("getter and deleter are incompatible with setter", - [self.getLocation(p, 1)]) + raise WebIDLError( + "getter and deleter are incompatible with setter", + [self.getLocation(p, 1)], + ) (returnType, identifier, arguments) = p[2] @@ -6701,234 +7965,285 @@ class Parser(Tokenizer): if getter or deleter: if len(arguments) != 1: - raise WebIDLError("%s has wrong number of arguments" % - ("getter" if getter else "deleter"), - [self.getLocation(p, 2)]) + raise WebIDLError( + "%s has wrong number of arguments" + % ("getter" if getter else "deleter"), + [self.getLocation(p, 2)], + ) argType = arguments[0].type if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]: specialType = IDLMethod.NamedOrIndexed.Named elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]: specialType = IDLMethod.NamedOrIndexed.Indexed if deleter: - raise WebIDLError("There is no such thing as an indexed deleter.", - [self.getLocation(p, 1)]) + raise WebIDLError( + "There is no such thing as an indexed deleter.", + [self.getLocation(p, 1)], + ) else: - raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" % - ("getter" if getter else "deleter"), - [arguments[0].location]) + raise WebIDLError( + "%s has wrong argument type (must be DOMString or UnsignedLong)" + % ("getter" if getter else "deleter"), + [arguments[0].location], + ) if arguments[0].optional or arguments[0].variadic: - raise WebIDLError("%s cannot have %s argument" % - ("getter" if getter else "deleter", - "optional" if arguments[0].optional else "variadic"), - [arguments[0].location]) + raise WebIDLError( + "%s cannot have %s argument" + % ( + "getter" if getter else "deleter", + "optional" if arguments[0].optional else "variadic", + ), + [arguments[0].location], + ) if getter: if returnType.isUndefined(): - raise WebIDLError("getter cannot have undefined return type", - [self.getLocation(p, 2)]) + raise WebIDLError( + "getter cannot have undefined return type", [self.getLocation(p, 2)] + ) if setter: if len(arguments) != 2: - raise WebIDLError("setter has wrong number of arguments", - [self.getLocation(p, 2)]) + raise 
WebIDLError( + "setter has wrong number of arguments", [self.getLocation(p, 2)] + ) argType = arguments[0].type if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]: specialType = IDLMethod.NamedOrIndexed.Named elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]: specialType = IDLMethod.NamedOrIndexed.Indexed else: - raise WebIDLError("settter has wrong argument type (must be DOMString or UnsignedLong)", - [arguments[0].location]) + raise WebIDLError( + "settter has wrong argument type (must be DOMString or UnsignedLong)", + [arguments[0].location], + ) if arguments[0].optional or arguments[0].variadic: - raise WebIDLError("setter cannot have %s argument" % - ("optional" if arguments[0].optional else "variadic"), - [arguments[0].location]) + raise WebIDLError( + "setter cannot have %s argument" + % ("optional" if arguments[0].optional else "variadic"), + [arguments[0].location], + ) if arguments[1].optional or arguments[1].variadic: - raise WebIDLError("setter cannot have %s argument" % - ("optional" if arguments[1].optional else "variadic"), - [arguments[1].location]) + raise WebIDLError( + "setter cannot have %s argument" + % ("optional" if arguments[1].optional else "variadic"), + [arguments[1].location], + ) if stringifier: if len(arguments) != 0: - raise WebIDLError("stringifier has wrong number of arguments", - [self.getLocation(p, 2)]) + raise WebIDLError( + "stringifier has wrong number of arguments", + [self.getLocation(p, 2)], + ) if not returnType.isDOMString(): - raise WebIDLError("stringifier must have DOMString return type", - [self.getLocation(p, 2)]) + raise WebIDLError( + "stringifier must have DOMString return type", + [self.getLocation(p, 2)], + ) # identifier might be None. This is only permitted for special methods. 
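# Editor's aside (not part of the patch): when a special operation (getter,
# setter, deleter, legacycaller, stringifier) is declared without a name, the
# code below synthesizes a double-underscore identifier from the applicable
# markers, for example for an anonymous named getter:
special_markers = ("named", "getter", "", "", "", "")
print("__%s%s%s%s%s%s" % special_markers)  # -> __namedgetter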
if not identifier: - if (not getter and not setter and - not deleter and not legacycaller and not stringifier): - raise WebIDLError("Identifier required for non-special methods", - [self.getLocation(p, 2)]) + if ( + not getter + and not setter + and not deleter + and not legacycaller + and not stringifier + ): + raise WebIDLError( + "Identifier required for non-special methods", + [self.getLocation(p, 2)], + ) location = BuiltinLocation("") identifier = IDLUnresolvedIdentifier( location, - "__%s%s%s%s%s%s" % - ("named" if specialType == IDLMethod.NamedOrIndexed.Named else - "indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "", - "getter" if getter else "", - "setter" if setter else "", - "deleter" if deleter else "", - "legacycaller" if legacycaller else "", - "stringifier" if stringifier else ""), - allowDoubleUnderscore=True) + "__%s%s%s%s%s%s" + % ( + "named" + if specialType == IDLMethod.NamedOrIndexed.Named + else "indexed" + if specialType == IDLMethod.NamedOrIndexed.Indexed + else "", + "getter" if getter else "", + "setter" if setter else "", + "deleter" if deleter else "", + "legacycaller" if legacycaller else "", + "stringifier" if stringifier else "", + ), + allowDoubleUnderscore=True, + ) - method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments, - static=static, getter=getter, setter=setter, - deleter=deleter, specialType=specialType, - legacycaller=legacycaller, stringifier=stringifier) + method = IDLMethod( + self.getLocation(p, 2), + identifier, + returnType, + arguments, + static=static, + getter=getter, + setter=setter, + deleter=deleter, + specialType=specialType, + legacycaller=legacycaller, + stringifier=stringifier, + ) p[0] = method def p_Stringifier(self, p): """ - Operation : STRINGIFIER SEMICOLON + Operation : STRINGIFIER SEMICOLON """ - identifier = IDLUnresolvedIdentifier(BuiltinLocation(""), - "__stringifier", - allowDoubleUnderscore=True) - method = IDLMethod(self.getLocation(p, 1), - identifier, - returnType=BuiltinTypes[IDLBuiltinType.Types.domstring], - arguments=[], - stringifier=True) + identifier = IDLUnresolvedIdentifier( + BuiltinLocation(""), + "__stringifier", + allowDoubleUnderscore=True, + ) + method = IDLMethod( + self.getLocation(p, 1), + identifier, + returnType=BuiltinTypes[IDLBuiltinType.Types.domstring], + arguments=[], + stringifier=True, + ) p[0] = method def p_QualifierStatic(self, p): """ - Qualifier : STATIC + Qualifier : STATIC """ p[0] = [IDLInterfaceMember.Special.Static] def p_QualifierStringifier(self, p): """ - Qualifier : STRINGIFIER + Qualifier : STRINGIFIER """ p[0] = [IDLInterfaceMember.Special.Stringifier] def p_Qualifiers(self, p): """ - Qualifiers : Qualifier - | Specials + Qualifiers : Qualifier + | Specials """ p[0] = p[1] def p_Specials(self, p): """ - Specials : Special Specials + Specials : Special Specials """ p[0] = [p[1]] p[0].extend(p[2]) def p_SpecialsEmpty(self, p): """ - Specials : + Specials : """ p[0] = [] def p_SpecialGetter(self, p): """ - Special : GETTER + Special : GETTER """ p[0] = IDLMethod.Special.Getter def p_SpecialSetter(self, p): """ - Special : SETTER + Special : SETTER """ p[0] = IDLMethod.Special.Setter def p_SpecialDeleter(self, p): """ - Special : DELETER + Special : DELETER """ p[0] = IDLMethod.Special.Deleter def p_SpecialLegacyCaller(self, p): """ - Special : LEGACYCALLER + Special : LEGACYCALLER """ p[0] = IDLMethod.Special.LegacyCaller def p_OperationRest(self, p): """ - OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON 
+ OperationRest : Type OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON """ p[0] = (p[1], p[2], p[4]) def p_OptionalIdentifier(self, p): """ - OptionalIdentifier : IDENTIFIER + OptionalIdentifier : IDENTIFIER """ p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) def p_OptionalIdentifierEmpty(self, p): """ - OptionalIdentifier : + OptionalIdentifier : """ pass def p_ArgumentList(self, p): """ - ArgumentList : Argument Arguments + ArgumentList : Argument Arguments """ p[0] = [p[1]] if p[1] else [] p[0].extend(p[2]) def p_ArgumentListEmpty(self, p): """ - ArgumentList : + ArgumentList : """ p[0] = [] def p_Arguments(self, p): """ - Arguments : COMMA Argument Arguments + Arguments : COMMA Argument Arguments """ p[0] = [p[2]] if p[2] else [] p[0].extend(p[3]) def p_ArgumentsEmpty(self, p): """ - Arguments : + Arguments : """ p[0] = [] def p_Argument(self, p): """ - Argument : ExtendedAttributeList ArgumentRest + Argument : ExtendedAttributeList ArgumentRest """ p[0] = p[2] p[0].addExtendedAttributes(p[1]) def p_ArgumentRestOptional(self, p): """ - ArgumentRest : OPTIONAL TypeWithExtendedAttributes ArgumentName Default + ArgumentRest : OPTIONAL TypeWithExtendedAttributes ArgumentName Default """ t = p[2] assert isinstance(t, IDLType) # Arg names can be reserved identifiers - identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3], - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.getLocation(p, 3), p[3], allowForbidden=True + ) defaultValue = p[4] - # We can't test t.isAny() here and give it a default value as needed, # since at this point t is not a fully resolved type yet (e.g. it might # be a typedef). We'll handle the 'any' case in IDLArgument.complete. - p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, True, defaultValue, False) + p[0] = IDLArgument( + self.getLocation(p, 3), identifier, t, True, defaultValue, False + ) def p_ArgumentRest(self, p): """ - ArgumentRest : Type Ellipsis ArgumentName + ArgumentRest : Type Ellipsis ArgumentName """ t = p[1] assert isinstance(t, IDLType) # Arg names can be reserved identifiers - identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3], - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.getLocation(p, 3), p[3], allowForbidden=True + ) variadic = p[2] @@ -6940,90 +8255,98 @@ class Parser(Tokenizer): # Any attributes that precede this may apply to the type, so # we configure the argument to forward type attributes down instead of producing # a parse error - p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, variadic, None, variadic, allowTypeAttributes=True) + p[0] = IDLArgument( + self.getLocation(p, 3), + identifier, + t, + variadic, + None, + variadic, + allowTypeAttributes=True, + ) def p_ArgumentName(self, p): """ - ArgumentName : IDENTIFIER - | ArgumentNameKeyword + ArgumentName : IDENTIFIER + | ArgumentNameKeyword """ p[0] = p[1] def p_ArgumentNameKeyword(self, p): """ - ArgumentNameKeyword : ASYNC - | ATTRIBUTE - | CALLBACK - | CONST - | CONSTRUCTOR - | DELETER - | DICTIONARY - | ENUM - | EXCEPTION - | GETTER - | INCLUDES - | INHERIT - | INTERFACE - | ITERABLE - | LEGACYCALLER - | MAPLIKE - | MIXIN - | NAMESPACE - | PARTIAL - | READONLY - | REQUIRED - | SERIALIZER - | SETLIKE - | SETTER - | STATIC - | STRINGIFIER - | TYPEDEF - | UNRESTRICTED + ArgumentNameKeyword : ASYNC + | ATTRIBUTE + | CALLBACK + | CONST + | CONSTRUCTOR + | DELETER + | DICTIONARY + | ENUM + | EXCEPTION + | GETTER + | INCLUDES + | INHERIT + | INTERFACE + | ITERABLE + | LEGACYCALLER + 
| MAPLIKE + | MIXIN + | NAMESPACE + | PARTIAL + | READONLY + | REQUIRED + | SERIALIZER + | SETLIKE + | SETTER + | STATIC + | STRINGIFIER + | TYPEDEF + | UNRESTRICTED """ p[0] = p[1] def p_AttributeName(self, p): """ - AttributeName : IDENTIFIER - | AttributeNameKeyword + AttributeName : IDENTIFIER + | AttributeNameKeyword """ p[0] = p[1] def p_AttributeNameKeyword(self, p): """ - AttributeNameKeyword : ASYNC - | REQUIRED + AttributeNameKeyword : ASYNC + | REQUIRED """ p[0] = p[1] def p_Ellipsis(self, p): """ - Ellipsis : ELLIPSIS + Ellipsis : ELLIPSIS """ p[0] = True def p_EllipsisEmpty(self, p): """ - Ellipsis : + Ellipsis : """ p[0] = False def p_ExceptionMember(self, p): """ - ExceptionMember : Const - | ExceptionField + ExceptionMember : Const + | ExceptionField """ pass def p_ExceptionField(self, p): """ - ExceptionField : Type IDENTIFIER SEMICOLON + ExceptionField : Type IDENTIFIER SEMICOLON """ pass def p_ExtendedAttributeList(self, p): """ - ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET + ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET """ p[0] = [p[2]] if p[3]: @@ -7031,131 +8354,131 @@ class Parser(Tokenizer): def p_ExtendedAttributeListEmpty(self, p): """ - ExtendedAttributeList : + ExtendedAttributeList : """ p[0] = [] def p_ExtendedAttribute(self, p): """ - ExtendedAttribute : ExtendedAttributeNoArgs - | ExtendedAttributeArgList - | ExtendedAttributeIdent - | ExtendedAttributeNamedArgList - | ExtendedAttributeIdentList + ExtendedAttribute : ExtendedAttributeNoArgs + | ExtendedAttributeArgList + | ExtendedAttributeIdent + | ExtendedAttributeWildcard + | ExtendedAttributeNamedArgList + | ExtendedAttributeIdentList """ p[0] = IDLExtendedAttribute(self.getLocation(p, 1), p[1]) def p_ExtendedAttributeEmpty(self, p): """ - ExtendedAttribute : + ExtendedAttribute : """ pass def p_ExtendedAttributes(self, p): """ - ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes + ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes """ p[0] = [p[2]] if p[2] else [] p[0].extend(p[3]) def p_ExtendedAttributesEmpty(self, p): """ - ExtendedAttributes : + ExtendedAttributes : """ p[0] = [] def p_Other(self, p): """ - Other : INTEGER - | FLOATLITERAL - | IDENTIFIER - | STRING - | OTHER - | ELLIPSIS - | COLON - | SCOPE - | SEMICOLON - | LT - | EQUALS - | GT - | QUESTIONMARK - | DOMSTRING - | BYTESTRING - | USVSTRING - | UTF8STRING - | JSSTRING - | PROMISE - | ANY - | BOOLEAN - | BYTE - | DOUBLE - | FALSE - | FLOAT - | LONG - | NULL - | OBJECT - | OCTET - | OR - | OPTIONAL - | RECORD - | SEQUENCE - | SHORT - | SYMBOL - | TRUE - | UNSIGNED - | UNDEFINED - | ArgumentNameKeyword + Other : INTEGER + | FLOATLITERAL + | IDENTIFIER + | STRING + | OTHER + | ELLIPSIS + | COLON + | SCOPE + | SEMICOLON + | LT + | EQUALS + | GT + | QUESTIONMARK + | ASTERISK + | DOMSTRING + | BYTESTRING + | USVSTRING + | UTF8STRING + | JSSTRING + | PROMISE + | ANY + | BOOLEAN + | BYTE + | DOUBLE + | FALSE + | FLOAT + | LONG + | NULL + | OBJECT + | OCTET + | OR + | OPTIONAL + | RECORD + | SEQUENCE + | SHORT + | SYMBOL + | TRUE + | UNSIGNED + | UNDEFINED + | ArgumentNameKeyword """ pass def p_OtherOrComma(self, p): """ - OtherOrComma : Other - | COMMA + OtherOrComma : Other + | COMMA """ pass def p_TypeSingleType(self, p): """ - Type : SingleType + Type : SingleType """ p[0] = p[1] def p_TypeUnionType(self, p): """ - Type : UnionType Null + Type : UnionType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_TypeWithExtendedAttributes(self, p): """ 
- TypeWithExtendedAttributes : ExtendedAttributeList Type + TypeWithExtendedAttributes : ExtendedAttributeList Type """ p[0] = p[2].withExtendedAttributes(p[1]) def p_SingleTypeDistinguishableType(self, p): """ - SingleType : DistinguishableType + SingleType : DistinguishableType """ p[0] = p[1] def p_SingleTypeAnyType(self, p): """ - SingleType : ANY + SingleType : ANY """ p[0] = BuiltinTypes[IDLBuiltinType.Types.any] - # Note: Promise is allowed, so we want to parametrize on ReturnType, - # not Type. Promise types can't be null, hence no "Null" in there. def p_SingleTypePromiseType(self, p): """ - SingleType : PROMISE LT ReturnType GT + SingleType : PROMISE LT Type GT """ p[0] = IDLPromiseType(self.getLocation(p, 1), p[3]) def p_UnionType(self, p): """ - UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN + UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN """ types = [p[2], p[4]] types.extend(p[5]) @@ -7163,35 +8486,36 @@ class Parser(Tokenizer): def p_UnionMemberTypeDistinguishableType(self, p): """ - UnionMemberType : ExtendedAttributeList DistinguishableType + UnionMemberType : ExtendedAttributeList DistinguishableType """ p[0] = p[2].withExtendedAttributes(p[1]) def p_UnionMemberType(self, p): """ - UnionMemberType : UnionType Null + UnionMemberType : UnionType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_UnionMemberTypes(self, p): """ - UnionMemberTypes : OR UnionMemberType UnionMemberTypes + UnionMemberTypes : OR UnionMemberType UnionMemberTypes """ p[0] = [p[2]] p[0].extend(p[3]) def p_UnionMemberTypesEmpty(self, p): """ - UnionMemberTypes : + UnionMemberTypes : """ p[0] = [] def p_DistinguishableType(self, p): """ - DistinguishableType : PrimitiveType Null - | ARRAYBUFFER Null - | READABLESTREAM Null - | OBJECT Null + DistinguishableType : PrimitiveType Null + | ARRAYBUFFER Null + | READABLESTREAM Null + | OBJECT Null + | UNDEFINED Null """ if p[1] == "object": type = BuiltinTypes[IDLBuiltinType.Types.object] @@ -7199,6 +8523,8 @@ class Parser(Tokenizer): type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer] elif p[1] == "ReadableStream": type = BuiltinTypes[IDLBuiltinType.Types.ReadableStream] + elif p[1] == "undefined": + type = BuiltinTypes[IDLBuiltinType.Types.undefined] else: type = BuiltinTypes[p[1]] @@ -7206,13 +8532,13 @@ class Parser(Tokenizer): def p_DistinguishableTypeStringType(self, p): """ - DistinguishableType : StringType Null + DistinguishableType : StringType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_DistinguishableTypeSequenceType(self, p): """ - DistinguishableType : SEQUENCE LT TypeWithExtendedAttributes GT Null + DistinguishableType : SEQUENCE LT TypeWithExtendedAttributes GT Null """ innerType = p[3] type = IDLSequenceType(self.getLocation(p, 1), innerType) @@ -7220,23 +8546,32 @@ class Parser(Tokenizer): def p_DistinguishableTypeRecordType(self, p): """ - DistinguishableType : RECORD LT StringType COMMA TypeWithExtendedAttributes GT Null + DistinguishableType : RECORD LT StringType COMMA TypeWithExtendedAttributes GT Null """ keyType = p[3] valueType = p[5] type = IDLRecordType(self.getLocation(p, 1), keyType, valueType) p[0] = self.handleNullable(type, p[7]) + def p_DistinguishableTypeObservableArrayType(self, p): + """ + DistinguishableType : OBSERVABLEARRAY LT TypeWithExtendedAttributes GT Null + """ + innerType = p[3] + type = IDLObservableArrayType(self.getLocation(p, 1), innerType) + p[0] = self.handleNullable(type, p[5]) + def p_DistinguishableTypeScopedName(self, p): """ - 
DistinguishableType : ScopedName Null + DistinguishableType : ScopedName Null """ assert isinstance(p[1], IDLUnresolvedIdentifier) if p[1].name == "Promise": - raise WebIDLError("Promise used without saying what it's " - "parametrized over", - [self.getLocation(p, 1)]) + raise WebIDLError( + "Promise used without saying what it's " "parametrized over", + [self.getLocation(p, 1)], + ) type = None @@ -7245,8 +8580,9 @@ class Parser(Tokenizer): obj = self.globalScope()._lookupIdentifier(p[1]) assert not obj.isType() if obj.isTypedef(): - type = IDLTypedefType(self.getLocation(p, 1), obj.innerType, - obj.identifier.name) + type = IDLTypedefType( + self.getLocation(p, 1), obj.innerType, obj.identifier.name + ) elif obj.isCallback() and not obj.isInterface(): type = IDLCallbackType(obj.location, obj) else: @@ -7261,13 +8597,13 @@ class Parser(Tokenizer): def p_ConstType(self, p): """ - ConstType : PrimitiveType + ConstType : PrimitiveType """ p[0] = BuiltinTypes[p[1]] def p_ConstTypeIdentifier(self, p): """ - ConstType : IDENTIFIER + ConstType : IDENTIFIER """ identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) @@ -7275,110 +8611,110 @@ class Parser(Tokenizer): def p_PrimitiveTypeUint(self, p): """ - PrimitiveType : UnsignedIntegerType + PrimitiveType : UnsignedIntegerType """ p[0] = p[1] def p_PrimitiveTypeBoolean(self, p): """ - PrimitiveType : BOOLEAN + PrimitiveType : BOOLEAN """ p[0] = IDLBuiltinType.Types.boolean def p_PrimitiveTypeByte(self, p): """ - PrimitiveType : BYTE + PrimitiveType : BYTE """ p[0] = IDLBuiltinType.Types.byte def p_PrimitiveTypeOctet(self, p): """ - PrimitiveType : OCTET + PrimitiveType : OCTET """ p[0] = IDLBuiltinType.Types.octet def p_PrimitiveTypeFloat(self, p): """ - PrimitiveType : FLOAT + PrimitiveType : FLOAT """ p[0] = IDLBuiltinType.Types.float def p_PrimitiveTypeUnrestictedFloat(self, p): """ - PrimitiveType : UNRESTRICTED FLOAT + PrimitiveType : UNRESTRICTED FLOAT """ p[0] = IDLBuiltinType.Types.unrestricted_float def p_PrimitiveTypeDouble(self, p): """ - PrimitiveType : DOUBLE + PrimitiveType : DOUBLE """ p[0] = IDLBuiltinType.Types.double def p_PrimitiveTypeUnrestictedDouble(self, p): """ - PrimitiveType : UNRESTRICTED DOUBLE + PrimitiveType : UNRESTRICTED DOUBLE """ p[0] = IDLBuiltinType.Types.unrestricted_double def p_StringType(self, p): """ - StringType : BuiltinStringType + StringType : BuiltinStringType """ p[0] = BuiltinTypes[p[1]] def p_BuiltinStringTypeDOMString(self, p): """ - BuiltinStringType : DOMSTRING + BuiltinStringType : DOMSTRING """ p[0] = IDLBuiltinType.Types.domstring def p_BuiltinStringTypeBytestring(self, p): """ - BuiltinStringType : BYTESTRING + BuiltinStringType : BYTESTRING """ p[0] = IDLBuiltinType.Types.bytestring def p_BuiltinStringTypeUSVString(self, p): """ - BuiltinStringType : USVSTRING + BuiltinStringType : USVSTRING """ p[0] = IDLBuiltinType.Types.usvstring def p_BuiltinStringTypeUTF8String(self, p): """ - BuiltinStringType : UTF8STRING + BuiltinStringType : UTF8STRING """ p[0] = IDLBuiltinType.Types.utf8string def p_BuiltinStringTypeJSString(self, p): """ - BuiltinStringType : JSSTRING + BuiltinStringType : JSSTRING """ p[0] = IDLBuiltinType.Types.jsstring def p_UnsignedIntegerTypeUnsigned(self, p): """ - UnsignedIntegerType : UNSIGNED IntegerType + UnsignedIntegerType : UNSIGNED IntegerType """ # Adding one to a given signed integer type gets you the unsigned type: p[0] = p[2] + 1 def p_UnsignedIntegerType(self, p): """ - UnsignedIntegerType : IntegerType + UnsignedIntegerType : IntegerType """ p[0] = 
p[1] def p_IntegerTypeShort(self, p): """ - IntegerType : SHORT + IntegerType : SHORT """ p[0] = IDLBuiltinType.Types.short def p_IntegerTypeLong(self, p): """ - IntegerType : LONG OptionalLong + IntegerType : LONG OptionalLong """ if p[2]: p[0] = IDLBuiltinType.Types.long_long @@ -7387,55 +8723,43 @@ class Parser(Tokenizer): def p_OptionalLong(self, p): """ - OptionalLong : LONG + OptionalLong : LONG """ p[0] = True def p_OptionalLongEmpty(self, p): """ - OptionalLong : + OptionalLong : """ p[0] = False def p_Null(self, p): """ - Null : QUESTIONMARK - | + Null : QUESTIONMARK + | """ if len(p) > 1: p[0] = self.getLocation(p, 1) else: p[0] = None - def p_ReturnTypeType(self, p): - """ - ReturnType : Type - """ - p[0] = p[1] - - def p_ReturnTypeUndefined(self, p): - """ - ReturnType : UNDEFINED - """ - p[0] = BuiltinTypes[IDLBuiltinType.Types.undefined] - def p_ScopedName(self, p): """ - ScopedName : AbsoluteScopedName - | RelativeScopedName + ScopedName : AbsoluteScopedName + | RelativeScopedName """ p[0] = p[1] def p_AbsoluteScopedName(self, p): """ - AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts + AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts """ assert False pass def p_RelativeScopedName(self, p): """ - RelativeScopedName : IDENTIFIER ScopedNameParts + RelativeScopedName : IDENTIFIER ScopedNameParts """ assert not p[2] # Not implemented! @@ -7443,100 +8767,122 @@ class Parser(Tokenizer): def p_ScopedNameParts(self, p): """ - ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts + ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts """ assert False pass def p_ScopedNamePartsEmpty(self, p): """ - ScopedNameParts : + ScopedNameParts : """ p[0] = None def p_ExtendedAttributeNoArgs(self, p): """ - ExtendedAttributeNoArgs : IDENTIFIER + ExtendedAttributeNoArgs : IDENTIFIER """ p[0] = (p[1],) def p_ExtendedAttributeArgList(self, p): """ - ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN + ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN """ p[0] = (p[1], p[3]) def p_ExtendedAttributeIdent(self, p): """ - ExtendedAttributeIdent : IDENTIFIER EQUALS STRING - | IDENTIFIER EQUALS IDENTIFIER + ExtendedAttributeIdent : IDENTIFIER EQUALS STRING + | IDENTIFIER EQUALS IDENTIFIER + """ + p[0] = (p[1], p[3]) + + def p_ExtendedAttributeWildcard(self, p): + """ + ExtendedAttributeWildcard : IDENTIFIER EQUALS ASTERISK """ p[0] = (p[1], p[3]) def p_ExtendedAttributeNamedArgList(self, p): """ - ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN + ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN """ p[0] = (p[1], p[3], p[5]) def p_ExtendedAttributeIdentList(self, p): """ - ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN + ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN """ p[0] = (p[1], p[4]) def p_IdentifierList(self, p): """ - IdentifierList : IDENTIFIER Identifiers + IdentifierList : IDENTIFIER Identifiers """ idents = list(p[2]) # This is only used for identifier-list-valued extended attributes, and if # we're going to restrict to IDENTIFIER here we should at least allow # escaping with leading '_' as usual for identifiers. 
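# Editor's aside (illustrative, not part of the patch): with the ReturnType
# productions removed, "undefined" is now an ordinary distinguishable type,
# "*" is a real token usable in extended attributes, and ObservableArray<T>
# plus "async iterable" are accepted by the grammar.  A fragment exercising
# the new syntax (names are hypothetical; this only illustrates what parse()
# accepts syntactically, finish() still applies further semantic checks):
NEW_SYNTAX_SAMPLE = """
[Exposed=*]
interface Example {
  undefined clear();
  attribute ObservableArray<DOMString> names;
  async iterable<DOMString>;
};
"""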
ident = p[1] - if ident[0] == '_': + if ident[0] == "_": ident = ident[1:] idents.insert(0, ident) p[0] = idents def p_IdentifiersList(self, p): """ - Identifiers : COMMA IDENTIFIER Identifiers + Identifiers : COMMA IDENTIFIER Identifiers """ idents = list(p[3]) # This is only used for identifier-list-valued extended attributes, and if # we're going to restrict to IDENTIFIER here we should at least allow # escaping with leading '_' as usual for identifiers. ident = p[2] - if ident[0] == '_': + if ident[0] == "_": ident = ident[1:] idents.insert(0, ident) p[0] = idents def p_IdentifiersEmpty(self, p): """ - Identifiers : + Identifiers : """ p[0] = [] def p_error(self, p): if not p: - raise WebIDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both", - [self._filename]) + raise WebIDLError( + "Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both", + [self._filename], + ) else: - raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)]) + raise WebIDLError( + "invalid syntax", + [Location(self.lexer, p.lineno, p.lexpos, self._filename)], + ) - def __init__(self, outputdir='', lexer=None): - Tokenizer.__init__(self, lexer) + def __init__(self, outputdir="", lexer=None, use_builtin_readable_stream=True): + Tokenizer.__init__(self, outputdir, lexer, use_builtin_readable_stream) logger = SqueakyCleanLogger() try: - self.parser = yacc.yacc(module=self, errorlog=logger, debug=False) + self.parser = yacc.yacc( + module=self, + outputdir=outputdir, + errorlog=logger, + debug=False, + write_tables=False, + # Pickling the grammar is a speedup in + # some cases (older Python?) but a + # significant slowdown in others. + # We're not pickling for now, until it + # becomes a speedup again. + # , picklefile='WebIDLGrammar.pkl' + ) finally: logger.reportGrammarErrors() self._globalScope = IDLScope(BuiltinLocation(""), None, None) - self._installBuiltins(self._globalScope) self._productions = [] @@ -7550,12 +8896,16 @@ class Parser(Tokenizer): assert isinstance(scope, IDLScope) # range omits the last value. - for x in range(IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1): + for x in range( + IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1 + ): builtin = BuiltinTypes[x] name = builtin.name - typedef = IDLTypedef(BuiltinLocation(""), scope, builtin, name) + typedef = IDLTypedef( + BuiltinLocation(""), scope, builtin, name + ) - @ staticmethod + @staticmethod def handleNullable(type, questionMarkLocation): if questionMarkLocation is not None: type = IDLNullableType(questionMarkLocation, type) @@ -7563,12 +8913,12 @@ class Parser(Tokenizer): return type def parse(self, t, filename=None): - self._filename = filename - self.lexer.input(t.decode(encoding = 'utf-8')) + self.lexer.input(t) # for tok in iter(self.lexer.token, None): # print tok + self._filename = filename self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True)) self._filename = None @@ -7580,7 +8930,6 @@ class Parser(Tokenizer): if isinstance(p, IDLInterface): interfaceStatements.append(p) - iterableIteratorIface = None for iface in interfaceStatements: iterable = None # We haven't run finish() on the interface yet, so we don't know @@ -7588,26 +8937,77 @@ class Parser(Tokenizer): # means we have to loop through the members to see if we have an # iterable member. 
for m in iface.members: - if isinstance(m, IDLIterable): + if isinstance(m, (IDLIterable, IDLAsyncIterable)): iterable = m break - if iterable and iterable.isPairIterator(): + if iterable and (iterable.isPairIterator() or iterable.isAsyncIterable()): + def simpleExtendedAttr(str): - return IDLExtendedAttribute(iface.location, (str, )) + return IDLExtendedAttribute(iface.location, (str,)) + + if isinstance(iterable, IDLAsyncIterable): + nextReturnType = IDLPromiseType( + iterable.location, BuiltinTypes[IDLBuiltinType.Types.any] + ) + else: + nextReturnType = BuiltinTypes[IDLBuiltinType.Types.object] nextMethod = IDLMethod( - iface.location, - IDLUnresolvedIdentifier(iface.location, "next"), - BuiltinTypes[IDLBuiltinType.Types.object], []) + iterable.location, + IDLUnresolvedIdentifier(iterable.location, "next"), + nextReturnType, + [], + ) nextMethod.addExtendedAttributes([simpleExtendedAttr("Throws")]) - itr_ident = IDLUnresolvedIdentifier(iface.location, - iface.identifier.name + "Iterator") - toStringTag = iface.identifier.name + " Iterator" - itr_iface = IDLInterface(iface.location, self.globalScope(), - itr_ident, None, [nextMethod], - isKnownNonPartial=True, - classNameOverride=toStringTag, - toStringTag=toStringTag) - itr_iface.addExtendedAttributes([simpleExtendedAttr("NoInterfaceObject")]) + + methods = [nextMethod] + + if iterable.getExtendedAttribute("GenerateReturnMethod"): + assert isinstance(iterable, IDLAsyncIterable) + + returnMethod = IDLMethod( + iterable.location, + IDLUnresolvedIdentifier(iterable.location, "return"), + IDLPromiseType( + iterable.location, BuiltinTypes[IDLBuiltinType.Types.any] + ), + [ + IDLArgument( + iterable.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), + "value", + ), + BuiltinTypes[IDLBuiltinType.Types.any], + optional=True, + ), + ], + ) + returnMethod.addExtendedAttributes([simpleExtendedAttr("Throws")]) + methods.append(returnMethod) + + if iterable.isIterable(): + itr_suffix = "Iterator" + else: + itr_suffix = "AsyncIterator" + itr_ident = IDLUnresolvedIdentifier( + iface.location, iface.identifier.name + itr_suffix + ) + if iterable.isIterable(): + classNameOverride = iface.identifier.name + " Iterator" + elif iterable.isAsyncIterable(): + classNameOverride = iface.identifier.name + " AsyncIterator" + itr_iface = IDLInterface( + iface.location, + self.globalScope(), + itr_ident, + None, + methods, + isKnownNonPartial=True, + classNameOverride=classNameOverride, + ) + itr_iface.addExtendedAttributes( + [simpleExtendedAttr("LegacyNoInterfaceObject")] + ) # Make sure the exposure set for the iterator interface is the # same as the exposure set for the iterable interface, because # we're going to generate methods on the iterable that return @@ -7616,17 +9016,22 @@ class Parser(Tokenizer): # Always append generated iterable interfaces after the # interface they're a member of, otherwise nativeType generation # won't work correctly. - itr_iface.iterableInterface = iface + if iterable.isIterable(): + itr_iface.iterableInterface = iface + else: + itr_iface.asyncIterableInterface = iface self._productions.append(itr_iface) iterable.iteratorType = IDLWrapperType(iface.location, itr_iface) # Make sure we finish IDLIncludesStatements before we finish the # IDLInterfaces. # XXX khuey hates this bit and wants to nuke it from orbit. 
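# Editor's aside (sketch; assumes this module and PLY are importable): the
# block above synthesizes a companion "<Name>Iterator" (or "<Name>AsyncIterator")
# interface whose next() returns object (or Promise<any> for async iterables).
import WebIDL

parser = WebIDL.Parser()
parser.parse(
    """
    interface StringMap {
      iterable<DOMString, DOMString>;
    };
    """,
    "StringMap.webidl",
)
for production in parser.finish():
    if isinstance(production, WebIDL.IDLInterface):
        print(production.identifier.name)
# Expected to print both "StringMap" and the synthesized "StringMapIterator".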
- includesStatements = [p for p in self._productions if - isinstance(p, IDLIncludesStatement)] - otherStatements = [p for p in self._productions if - not isinstance(p, IDLIncludesStatement)] + includesStatements = [ + p for p in self._productions if isinstance(p, IDLIncludesStatement) + ] + otherStatements = [ + p for p in self._productions if not isinstance(p, IDLIncludesStatement) + ] for production in includesStatements: production.finish(self.globalScope()) for production in otherStatements: @@ -7650,7 +9055,6 @@ class Parser(Tokenizer): # Builtin IDL defined by WebIDL _builtins = """ - typedef unsigned long long DOMTimeStamp; typedef (ArrayBufferView or ArrayBuffer) BufferSource; """ @@ -7658,12 +9062,21 @@ class Parser(Tokenizer): def main(): # Parse arguments. from optparse import OptionParser + usageString = "usage: %prog [options] files" o = OptionParser(usage=usageString) - o.add_option("--cachedir", dest='cachedir', default=None, - help="Directory in which to cache lex/parse tables.") - o.add_option("--verbose-errors", action='store_true', default=False, - help="When an error happens, display the Python traceback.") + o.add_option( + "--cachedir", + dest="cachedir", + default=None, + help="Directory in which to cache lex/parse tables.", + ) + o.add_option( + "--verbose-errors", + action="store_true", + default=False, + help="When an error happens, display the Python traceback.", + ) (options, args) = o.parse_args() if len(args) < 1: @@ -7677,11 +9090,11 @@ def main(): try: for filename in fileList: fullPath = os.path.normpath(os.path.join(baseDir, filename)) - f = open(fullPath, 'rb') + f = open(fullPath, "rb") lines = f.readlines() f.close() print(fullPath) - parser.parse(''.join(lines), fullPath) + parser.parse("".join(lines), fullPath) parser.finish() except WebIDLError as e: if options.verbose_errors: @@ -7689,5 +9102,6 @@ def main(): else: print(e) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/components/script/dom/bindings/codegen/parser/abstract.patch b/components/script/dom/bindings/codegen/parser/abstract.patch index 180e345b61b..316ed8ce0a1 100644 --- a/components/script/dom/bindings/codegen/parser/abstract.patch +++ b/components/script/dom/bindings/codegen/parser/abstract.patch @@ -1,12 +1,10 @@ --- WebIDL.py +++ WebIDL.py -@@ -1883,7 +1883,8 @@ class IDLInterface(IDLInterfaceOrNamespace): - identifier == "LegacyUnenumerableNamedProperties" or - identifier == "RunConstructorInCallerCompartment" or - identifier == "WantsEventListenerHooks" or -- identifier == "Serializable"): -+ identifier == "Serializable" or -+ identifier == "Abstract"): +@@ -1987,6 +1987,7 @@ class IDLInterface(IDLInterfaceOrNamespace): + or identifier == "RunConstructorInCallerCompartment" + or identifier == "WantsEventListenerHooks" + or identifier == "Serializable" ++ or identifier == "Abstract" + ): # Known extended attributes that do not take values if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, diff --git a/components/script/dom/bindings/codegen/parser/debug.patch b/components/script/dom/bindings/codegen/parser/debug.patch index a4f8739000d..ffab062d801 100644 --- a/components/script/dom/bindings/codegen/parser/debug.patch +++ b/components/script/dom/bindings/codegen/parser/debug.patch @@ -1,12 +1,10 @@ --- WebIDL.py +++ WebIDL.py -@@ -7382,7 +7382,8 @@ class Parser(Tokenizer): - self.parser = yacc.yacc(module=self, - outputdir=outputdir, - tabmodule='webidlyacc', -- errorlog=logger -+ errorlog=logger, -+ debug=False - 
# Pickling the grammar is a speedup in - # some cases (older Python?) but a - # significant slowdown in others. +@@ -8827,6 +8827,7 @@ class Parser(Tokenizer): + module=self, + outputdir=outputdir, + errorlog=logger, ++ debug=False, + write_tables=False, + # Pickling the grammar is a speedup in + # some cases (older Python?) but a diff --git a/components/script/dom/bindings/codegen/parser/inline.patch b/components/script/dom/bindings/codegen/parser/inline.patch index 46971ce5067..ad4d0f8f959 100644 --- a/components/script/dom/bindings/codegen/parser/inline.patch +++ b/components/script/dom/bindings/codegen/parser/inline.patch @@ -1,12 +1,10 @@ --- WebIDL.py +++ WebIDL.py -@@ -1884,7 +1884,8 @@ class IDLInterface(IDLInterfaceOrNamespace): - identifier == "RunConstructorInCallerCompartment" or - identifier == "WantsEventListenerHooks" or - identifier == "Serializable" or -- identifier == "Abstract"): -+ identifier == "Abstract" or -+ identifier == "Inline"): +@@ -1988,6 +1988,7 @@ class IDLInterface(IDLInterfaceOrNamespace): + or identifier == "WantsEventListenerHooks" + or identifier == "Serializable" + or identifier == "Abstract" ++ or identifier == "Inline" + ): # Known extended attributes that do not take values if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, diff --git a/components/script/dom/bindings/codegen/parser/readable-stream.patch b/components/script/dom/bindings/codegen/parser/readable-stream.patch new file mode 100644 index 00000000000..4b90067696e --- /dev/null +++ b/components/script/dom/bindings/codegen/parser/readable-stream.patch @@ -0,0 +1,162 @@ +--- WebIDL.py ++++ WebIDL.py +@@ -2498,6 +2498,9 @@ class IDLType(IDLObject): + def isRecord(self): + return False + ++ def isReadableStream(self): ++ return False ++ + def isArrayBuffer(self): + return False + +@@ -2526,7 +2529,7 @@ class IDLType(IDLObject): + def isSpiderMonkeyInterface(self): + """Returns a boolean indicating whether this type is an 'interface' + type that is implemented in SpiderMonkey.""" +- return self.isInterface() and self.isBufferSource() ++ return self.isInterface() and (self.isBufferSource() or self.isReadableStream()) + + def isAny(self): + return self.tag() == IDLType.Tags.any +@@ -2743,6 +2746,9 @@ class IDLNullableType(IDLParametrizedType): + def isRecord(self): + return self.inner.isRecord() + ++ def isReadableStream(self): ++ return self.inner.isReadableStream() ++ + def isArrayBuffer(self): + return self.inner.isArrayBuffer() + +@@ -3252,6 +3258,9 @@ class IDLTypedefType(IDLType): + def isRecord(self): + return self.inner.isRecord() + ++ def isReadableStream(self): ++ return self.inner.isReadableStream() ++ + def isDictionary(self): + return self.inner.isDictionary() + +@@ -3597,6 +3606,7 @@ class IDLBuiltinType(IDLType): + "Uint32Array", + "Float32Array", + "Float64Array", ++ "ReadableStream", + ) + + TagLookup = { +@@ -3632,6 +3642,7 @@ class IDLBuiltinType(IDLType): + Types.Uint32Array: IDLType.Tags.interface, + Types.Float32Array: IDLType.Tags.interface, + Types.Float64Array: IDLType.Tags.interface, ++ Types.ReadableStream: IDLType.Tags.interface, + } + + PrettyNames = { +@@ -3667,6 +3678,7 @@ class IDLBuiltinType(IDLType): + Types.Uint32Array: "Uint32Array", + Types.Float32Array: "Float32Array", + Types.Float64Array: "Float64Array", ++ Types.ReadableStream: "ReadableStream", + } + + def __init__( +@@ -3830,11 +3842,19 @@ class IDLBuiltinType(IDLType): + and self._typeTag <= IDLBuiltinType.Types.Float64Array + ) + ++ def isReadableStream(self): ++ 
return self._typeTag == IDLBuiltinType.Types.ReadableStream ++ + def isInterface(self): + # TypedArray things are interface types per the TypedArray spec, + # but we handle them as builtins because SpiderMonkey implements + # all of it internally. +- return self.isArrayBuffer() or self.isArrayBufferView() or self.isTypedArray() ++ return ( ++ self.isArrayBuffer() ++ or self.isArrayBufferView() ++ or self.isTypedArray() ++ or self.isReadableStream() ++ ) + + def isNonCallbackInterface(self): + # All the interfaces we can be are non-callback +@@ -3928,6 +3948,7 @@ class IDLBuiltinType(IDLType): + # ArrayBuffer is distinguishable from everything + # that's not an ArrayBuffer or a callback interface + (self.isArrayBuffer() and not other.isArrayBuffer()) ++ or (self.isReadableStream() and not other.isReadableStream()) + or + # ArrayBufferView is distinguishable from everything + # that's not an ArrayBufferView or typed array. +@@ -4134,6 +4155,11 @@ BuiltinTypes = { + "Float64Array", + IDLBuiltinType.Types.Float64Array, + ), ++ IDLBuiltinType.Types.ReadableStream: IDLBuiltinType( ++ BuiltinLocation(""), ++ "ReadableStream", ++ IDLBuiltinType.Types.ReadableStream, ++ ), + } + + +@@ -6883,6 +6909,9 @@ class Tokenizer(object): + def t_IDENTIFIER(self, t): + r"[_-]?[A-Za-z][0-9A-Z_a-z-]*" + t.type = self.keywords.get(t.value, "IDENTIFIER") ++ # If Builtin readable streams are disabled, mark ReadableStream as an identifier. ++ if t.type == "READABLESTREAM" and not self._use_builtin_readable_streams: ++ t.type = "IDENTIFIER" + return t + + def t_STRING(self, t): +@@ -6973,6 +7002,7 @@ class Tokenizer(object): + "setlike": "SETLIKE", + "iterable": "ITERABLE", + "namespace": "NAMESPACE", ++ "ReadableStream": "READABLESTREAM", + "constructor": "CONSTRUCTOR", + "symbol": "SYMBOL", + "async": "ASYNC", +@@ -6993,7 +7023,8 @@ class Tokenizer(object): + ], + ) + +- def __init__(self, outputdir, lexer=None): ++ def __init__(self, outputdir, lexer=None, use_builtin_readable_streams=True): ++ self._use_builtin_readable_streams = use_builtin_readable_streams + if lexer: + self.lexer = lexer + else: +@@ -8482,6 +8513,7 @@ class Parser(Tokenizer): + """ + DistinguishableType : PrimitiveType Null + | ARRAYBUFFER Null ++ | READABLESTREAM Null + | OBJECT Null + | UNDEFINED Null + """ +@@ -8489,6 +8521,8 @@ class Parser(Tokenizer): + type = BuiltinTypes[IDLBuiltinType.Types.object] + elif p[1] == "ArrayBuffer": + type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer] ++ elif p[1] == "ReadableStream": ++ type = BuiltinTypes[IDLBuiltinType.Types.ReadableStream] + elif p[1] == "undefined": + type = BuiltinTypes[IDLBuiltinType.Types.undefined] + else: +@@ -8827,8 +8861,8 @@ class Parser(Tokenizer): + [Location(self.lexer, p.lineno, p.lexpos, self._filename)], + ) + +- def __init__(self, outputdir="", lexer=None): +- Tokenizer.__init__(self, outputdir, lexer) ++ def __init__(self, outputdir="", lexer=None, use_builtin_readable_stream=True): ++ Tokenizer.__init__(self, outputdir, lexer, use_builtin_readable_stream) + + logger = SqueakyCleanLogger() + try: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_any_null.py b/components/script/dom/bindings/codegen/parser/tests/test_any_null.py index e3b690bf6f1..f9afdacb02f 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_any_null.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_any_null.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DoubleNull { 
attribute any? foo; }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py b/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py index 9ae85531fa3..3f50cb05158 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface ArgumentIdentifierConflict { undefined foo(boolean arg1, boolean arg1); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py b/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py index 2b29658d678..bbed33df926 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py @@ -1,17 +1,22 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Foo { undefined foo(object constructor); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 1, "Should have an interface"); - iface = results[0]; - harness.check(len(iface.members), 1, "Should have an operation"); - operation = iface.members[0]; - harness.check(len(operation.signatures()), 1, "Should have one signature"); - (retval, args) = operation.signatures()[0]; - harness.check(len(args), 1, "Should have an argument"); - harness.check(args[0].identifier.name, "constructor", - "Should have an identifier named 'constructor'"); + harness.check(len(results), 1, "Should have an interface") + iface = results[0] + harness.check(len(iface.members), 1, "Should have an operation") + operation = iface.members[0] + harness.check(len(operation.signatures()), 1, "Should have one signature") + (retval, args) = operation.signatures()[0] + harness.check(len(args), 1, "Should have an argument") + harness.check( + args[0].identifier.name, + "constructor", + "Should have an identifier named 'constructor'", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_argument_novoid.py b/components/script/dom/bindings/codegen/parser/tests/test_argument_novoid.py deleted file mode 100644 index 42e0776e677..00000000000 --- a/components/script/dom/bindings/codegen/parser/tests/test_argument_novoid.py +++ /dev/null @@ -1,14 +0,0 @@ -def WebIDLTest(parser, harness): - threw = False - try: - parser.parse(""" - interface UndefinedArgument1 { - undefined foo(undefined arg2); - }; - """) - - results = parser.finish() - except: - threw = True - - harness.ok(threw, "Should have thrown.") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py b/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py index 7020db59f3e..b762d06ac29 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestArrayBuffer { attribute ArrayBuffer bufferAttr; undefined bufferMethod(ArrayBuffer arg1, ArrayBuffer? 
arg2, sequence arg3); @@ -36,7 +38,8 @@ def WebIDLTest(parser, harness): attribute Float64Array float64ArrayAttr; undefined float64ArrayMethod(Float64Array arg1, Float64Array? arg2, sequence arg3); }; - """) + """ + ) results = parser.finish() @@ -55,24 +58,35 @@ def WebIDLTest(parser, harness): harness.ok(attr.type.isSpiderMonkeyInterface(), "Should test as a js interface") (retType, arguments) = method.signatures()[0] - harness.ok(retType.isUndefined(), "Should have a undefined return type") + harness.ok(retType.isUndefined(), "Should have an undefined return type") harness.check(len(arguments), 3, "Expect 3 arguments") - harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type") - harness.ok(arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface") + harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type") + harness.ok( + arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface" + ) - harness.check(str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type") - harness.ok(arguments[1].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface") + harness.check( + str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type" + ) + harness.ok( + arguments[1].type.inner.isSpiderMonkeyInterface(), + "Should test as a js interface", + ) - harness.check(str(arguments[2].type), t + "Sequence", "Expect an ArrayBuffer type") - harness.ok(arguments[2].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface") + harness.check( + str(arguments[2].type), t + "Sequence", "Expect an ArrayBuffer type" + ) + harness.ok( + arguments[2].type.inner.isSpiderMonkeyInterface(), + "Should test as a js interface", + ) - - checkStuff(members[0], members[1], "ArrayBuffer") - checkStuff(members[2], members[3], "ArrayBufferView") - checkStuff(members[4], members[5], "Int8Array") - checkStuff(members[6], members[7], "Uint8Array") - checkStuff(members[8], members[9], "Uint8ClampedArray") + checkStuff(members[0], members[1], "ArrayBuffer") + checkStuff(members[2], members[3], "ArrayBufferView") + checkStuff(members[4], members[5], "Int8Array") + checkStuff(members[6], members[7], "Uint8Array") + checkStuff(members[8], members[9], "Uint8ClampedArray") checkStuff(members[10], members[11], "Int16Array") checkStuff(members[12], members[13], "Uint16Array") checkStuff(members[14], members[15], "Int32Array") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attr.py b/components/script/dom/bindings/codegen/parser/tests/test_attr.py index 35f680aaa82..e19689a81a9 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_attr.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_attr.py @@ -1,31 +1,35 @@ import WebIDL -def WebIDLTest(parser, harness): - testData = [("::TestAttr%s::b", "b", "Byte%s", False), - ("::TestAttr%s::rb", "rb", "Byte%s", True), - ("::TestAttr%s::o", "o", "Octet%s", False), - ("::TestAttr%s::ro", "ro", "Octet%s", True), - ("::TestAttr%s::s", "s", "Short%s", False), - ("::TestAttr%s::rs", "rs", "Short%s", True), - ("::TestAttr%s::us", "us", "UnsignedShort%s", False), - ("::TestAttr%s::rus", "rus", "UnsignedShort%s", True), - ("::TestAttr%s::l", "l", "Long%s", False), - ("::TestAttr%s::rl", "rl", "Long%s", True), - ("::TestAttr%s::ul", "ul", "UnsignedLong%s", False), - ("::TestAttr%s::rul", "rul", "UnsignedLong%s", True), - ("::TestAttr%s::ll", "ll", "LongLong%s", False), - ("::TestAttr%s::rll", "rll", "LongLong%s", True), - ("::TestAttr%s::ull", "ull", 
"UnsignedLongLong%s", False), - ("::TestAttr%s::rull", "rull", "UnsignedLongLong%s", True), - ("::TestAttr%s::str", "str", "String%s", False), - ("::TestAttr%s::rstr", "rstr", "String%s", True), - ("::TestAttr%s::obj", "obj", "Object%s", False), - ("::TestAttr%s::robj", "robj", "Object%s", True), - ("::TestAttr%s::object", "object", "Object%s", False), - ("::TestAttr%s::f", "f", "Float%s", False), - ("::TestAttr%s::rf", "rf", "Float%s", True)] - parser.parse(""" +def WebIDLTest(parser, harness): + testData = [ + ("::TestAttr%s::b", "b", "Byte%s", False), + ("::TestAttr%s::rb", "rb", "Byte%s", True), + ("::TestAttr%s::o", "o", "Octet%s", False), + ("::TestAttr%s::ro", "ro", "Octet%s", True), + ("::TestAttr%s::s", "s", "Short%s", False), + ("::TestAttr%s::rs", "rs", "Short%s", True), + ("::TestAttr%s::us", "us", "UnsignedShort%s", False), + ("::TestAttr%s::rus", "rus", "UnsignedShort%s", True), + ("::TestAttr%s::l", "l", "Long%s", False), + ("::TestAttr%s::rl", "rl", "Long%s", True), + ("::TestAttr%s::ul", "ul", "UnsignedLong%s", False), + ("::TestAttr%s::rul", "rul", "UnsignedLong%s", True), + ("::TestAttr%s::ll", "ll", "LongLong%s", False), + ("::TestAttr%s::rll", "rll", "LongLong%s", True), + ("::TestAttr%s::ull", "ull", "UnsignedLongLong%s", False), + ("::TestAttr%s::rull", "rull", "UnsignedLongLong%s", True), + ("::TestAttr%s::str", "str", "String%s", False), + ("::TestAttr%s::rstr", "rstr", "String%s", True), + ("::TestAttr%s::obj", "obj", "Object%s", False), + ("::TestAttr%s::robj", "robj", "Object%s", True), + ("::TestAttr%s::object", "object", "Object%s", False), + ("::TestAttr%s::f", "f", "Float%s", False), + ("::TestAttr%s::rf", "rf", "Float%s", True), + ] + + parser.parse( + """ interface TestAttr { attribute byte b; readonly attribute byte rb; @@ -77,13 +81,13 @@ def WebIDLTest(parser, harness): attribute float? f; readonly attribute float? 
rf; }; - """) + """ + ) results = parser.finish() def checkAttr(attr, QName, name, type, readonly): - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Attr is an Attr") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") @@ -95,11 +99,14 @@ def WebIDLTest(parser, harness): harness.ok(True, "TestAttr interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestAttr", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestAttr", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestAttr", "Interface has the right name") - harness.check(len(iface.members), len(testData), "Expect %s members" % len(testData)) + harness.check( + len(iface.members), len(testData), "Expect %s members" % len(testData) + ) attrs = iface.members @@ -110,11 +117,16 @@ def WebIDLTest(parser, harness): checkAttr(attr, QName % "", name, type % "", readonly) iface = results[1] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestAttrNullable", "Interface has the right QName") - harness.check(iface.identifier.name, "TestAttrNullable", "Interface has the right name") - harness.check(len(iface.members), len(testData), "Expect %s members" % len(testData)) + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestAttrNullable", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestAttrNullable", "Interface has the right name" + ) + harness.check( + len(iface.members), len(testData), "Expect %s members" % len(testData) + ) attrs = iface.members @@ -127,11 +139,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SetterThrows] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -140,11 +154,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throw] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -153,24 +169,30 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SameObject] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should not allow [SameObject] on attributes not of interface type") + harness.ok( + threw, "Should not allow [SameObject] on attributes not of interface type" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SameObject] readonly attribute A foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py b/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py index 
fb1b97812bc..f3249de900a 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrSequenceType { attribute sequence foo; }; - """) + """ + ) results = parser.finish() except: @@ -17,51 +19,59 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrUnionWithSequenceType { attribute (sequence or DOMString) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union with a sequence member type") + harness.ok(threw, "Attribute type must not be a union with a sequence member type") parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrNullableUnionWithSequenceType { attribute (sequence? or DOMString) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union with a nullable sequence " - "member type") + harness.ok( + threw, + "Attribute type must not be a union with a nullable sequence " "member type", + ) parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrUnionWithUnionWithSequenceType { attribute ((sequence or DOMString) or AttrUnionWithUnionWithSequenceType) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union type with a union member " - "type that has a sequence member type") + harness.ok( + threw, + "Attribute type must not be a union type with a union member " + "type that has a sequence member type", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py b/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py index 9ba39018c77..97a7f47859a 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py @@ -1,14 +1,16 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): # Basic functionality threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange] long Foo; typedef [Clamp] long Bar; - typedef [TreatNullAs=EmptyString] DOMString Baz; + typedef [LegacyNullToEmptyString] DOMString Baz; dictionary A { required [EnforceRange] long a; required [Clamp] long b; @@ -19,11 +21,12 @@ def WebIDLTest(parser, harness): attribute Foo typedefFoo; attribute [EnforceRange] long foo; attribute [Clamp] long bar; - attribute [TreatNullAs=EmptyString] DOMString baz; + attribute [LegacyNullToEmptyString] DOMString baz; undefined method([EnforceRange] long foo, [Clamp] long bar, - [TreatNullAs=EmptyString] DOMString baz); + [LegacyNullToEmptyString] DOMString baz); undefined method2(optional [EnforceRange] long foo, optional [Clamp] long bar, - optional [TreatNullAs=EmptyString] DOMString baz); + optional [LegacyNullToEmptyString] DOMString baz); + undefined method3(optional [LegacyNullToEmptyString] UTF8String foo = ""); }; interface C { attribute [EnforceRange] long? 
foo; @@ -40,34 +43,88 @@ def WebIDLTest(parser, harness): interface Iterable { iterable<[Clamp] long, [EnforceRange] long>; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should not have thrown on parsing normal") if not threw: - harness.check(results[0].innerType.hasEnforceRange(), True, "Foo is [EnforceRange]") + harness.check( + results[0].innerType.hasEnforceRange(), True, "Foo is [EnforceRange]" + ) harness.check(results[1].innerType.hasClamp(), True, "Bar is [Clamp]") - harness.check(results[2].innerType.treatNullAsEmpty, True, "Baz is [TreatNullAs=EmptyString]") + harness.check( + results[2].innerType.legacyNullToEmptyString, + True, + "Baz is [LegacyNullToEmptyString]", + ) A = results[3] - harness.check(A.members[0].type.hasEnforceRange(), True, "A.a is [EnforceRange]") + harness.check( + A.members[0].type.hasEnforceRange(), True, "A.a is [EnforceRange]" + ) harness.check(A.members[1].type.hasClamp(), True, "A.b is [Clamp]") - harness.check(A.members[2].type.hasEnforceRange(), True, "A.c is [EnforceRange]") - harness.check(A.members[3].type.hasEnforceRange(), True, "A.d is [EnforceRange]") + harness.check( + A.members[2].type.hasEnforceRange(), True, "A.c is [EnforceRange]" + ) + harness.check( + A.members[3].type.hasEnforceRange(), True, "A.d is [EnforceRange]" + ) B = results[4] - harness.check(B.members[0].type.hasEnforceRange(), True, "B.typedefFoo is [EnforceRange]") - harness.check(B.members[1].type.hasEnforceRange(), True, "B.foo is [EnforceRange]") + harness.check( + B.members[0].type.hasEnforceRange(), True, "B.typedefFoo is [EnforceRange]" + ) + harness.check( + B.members[1].type.hasEnforceRange(), True, "B.foo is [EnforceRange]" + ) harness.check(B.members[2].type.hasClamp(), True, "B.bar is [Clamp]") - harness.check(B.members[3].type.treatNullAsEmpty, True, "B.baz is [TreatNullAs=EmptyString]") + harness.check( + B.members[3].type.legacyNullToEmptyString, + True, + "B.baz is [LegacyNullToEmptyString]", + ) method = B.members[4].signatures()[0][1] - harness.check(method[0].type.hasEnforceRange(), True, "foo argument of method is [EnforceRange]") - harness.check(method[1].type.hasClamp(), True, "bar argument of method is [Clamp]") - harness.check(method[2].type.treatNullAsEmpty, True, "baz argument of method is [TreatNullAs=EmptyString]") + harness.check( + method[0].type.hasEnforceRange(), + True, + "foo argument of method is [EnforceRange]", + ) + harness.check( + method[1].type.hasClamp(), True, "bar argument of method is [Clamp]" + ) + harness.check( + method[2].type.legacyNullToEmptyString, + True, + "baz argument of method is [LegacyNullToEmptyString]", + ) method2 = B.members[5].signatures()[0][1] - harness.check(method[0].type.hasEnforceRange(), True, "foo argument of method2 is [EnforceRange]") - harness.check(method[1].type.hasClamp(), True, "bar argument of method2 is [Clamp]") - harness.check(method[2].type.treatNullAsEmpty, True, "baz argument of method2 is [TreatNullAs=EmptyString]") + harness.check( + method2[0].type.hasEnforceRange(), + True, + "foo argument of method2 is [EnforceRange]", + ) + harness.check( + method2[1].type.hasClamp(), True, "bar argument of method2 is [Clamp]" + ) + harness.check( + method2[2].type.legacyNullToEmptyString, + True, + "baz argument of method2 is [LegacyNullToEmptyString]", + ) + + method3 = B.members[6].signatures()[0][1] + harness.check( + method3[0].type.legacyNullToEmptyString, + True, + "bar argument of method2 is [LegacyNullToEmptyString]", + ) + harness.check( + 
method3[0].defaultValue.type.isUTF8String(), + True, + "default value of bar argument of method2 is correctly coerced to UTF8String", + ) + C = results[5] harness.ok(C.members[0].type.nullable(), "C.foo is nullable") harness.ok(C.members[0].type.hasEnforceRange(), "C.foo has [EnforceRange]") @@ -75,12 +132,18 @@ def WebIDLTest(parser, harness): harness.ok(C.members[1].type.hasClamp(), "C.bar has [Clamp]") method = C.members[2].signatures()[0][1] harness.ok(method[0].type.nullable(), "foo argument of method is nullable") - harness.ok(method[0].type.hasEnforceRange(), "foo argument of method has [EnforceRange]") + harness.ok( + method[0].type.hasEnforceRange(), + "foo argument of method has [EnforceRange]", + ) harness.ok(method[1].type.nullable(), "bar argument of method is nullable") harness.ok(method[1].type.hasClamp(), "bar argument of method has [Clamp]") method2 = C.members[3].signatures()[0][1] harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable") - harness.ok(method2[0].type.hasEnforceRange(), "foo argument of method2 has [EnforceRange]") + harness.ok( + method2[0].type.hasEnforceRange(), + "foo argument of method2 has [EnforceRange]", + ) harness.ok(method2[1].type.nullable(), "bar argument of method2 is nullable") harness.ok(method2[1].type.hasClamp(), "bar argument of method2 has [Clamp]") @@ -88,7 +151,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [AllowShared] ArrayBufferView Foo; dictionary A { required [AllowShared] ArrayBufferView a; @@ -115,7 +179,8 @@ def WebIDLTest(parser, harness): interface Iterable { iterable<[Clamp] long, [AllowShared] ArrayBufferView>; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -131,63 +196,101 @@ def WebIDLTest(parser, harness): harness.ok(B.members[0].type.hasAllowShared(), "B.typedefFoo is [AllowShared]") harness.ok(B.members[1].type.hasAllowShared(), "B.foo is [AllowShared]") method = B.members[2].signatures()[0][1] - harness.ok(method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]") + harness.ok( + method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]" + ) method2 = B.members[3].signatures()[0][1] - harness.ok(method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]") + harness.ok( + method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]" + ) C = results[3] harness.ok(C.members[0].type.nullable(), "C.foo is nullable") harness.ok(C.members[0].type.hasAllowShared(), "C.foo is [AllowShared]") method = C.members[1].signatures()[0][1] harness.ok(method[0].type.nullable(), "foo argument of method is nullable") - harness.ok(method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]") + harness.ok( + method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]" + ) method2 = C.members[2].signatures()[0][1] harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable") - harness.ok(method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]") + harness.ok( + method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]" + ) - ATTRIBUTES = [("[Clamp]", "long"), ("[EnforceRange]", "long"), - ("[TreatNullAs=EmptyString]", "DOMString"), ("[AllowShared]", "ArrayBufferView")] + ATTRIBUTES = [ + ("[Clamp]", "long"), + ("[EnforceRange]", "long"), + ("[LegacyNullToEmptyString]", "DOMString"), + ("[AllowShared]", "ArrayBufferView"), + ] TEMPLATES = [ - ("required dictionary 
members", """ + ( + "required dictionary members", + """ dictionary Foo { %s required %s foo; }; - """), - ("optional arguments", """ + """, + ), + ( + "optional arguments", + """ interface Foo { undefined foo(%s optional %s foo); }; - """), - ("typedefs", """ + """, + ), + ( + "typedefs", + """ %s typedef %s foo; - """), - ("attributes", """ + """, + ), + ( + "attributes", + """ interface Foo { %s attribute %s foo; }; - """), - ("readonly attributes", """ + """, + ), + ( + "readonly attributes", + """ interface Foo { readonly attribute %s %s foo; }; - """), - ("readonly unresolved attributes", """ + """, + ), + ( + "readonly unresolved attributes", + """ interface Foo { readonly attribute Bar baz; }; typedef %s %s Bar; - """), - ("method", """ + """, + ), + ( + "method", + """ interface Foo { %s %s foo(); }; - """), - ("interface",""" + """, + ), + ( + "interface", + """ %s interface Foo { attribute %s foo; }; - """), - ("partial interface",""" + """, + ), + ( + "partial interface", + """ interface Foo { undefined foo(); }; @@ -195,20 +298,29 @@ def WebIDLTest(parser, harness): partial interface Foo { attribute %s bar; }; - """), - ("interface mixin",""" + """, + ), + ( + "interface mixin", + """ %s interface mixin Foo { attribute %s foo; }; - """), - ("namespace",""" + """, + ), + ( + "namespace", + """ %s namespace Foo { attribute %s foo; }; - """), - ("partial namespace",""" + """, + ), + ( + "partial namespace", + """ namespace Foo { undefined foo(); }; @@ -216,14 +328,18 @@ def WebIDLTest(parser, harness): partial namespace Foo { attribute %s bar; }; - """), - ("dictionary",""" + """, + ), + ( + "dictionary", + """ %s dictionary Foo { %s foo; }; - """) - ]; + """, + ), + ] for (name, template) in TEMPLATES: parser = parser.reset() @@ -242,15 +358,16 @@ def WebIDLTest(parser, harness): parser.finish() except: threw = True - harness.ok(threw, - "Should not allow %s on %s" % (attribute, name)) + harness.ok(threw, "Should not allow %s on %s" % (attribute, name)) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp, EnforceRange] long Foo; - """) + """ + ) parser.finish() except: threw = True @@ -260,23 +377,26 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange, Clamp] long Foo; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange]") - parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp] long Foo; typedef [EnforceRange] Foo bar; - """) + """ + ) parser.finish() except: threw = True @@ -286,25 +406,36 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange] long Foo; typedef [Clamp] Foo bar; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange] via typedefs") - TYPES = ["DOMString", "unrestricted float", "float", "unrestricted double", "double"] + TYPES = [ + "DOMString", + "unrestricted float", + "float", + "unrestricted double", + "double", + ] for type in TYPES: parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp] %s Foo; - """ % type) + """ + % type + ) parser.finish() except: threw = True @@ -314,58 +445,70 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef 
[EnforceRange] %s Foo; - """ % type) + """ + % type + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow [EnforceRange] on %s" % type) - parser = parser.reset() threw = False try: - parser.parse(""" - typedef [TreatNullAs=EmptyString] long Foo; - """) + parser.parse( + """ + typedef [LegacyNullToEmptyString] long Foo; + """ + ) parser.finish() except: threw = True - harness.ok(threw, "Should not allow [TreatNullAs] on long") + harness.ok(threw, "Should not allow [LegacyNullToEmptyString] on long") parser = parser.reset() threw = False try: - parser.parse(""" - typedef [TreatNullAs=EmptyString] JSString Foo; - """) + parser.parse( + """ + typedef [LegacyNullToEmptyString] JSString Foo; + """ + ) parser.finish() except: threw = True - harness.ok(threw, "Should not allow [TreatNullAs] on JSString") + harness.ok(threw, "Should not allow [LegacyNullToEmptyString] on JSString") parser = parser.reset() threw = False try: - parser.parse(""" - typedef [TreatNullAs=EmptyString] DOMString? Foo; - """) + parser.parse( + """ + typedef [LegacyNullToEmptyString] DOMString? Foo; + """ + ) parser.finish() except: threw = True - harness.ok(threw, "Should not allow [TreatNullAs] on nullable DOMString") + harness.ok( + threw, "Should not allow [LegacyNullToEmptyString] on nullable DOMString" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [AllowShared] DOMString Foo; - """) + """ + ) results = parser.finish() except: threw = True @@ -374,9 +517,11 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [AllowShared=something] ArrayBufferView Foo; - """) + """ + ) results = parser.finish() except: threw = True @@ -385,31 +530,41 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { undefined foo([Clamp] Bar arg); }; typedef long Bar; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should allow type attributes on unresolved types") - harness.check(results[0].members[0].signatures()[0][1][0].type.hasClamp(), True, - "Unresolved types with type attributes should correctly resolve with attributes") + harness.check( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), + True, + "Unresolved types with type attributes should correctly resolve with attributes", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { undefined foo(Bar arg); }; typedef [Clamp] long Bar; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should allow type attributes on typedefs") - harness.check(results[0].members[0].signatures()[0][1][0].type.hasClamp(), True, - "Unresolved types that resolve to typedefs with attributes should correctly resolve with attributes") + harness.check( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), + True, + "Unresolved types that resolve to typedefs with attributes should correctly resolve with attributes", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py b/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py index 631e52eba0b..6c913bba822 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, 
harness): - parser.parse(""" + parser.parse( + """ interface Test { attribute long b; }; - """); + """ + ) attr = parser.finish()[0].members[0] - harness.check(attr.type.filename(), '', 'Filename on builtin type') + harness.check(attr.type.filename(), "", "Filename on builtin type") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_builtins.py b/components/script/dom/bindings/codegen/parser/tests/test_builtins.py index f8563fc2d9b..a75a12e8143 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_builtins.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_builtins.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestBuiltins { attribute boolean b; attribute byte s8; @@ -12,30 +14,46 @@ def WebIDLTest(parser, harness): attribute unsigned long u32; attribute long long s64; attribute unsigned long long u64; - attribute DOMTimeStamp ts; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestBuiltins interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestBuiltins", "Interface has the right QName") + harness.check( + iface.identifier.QName(), "::TestBuiltins", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestBuiltins", "Interface has the right name") harness.check(iface.parent, None, "Interface has no parent") members = iface.members - harness.check(len(members), 10, "Should be one production") + harness.check(len(members), 9, "Should be one production") names = ["b", "s8", "u8", "s16", "u16", "s32", "u32", "s64", "u64", "ts"] - types = ["Boolean", "Byte", "Octet", "Short", "UnsignedShort", "Long", "UnsignedLong", "LongLong", "UnsignedLongLong", "UnsignedLongLong"] - for i in range(10): + types = [ + "Boolean", + "Byte", + "Octet", + "Short", + "UnsignedShort", + "Long", + "UnsignedLong", + "LongLong", + "UnsignedLongLong", + "UnsignedLongLong", + ] + for i in range(9): attr = members[i] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestBuiltins::" + names[i], "Attr has correct QName") + harness.check( + attr.identifier.QName(), + "::TestBuiltins::" + names[i], + "Attr has correct QName", + ) harness.check(attr.identifier.name, names[i], "Attr has correct name") harness.check(str(attr.type), types[i], "Attr type is the correct name") harness.ok(attr.type.isPrimitive(), "Should be a primitive type") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py b/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py index 51289f5db6e..a6f9f6ab9cb 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py @@ -2,24 +2,30 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestByteString { attribute ByteString bs; attribute DOMString ds; }; - """) + """ + ) - results = parser.finish(); + results = parser.finish() harness.ok(True, "TestByteString interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - 
"Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestByteString", "Interface has the right QName") - harness.check(iface.identifier.name, "TestByteString", "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestByteString", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestByteString", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members @@ -27,7 +33,9 @@ def WebIDLTest(parser, harness): attr = members[0] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestByteString::bs", "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestByteString::bs", "Attr has correct QName" + ) harness.check(attr.identifier.name, "bs", "Attr has correct name") harness.check(str(attr.type), "ByteString", "Attr type is the correct name") harness.ok(attr.type.isByteString(), "Should be ByteString type") @@ -37,7 +45,9 @@ def WebIDLTest(parser, harness): # now check we haven't broken DOMStrings in the process. attr = members[1] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestByteString::ds", "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestByteString::ds", "Attr has correct QName" + ) harness.check(attr.identifier.name, "ds", "Attr has correct name") harness.check(str(attr.type), "String", "Attr type is the correct name") harness.ok(attr.type.isDOMString(), "Should be DOMString type") @@ -47,53 +57,69 @@ def WebIDLTest(parser, harness): # Cannot represent constant ByteString in IDL. threw = False try: - parser.parse(""" + parser.parse( + """ interface ConstByteString { const ByteString foo = "hello" }; - """) + """ + ) except WebIDL.WebIDLError: threw = True - harness.ok(threw, "Should have thrown a WebIDL error for ByteString default in interface") + harness.ok( + threw, "Should have thrown a WebIDL error for ByteString default in interface" + ) # Can have optional ByteStrings with default values try: - parser.parse(""" + parser.parse( + """ interface OptionalByteString { undefined passByteString(optional ByteString arg = "hello"); - }; - """) - results2 = parser.finish(); + }; + """ + ) + results2 = parser.finish() except WebIDL.WebIDLError as e: - harness.ok(False, - "Should not have thrown a WebIDL error for ByteString " - "default in dictionary. " + str(e)) + harness.ok( + False, + "Should not have thrown a WebIDL error for ByteString " + "default in dictionary. " + str(e), + ) # Can have a default ByteString value in a dictionary try: - parser.parse(""" + parser.parse( + """ dictionary OptionalByteStringDict { ByteString item = "some string"; }; - """) - results3 = parser.finish(); + """ + ) + results3 = parser.finish() except WebIDL.WebIDLError as e: - harness.ok(False, - "Should not have thrown a WebIDL error for ByteString " - "default in dictionary. " + str(e)) + harness.ok( + False, + "Should not have thrown a WebIDL error for ByteString " + "default in dictionary. 
" + str(e), + ) # Don't allow control characters in ByteString literals threw = False try: - parser.parse(""" + parser.parse( + """ dictionary OptionalByteStringDict2 { ByteString item = "\x03"; }; - """) + """ + ) results4 = parser.finish() except WebIDL.WebIDLError as e: threw = True - harness.ok(threw, - "Should have thrown a WebIDL error for invalid ByteString " - "default in dictionary") + harness.ok( + threw, + "Should have thrown a WebIDL error for invalid ByteString " + "default in dictionary", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback.py b/components/script/dom/bindings/codegen/parser/tests/test_callback.py index c304d085ce5..407644a6a8d 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_callback.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_callback.py @@ -1,32 +1,37 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestCallback { attribute CallbackType? listener; }; callback CallbackType = boolean (unsigned long arg); - """) + """ + ) results = parser.finish() harness.ok(True, "TestCallback interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestCallback", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestCallback", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestCallback", "Interface has the right name") harness.check(len(iface.members), 1, "Expect %s members" % 1) attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Should be an attribute") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") - harness.check(attr.identifier.QName(), "::TestCallback::listener", "Attr has the right QName") + harness.check( + attr.identifier.QName(), "::TestCallback::listener", "Attr has the right QName" + ) harness.check(attr.identifier.name, "listener", "Attr has the right name") t = attr.type harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py b/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py index 4999deef623..832a92bb147 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py @@ -1,33 +1,46 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestCallbackConstructor { attribute CallbackConstructorType? 
constructorAttribute; }; callback constructor CallbackConstructorType = TestCallbackConstructor (unsigned long arg); - """) + """ + ) results = parser.finish() harness.ok(True, "TestCallbackConstructor interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestCallbackConstructor", "Interface has the right QName") - harness.check(iface.identifier.name, "TestCallbackConstructor", "Interface has the right name") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), + "::TestCallbackConstructor", + "Interface has the right QName", + ) + harness.check( + iface.identifier.name, "TestCallbackConstructor", "Interface has the right name" + ) harness.check(len(iface.members), 1, "Expect %s members" % 1) attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Should be an attribute") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") - harness.check(attr.identifier.QName(), "::TestCallbackConstructor::constructorAttribute", "Attr has the right QName") - harness.check(attr.identifier.name, "constructorAttribute", "Attr has the right name") + harness.check( + attr.identifier.QName(), + "::TestCallbackConstructor::constructorAttribute", + "Attr has the right QName", + ) + harness.check( + attr.identifier.name, "constructorAttribute", "Attr has the right name" + ) t = attr.type harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type") harness.ok(isinstance(t, WebIDL.IDLNullableType), "Attr has the right type") @@ -39,25 +52,33 @@ def WebIDLTest(parser, harness): parser.reset() threw = False try: - parser.parse(""" - [TreatNonObjectAsNull] + parser.parse( + """ + [LegacyTreatNonObjectAsNull] callback constructor CallbackConstructorType = object (); - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should throw on TreatNonObjectAsNull callback constructors") + harness.ok( + threw, "Should throw on LegacyTreatNonObjectAsNull callback constructors" + ) parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [MOZ_CAN_RUN_SCRIPT_BOUNDARY] callback constructor CallbackConstructorType = object (); - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not permit MOZ_CAN_RUN_SCRIPT_BOUNDARY callback constructors") + harness.ok( + threw, "Should not permit MOZ_CAN_RUN_SCRIPT_BOUNDARY callback constructors" + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py b/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py index 34813bcab99..0d657f48032 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ callback interface TestCallbackInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() @@ -16,13 +19,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + 
""" interface TestInterface { }; callback interface TestCallbackInterface : TestInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -32,13 +37,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestInterface : TestCallbackInterface { }; callback interface TestCallbackInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -46,7 +53,8 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow callback parent of non-callback interface") parser = parser.reset() - parser.parse(""" + parser.parse( + """ callback interface TestCallbackInterface1 { undefined foo(); }; @@ -86,9 +94,13 @@ def WebIDLTest(parser, harness): callback interface TestCallbackInterface10 : TestCallbackInterface1 { undefined bar(); }; - """) + """ + ) results = parser.finish() for (i, iface) in enumerate(results): - harness.check(iface.isSingleOperationInterface(), i < 4, - "Interface %s should be a single operation interface" % - iface.identifier.name) + harness.check( + iface.isSingleOperationInterface(), + i < 4, + "Interface %s should be a single operation interface" + % iface.identifier.name, + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py b/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py index ebc688bfd9c..c56c3dbde10 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions(DOMString a)] undefined foo(boolean arg2); }; - """) + """ + ) results = parser.finish() except: @@ -16,11 +18,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions(DOMString b)] readonly attribute boolean bar; }; - """) + """ + ) results = parser.finish() except: @@ -31,54 +35,72 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] attribute boolean bar; }; - """) + """ + ) results = parser.finish() except Exception as e: - harness.ok(False, "Shouldn't have thrown for [CEReactions] used on writable attribute. %s" % e) + harness.ok( + False, + "Shouldn't have thrown for [CEReactions] used on writable attribute. %s" + % e, + ) threw = True parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] undefined foo(boolean arg2); }; - """) + """ + ) results = parser.finish() except Exception as e: - harness.ok(False, "Shouldn't have thrown for [CEReactions] used on regular operations. %s" % e) + harness.ok( + False, + "Shouldn't have thrown for [CEReactions] used on regular operations. 
%s" + % e, + ) threw = True parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] readonly attribute boolean A; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should have thrown for [CEReactions] used on a readonly attribute") + harness.ok( + threw, "Should have thrown for [CEReactions] used on a readonly attribute" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [CEReactions] interface Foo { } - """) + """ + ) results = parser.finish() except: @@ -89,45 +111,47 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] getter any(DOMString name); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a named getter") + harness.ok(threw, "Should have thrown for [CEReactions] used on a named getter") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] legacycaller double compute(double x); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a legacycaller") + harness.ok(threw, "Should have thrown for [CEReactions] used on a legacycaller") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] stringifier DOMString (); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a stringifier") - + harness.ok(threw, "Should have thrown for [CEReactions] used on a stringifier") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py b/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py index 8420f2ee4e0..2aef8ebe8ff 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py @@ -1,23 +1,28 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; [ChromeOnly] any bar; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should have a dictionary") - members = results[0].members; + members = results[0].members harness.check(len(members), 2, "Should have two members") # Note that members are ordered lexicographically, so "bar" comes # before "foo". 
- harness.ok(members[0].getExtendedAttribute("ChromeOnly"), - "First member is not ChromeOnly") - harness.ok(not members[1].getExtendedAttribute("ChromeOnly"), - "Second member is ChromeOnly") + harness.ok( + members[0].getExtendedAttribute("ChromeOnly"), "First member is not ChromeOnly" + ) + harness.ok( + not members[1].getExtendedAttribute("ChromeOnly"), "Second member is ChromeOnly" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; any bar; @@ -26,14 +31,16 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have a dictionary and an interface") parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; [ChromeOnly] any bar; @@ -42,21 +49,25 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown.") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception", + ) parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary ParentDict { [ChromeOnly] any bar; }; @@ -68,21 +79,25 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown (2).") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception (2)") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception (2)", + ) parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary GrandParentDict { [ChromeOnly] any baz; }; @@ -98,13 +113,16 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown (3).") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception (3)") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception (3)", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_const.py b/components/script/dom/bindings/codegen/parser/tests/test_const.py index 918f284a226..f2d4b79d467 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_const.py 
+++ b/components/script/dom/bindings/codegen/parser/tests/test_const.py @@ -22,8 +22,10 @@ expected = [ ("::TestConsts::udbi", "udbi", "UnrestrictedDouble", 2), ] + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestConsts { const byte zero = 0; const byte b = -1; @@ -45,22 +47,25 @@ def WebIDLTest(parser, harness): const unrestricted float ufli = 2; const unrestricted double udbi = 2; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestConsts interface parsed without error.") harness.check(len(results), 1, "Should be one production.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestConsts", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestConsts", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestConsts", "Interface has the right name") - harness.check(len(iface.members), len(expected), "Expect %s members" % len(expected)) + harness.check( + len(iface.members), len(expected), "Expect %s members" % len(expected) + ) for (const, (QName, name, type, value)) in zip(iface.members, expected): - harness.ok(isinstance(const, WebIDL.IDLConst), - "Should be an IDLConst") + harness.ok(isinstance(const, WebIDL.IDLConst), "Should be an IDLConst") harness.ok(const.isConst(), "Const is a const") harness.ok(not const.isAttr(), "Const is not an attr") harness.ok(not const.isMethod(), "Const is not a method") @@ -68,19 +73,23 @@ def WebIDLTest(parser, harness): harness.check(const.identifier.name, name, "Const has the right name") harness.check(str(const.type), type, "Const has the right type") harness.ok(const.type.isPrimitive(), "All consts should be primitive") - harness.check(str(const.value.type), str(const.type), - "Const's value has the same type as the type") + harness.check( + str(const.value.type), + str(const.type), + "Const's value has the same type as the type", + ) harness.check(const.value.value, value, "Const value has the right value.") - parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestConsts { const boolean? 
zero = 0; }; - """) + """ + ) parser.finish() except: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor.py index 83e1f4fc34f..de5d52f1412 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_constructor.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_constructor.py @@ -1,21 +1,39 @@ import WebIDL + def WebIDLTest(parser, harness): def checkArgument(argument, QName, name, type, optional, variadic): - harness.ok(isinstance(argument, WebIDL.IDLArgument), - "Should be an IDLArgument") - harness.check(argument.identifier.QName(), QName, "Argument has the right QName") + harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument") + harness.check( + argument.identifier.QName(), QName, "Argument has the right QName" + ) harness.check(argument.identifier.name, name, "Argument has the right name") harness.check(str(argument.type), type, "Argument has the right return type") - harness.check(argument.optional, optional, "Argument has the right optional value") - harness.check(argument.variadic, variadic, "Argument has the right variadic value") + harness.check( + argument.optional, optional, "Argument has the right optional value" + ) + harness.check( + argument.variadic, variadic, "Argument has the right variadic value" + ) - def checkMethod(method, QName, name, signatures, - static=True, getter=False, setter=False, deleter=False, - legacycaller=False, stringifier=False, chromeOnly=False, - htmlConstructor=False, secureContext=False, pref=None, func=None): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + def checkMethod( + method, + QName, + name, + signatures, + static=True, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + chromeOnly=False, + htmlConstructor=False, + secureContext=False, + pref=None, + func=None, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.ok(method.isMethod(), "Method is a method") harness.ok(not method.isAttr(), "Method is not an attr") harness.ok(not method.isConst(), "Method is not a const") @@ -24,23 +42,58 @@ def WebIDLTest(parser, harness): harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value") - harness.check(method.getExtendedAttribute("ChromeOnly") is not None, chromeOnly, "Method has the correct value for ChromeOnly") - harness.check(method.isHTMLConstructor(), htmlConstructor, "Method has the correct htmlConstructor value") - harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures") - harness.check(method.getExtendedAttribute("Pref"), pref, "Method has the correct pref value") - harness.check(method.getExtendedAttribute("Func"), func, "Method has the correct func value") - harness.check(method.getExtendedAttribute("SecureContext") is not None, secureContext, "Method has the correct SecureContext value") + harness.check( + method.isDeleter(), deleter, "Method has the correct 
deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, + "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) + harness.check( + method.getExtendedAttribute("ChromeOnly") is not None, + chromeOnly, + "Method has the correct value for ChromeOnly", + ) + harness.check( + method.isHTMLConstructor(), + htmlConstructor, + "Method has the correct htmlConstructor value", + ) + harness.check( + len(method.signatures()), + len(signatures), + "Method has the correct number of signatures", + ) + harness.check( + method.getExtendedAttribute("Pref"), + pref, + "Method has the correct pref value", + ) + harness.check( + method.getExtendedAttribute("Func"), + func, + "Method has the correct func value", + ) + harness.check( + method.getExtendedAttribute("SecureContext") is not None, + secureContext, + "Method has the correct SecureContext value", + ) sigpairs = zip(method.signatures(), signatures) for (gotSignature, expectedSignature) in sigpairs: (gotRetType, gotArgs) = gotSignature (expectedRetType, expectedArgs) = expectedSignature - harness.check(str(gotRetType), expectedRetType, - "Method has the expected return type.") + harness.check( + str(gotRetType), expectedRetType, "Method has the expected return type." + ) for i in range(0, len(gotArgs)): (QName, name, type, optional, variadic) = expectedArgs[i] @@ -48,33 +101,88 @@ def WebIDLTest(parser, harness): def checkResults(results): harness.check(len(results), 3, "Should be three productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[2], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok( + isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface" + ) + harness.ok( + isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface" + ) + harness.ok( + isinstance(results[2], WebIDL.IDLInterface), "Should be an IDLInterface" + ) - checkMethod(results[0].ctor(), "::TestConstructorNoArgs::constructor", - "constructor", [("TestConstructorNoArgs (Wrapper)", [])]) - harness.check(len(results[0].members), 0, - "TestConstructorNoArgs should not have members") - checkMethod(results[1].ctor(), "::TestConstructorWithArgs::constructor", - "constructor", - [("TestConstructorWithArgs (Wrapper)", - [("::TestConstructorWithArgs::constructor::name", "name", "String", False, False)])]) - harness.check(len(results[1].members), 0, - "TestConstructorWithArgs should not have members") - checkMethod(results[2].ctor(), "::TestConstructorOverloads::constructor", - "constructor", - [("TestConstructorOverloads (Wrapper)", - [("::TestConstructorOverloads::constructor::foo", "foo", "Object", False, False)]), - ("TestConstructorOverloads (Wrapper)", - [("::TestConstructorOverloads::constructor::bar", "bar", "Boolean", False, False)])]) - harness.check(len(results[2].members), 0, - "TestConstructorOverloads should not have members") + checkMethod( + results[0].ctor(), + "::TestConstructorNoArgs::constructor", + "constructor", + [("TestConstructorNoArgs (Wrapper)", [])], + ) + harness.check( + len(results[0].members), 0, "TestConstructorNoArgs should not have members" + ) + checkMethod( + results[1].ctor(), + "::TestConstructorWithArgs::constructor", + "constructor", + [ + ( + "TestConstructorWithArgs (Wrapper)", + [ + ( + 
"::TestConstructorWithArgs::constructor::name", + "name", + "String", + False, + False, + ) + ], + ) + ], + ) + harness.check( + len(results[1].members), + 0, + "TestConstructorWithArgs should not have members", + ) + checkMethod( + results[2].ctor(), + "::TestConstructorOverloads::constructor", + "constructor", + [ + ( + "TestConstructorOverloads (Wrapper)", + [ + ( + "::TestConstructorOverloads::constructor::foo", + "foo", + "Object", + False, + False, + ) + ], + ), + ( + "TestConstructorOverloads (Wrapper)", + [ + ( + "::TestConstructorOverloads::constructor::bar", + "bar", + "Boolean", + False, + False, + ) + ], + ), + ], + ) + harness.check( + len(results[2].members), + 0, + "TestConstructorOverloads should not have members", + ) - parser.parse(""" + parser.parse( + """ interface TestConstructorNoArgs { constructor(); }; @@ -87,111 +195,146 @@ def WebIDLTest(parser, harness): constructor(object foo); constructor(boolean bar); }; - """) + """ + ) results = parser.finish() checkResults(results) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestPrefConstructor { [Pref="dom.webidl.test1"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestPrefConstructor::constructor", - "constructor", [("TestPrefConstructor (Wrapper)", [])], - pref=["dom.webidl.test1"]) + checkMethod( + results[0].ctor(), + "::TestPrefConstructor::constructor", + "constructor", + [("TestPrefConstructor (Wrapper)", [])], + pref=["dom.webidl.test1"], + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestChromeOnlyConstructor { [ChromeOnly] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestChromeOnlyConstructor::constructor", - "constructor", [("TestChromeOnlyConstructor (Wrapper)", [])], - chromeOnly=True) + checkMethod( + results[0].ctor(), + "::TestChromeOnlyConstructor::constructor", + "constructor", + [("TestChromeOnlyConstructor (Wrapper)", [])], + chromeOnly=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSCConstructor { [SecureContext] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestSCConstructor::constructor", - "constructor", [("TestSCConstructor (Wrapper)", [])], - secureContext=True) + checkMethod( + results[0].ctor(), + "::TestSCConstructor::constructor", + "constructor", + [("TestSCConstructor (Wrapper)", [])], + secureContext=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestFuncConstructor { [Func="Document::IsWebAnimationsEnabled"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an 
IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestFuncConstructor::constructor", - "constructor", [("TestFuncConstructor (Wrapper)", [])], - func=["Document::IsWebAnimationsEnabled"]) + checkMethod( + results[0].ctor(), + "::TestFuncConstructor::constructor", + "constructor", + [("TestFuncConstructor (Wrapper)", [])], + func=["Document::IsWebAnimationsEnabled"], + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestPrefChromeOnlySCFuncConstructor { [ChromeOnly, Pref="dom.webidl.test1", SecureContext, Func="Document::IsWebAnimationsEnabled"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestPrefChromeOnlySCFuncConstructor::constructor", - "constructor", [("TestPrefChromeOnlySCFuncConstructor (Wrapper)", [])], - func=["Document::IsWebAnimationsEnabled"], pref=["dom.webidl.test1"], - chromeOnly=True, secureContext=True) + checkMethod( + results[0].ctor(), + "::TestPrefChromeOnlySCFuncConstructor::constructor", + "constructor", + [("TestPrefChromeOnlySCFuncConstructor (Wrapper)", [])], + func=["Document::IsWebAnimationsEnabled"], + pref=["dom.webidl.test1"], + chromeOnly=True, + secureContext=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructor { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestHTMLConstructor::constructor", - "constructor", [("TestHTMLConstructor (Wrapper)", [])], - htmlConstructor=True) + checkMethod( + results[0].ctor(), + "::TestHTMLConstructor::constructor", + "constructor", + [("TestHTMLConstructor (Wrapper)", [])], + htmlConstructor=True, + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestChromeOnlyConstructor { constructor() [ChromeOnly] constructor(DOMString a); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -202,11 +345,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorWithArgs { [HTMLConstructor] constructor(DOMString a); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -217,11 +362,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ callback interface TestHTMLConstructorOnCallbackInterface { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -232,12 +379,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -247,165 +396,187 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface 
TestHTMLConstructorAndConstructor { [Throws] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a throwing constructor and a HTMLConstructor") + harness.ok(threw, "Can't have both a throwing constructor and a HTMLConstructor") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a constructor operation") + harness.ok(threw, "Can't have both a HTMLConstructor and a constructor operation") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a throwing constructor " - "operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a throwing constructor " "operation", + ) # Test HTMLConstructor and [ChromeOnly] constructor operation parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [ChromeOnly] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a ChromeOnly constructor and a HTMLConstructor") + harness.ok(threw, "Can't have both a ChromeOnly constructor and a HTMLConstructor") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws, ChromeOnly] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a throwing chromeonly constructor and a " - "HTMLConstructor") + harness.ok( + threw, + "Can't have both a throwing chromeonly constructor and a " "HTMLConstructor", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [ChromeOnly] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a chromeonly constructor " - "operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a chromeonly constructor " "operation", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws, ChromeOnly] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a throwing chromeonly " - "constructor operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a throwing chromeonly " + "constructor operation", + ) parser = parser.reset() threw = False try: - parser.parse(""" - [NoInterfaceObject] + parser.parse( + """ + [LegacyNoInterfaceObject] interface InterfaceWithoutInterfaceObject { constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a [NoInterfaceObject] " - "interface") + 
harness.ok( + threw, + "Can't have a constructor operation on a [LegacyNoInterfaceObject] " + "interface", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface InterfaceWithPartial { }; partial interface InterfaceWithPartial { constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a partial interface") + harness.ok(threw, "Can't have a constructor operation on a partial interface") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface InterfaceWithMixin { }; @@ -414,11 +585,10 @@ def WebIDLTest(parser, harness): }; InterfaceWithMixin includes Mixin - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a mixin") - + harness.ok(threw, "Can't have a constructor operation on a mixin") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py index b7eabb1e35b..5f3663602e4 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py @@ -1,14 +1,17 @@ import traceback + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=TestConstructorGlobal] interface TestConstructorGlobal { constructor(); }; - """) + """ + ) results = parser.finish() except: @@ -19,12 +22,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" - [Global, Exposed=TestNamedConstructorGlobal, - NamedConstructor=FooBar] - interface TestNamedConstructorGlobal { + parser.parse( + """ + [Global, Exposed=TestLegacyFactoryFunctionGlobal, + LegacyFactoryFunction=FooBar] + interface TestLegacyFactoryFunctionGlobal { }; - """) + """ + ) results = parser.finish() except: threw = True @@ -34,12 +39,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" - [NamedConstructor=FooBar, Global, - Exposed=TestNamedConstructorGlobal] - interface TestNamedConstructorGlobal { + parser.parse( + """ + [LegacyFactoryFunction=FooBar, Global, + Exposed=TestLegacyFactoryFunctionGlobal] + interface TestLegacyFactoryFunctionGlobal { }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,12 +56,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=TestHTMLConstructorGlobal] interface TestHTMLConstructorGlobal { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py index 24cc36066cd..9855352a9d4 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" - [NoInterfaceObject] - interface TestConstructorNoInterfaceObject { + parser.parse( + """ + [LegacyNoInterfaceObject] + interface TestConstructorLegacyNoInterfaceObject { constructor(); }; - """) + """ + ) results = parser.finish() except: 
@@ -16,23 +18,27 @@ def WebIDLTest(parser, harness): parser = parser.reset() - parser.parse(""" - [NoInterfaceObject, NamedConstructor=FooBar] - interface TestNamedConstructorNoInterfaceObject { + parser.parse( + """ + [LegacyNoInterfaceObject, LegacyFactoryFunction=FooBar] + interface TestLegacyFactoryFunctionLegacyNoInterfaceObject { }; - """) + """ + ) - # Test HTMLConstructor and NoInterfaceObject + # Test HTMLConstructor and LegacyNoInterfaceObject parser = parser.reset() threw = False try: - parser.parse(""" - [NoInterfaceObject] - interface TestHTMLConstructorNoInterfaceObject { + parser.parse( + """ + [LegacyNoInterfaceObject] + interface TestHTMLConstructorLegacyNoInterfaceObject { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py b/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py index 6249d36fb8f..6649f4ec05d 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py @@ -1,15 +1,20 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Foo; interface Bar; interface Foo; - """); + """ + ) results = parser.finish() # There should be no duplicate interfaces in the result. - expectedNames = sorted(['Foo', 'Bar']) + expectedNames = sorted(["Foo", "Bar"]) actualNames = sorted(map(lambda iface: iface.identifier.name, results)) - harness.check(actualNames, expectedNames, "Parser shouldn't output duplicate names.") + harness.check( + actualNames, expectedNames, "Parser shouldn't output duplicate names." + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py b/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py index dcdc43d5c47..e7d04f995a9 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py @@ -1,5 +1,6 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ dictionary Dict2 : Dict1 { long child = 5; Dict1 aaandAnother; @@ -8,27 +9,33 @@ def WebIDLTest(parser, harness): long parent; double otherParent; }; - """) + """ + ) results = parser.finish() - dict1 = results[1]; - dict2 = results[0]; + dict1 = results[1] + dict2 = results[0] harness.check(len(dict1.members), 2, "Dict1 has two members") harness.check(len(dict2.members), 2, "Dict2 has four members") - harness.check(dict1.members[0].identifier.name, "otherParent", - "'o' comes before 'p'") - harness.check(dict1.members[1].identifier.name, "parent", - "'o' really comes before 'p'") - harness.check(dict2.members[0].identifier.name, "aaandAnother", - "'a' comes before 'c'") - harness.check(dict2.members[1].identifier.name, "child", - "'a' really comes before 'c'") + harness.check( + dict1.members[0].identifier.name, "otherParent", "'o' comes before 'p'" + ) + harness.check( + dict1.members[1].identifier.name, "parent", "'o' really comes before 'p'" + ) + harness.check( + dict2.members[0].identifier.name, "aaandAnother", "'a' comes before 'c'" + ) + harness.check( + dict2.members[1].identifier.name, "child", "'a' really comes before 'c'" + ) # Test partial dictionary. 
- parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ dictionary A { long c; long g; @@ -37,30 +44,29 @@ def WebIDLTest(parser, harness): long h; long d; }; - """) + """ + ) results = parser.finish() - dict1 = results[0]; + dict1 = results[0] harness.check(len(dict1.members), 4, "Dict1 has four members") - harness.check(dict1.members[0].identifier.name, "c", - "c should be first") - harness.check(dict1.members[1].identifier.name, "d", - "d should come after c") - harness.check(dict1.members[2].identifier.name, "g", - "g should come after d") - harness.check(dict1.members[3].identifier.name, "h", - "h should be last") + harness.check(dict1.members[0].identifier.name, "c", "c should be first") + harness.check(dict1.members[1].identifier.name, "d", "d should come after c") + harness.check(dict1.members[2].identifier.name, "g", "g should come after d") + harness.check(dict1.members[3].identifier.name, "h", "h should be last") # Now reset our parser parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Dict { long prop = 5; long prop; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -68,28 +74,33 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow name duplication in a dictionary") # Test no name duplication across normal and partial dictionary. - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { long prop = 5; }; partial dictionary A { long prop; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow name duplication across normal and partial dictionary") + harness.ok( + threw, "Should not allow name duplication across normal and partial dictionary" + ) # Now reset our parser again parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Dict1 : Dict2 { long prop = 5; }; @@ -99,24 +110,28 @@ def WebIDLTest(parser, harness): dictionary Dict3 { double prop; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow name duplication in a dictionary and " - "its ancestor") + harness.ok( + threw, "Should not allow name duplication in a dictionary and " "its ancestor" + ) # More reset parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Iface {}; dictionary Dict : Iface { long prop; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -127,10 +142,12 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A : B {}; dictionary B : A {}; - """) + """ + ) results = parser.finish() except: threw = True @@ -140,27 +157,33 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { - [TreatNullAs=EmptyString] DOMString foo; + [LegacyNullToEmptyString] DOMString foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow [TreatNullAs] on dictionary members"); + harness.ok( + threw, "Should not allow [LegacyNullToEmptyString] on dictionary members" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(A arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -170,13 +193,15 @@ def WebIDLTest(parser, harness): parser = 
parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional A arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -186,47 +211,53 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo((A or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Trailing union arg containing a dictionary must be optional") + harness.ok(threw, "Trailing union arg containing a dictionary must be optional") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Trailing union arg containing a dictionary must have a default value") + harness.ok( + threw, "Trailing union arg containing a dictionary must have a default value" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(A arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -236,13 +267,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional A arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -252,245 +285,289 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(A arg1, optional long arg2, long arg3); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, - "Dictionary arg followed by non-optional arg doesn't have to be optional") + harness.ok( + not threw, + "Dictionary arg followed by non-optional arg doesn't have to be optional", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo((A or DOMString) arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Union arg containing dictionary followed by optional arg must " - "be optional") + harness.ok( + threw, + "Union arg containing dictionary followed by optional arg must " "be optional", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or DOMString) arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Union arg containing dictionary followed by optional arg must " - "have a default value") + harness.ok( + threw, + "Union arg containing dictionary followed by optional arg must " + "have a default value", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(A arg1, long arg2); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary arg followed by required arg can be required") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional A? 
arg1 = {}); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Optional dictionary arg must not be nullable") - harness.ok("nullable" in str(threw), - "Must have the expected exception for optional nullable dictionary arg") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for optional nullable dictionary arg", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { required long x; }; interface X { undefined doFoo(A? arg1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Required dictionary arg must not be nullable") - harness.ok("nullable" in str(threw), - "Must have the expected exception for required nullable " - "dictionary arg") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for required nullable " "dictionary arg", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or long)? arg1 = {}); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Dictionary arg must not be in an optional nullable union") - harness.ok("nullable" in str(threw), - "Must have the expected exception for optional nullable union " - "arg containing dictionary") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for optional nullable union " + "arg containing dictionary", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { required long x; }; interface X { undefined doFoo((A or long)? arg1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Dictionary arg must not be in a required nullable union") - harness.ok("nullable" in str(threw), - "Must have the expected exception for required nullable union " - "arg containing dictionary") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for required nullable union " + "arg containing dictionary", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(sequence arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, - "Nullable union should be allowed in a sequence argument") + harness.ok(not threw, "Nullable union should be allowed in a sequence argument") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or long?) arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Dictionary must not be in a union with a nullable type") + harness.ok(threw, "Dictionary must not be in a union with a nullable type") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (long? or A) arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable type must not be in a union with a dictionary") + harness.ok(threw, "A nullable type must not be in a union with a dictionary") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { A? 
doFoo(); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary return value can be nullable") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional A arg = {}); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary arg should actually parse") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or DOMString) arg = {}); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Union arg containing a dictionary should actually parse") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { undefined doFoo(optional (A or DOMString) arg = "abc"); }; - """) + """ + ) results = parser.finish() - harness.ok(True, "Union arg containing a dictionary with string default should actually parse") + harness.ok( + True, + "Union arg containing a dictionary with string default should actually parse", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { Foo foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -500,7 +577,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo3 : Foo { short d; }; @@ -516,78 +594,102 @@ def WebIDLTest(parser, harness): dictionary Foo { Foo1 b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Dictionary that " - "inherits from its Dictionary.") + harness.ok( + threw, + "Member type must not be a Dictionary that " "inherits from its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (Foo or DOMString)[]? 
b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Nullable type " - "whose inner type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Nullable type " + "whose inner type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (DOMString or Foo) b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Union type, one of " - "whose member types includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Union type, one of " + "whose member types includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { sequence>> c; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Sequence type " - "whose element type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Sequence type " + "whose element type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (DOMString or Foo)[] d; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be an Array type " - "whose element type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be an Array type " + "whose element type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { Foo1 b; }; @@ -603,34 +705,41 @@ def WebIDLTest(parser, harness): dictionary Foo1 : Foo2 { long a; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Dictionary, one of whose " - "members or inherited members has a type that includes " - "its Dictionary.") + harness.ok( + threw, + "Member type must not be a Dictionary, one of whose " + "members or inherited members has a type that includes " + "its Dictionary.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { }; dictionary Bar { Foo? 
d; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a nullable dictionary") - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ dictionary Foo { unrestricted float urFloat = 0; unrestricted float urFloat2 = 1.1; @@ -648,103 +757,117 @@ def WebIDLTest(parser, harness): unrestricted double negativeInfUrDouble = -Infinity; unrestricted double nanUrDouble = NaN; }; - """) + """ + ) results = parser.finish() harness.ok(True, "Parsing default values for unrestricted types succeeded.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = -Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = NaN; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") - - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = -Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = NaN; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { long module; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py b/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py index 1fa12832d7f..e96026c2a09 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py @@ -1,8 +1,13 @@ +import traceback + + def firstArgType(method): return method.signatures()[0][1][0].type + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ // Give our dictionary a required member so we don't need to // mess with optional and default values. dictionary Dict { @@ -17,7 +22,8 @@ def WebIDLTest(parser, harness): undefined passNullableUnion((object? or DOMString) arg); undefined passNullable(Foo? 
arg); }; - """) + """ + ) results = parser.finish() iface = results[2] @@ -30,31 +36,38 @@ def WebIDLTest(parser, harness): dictType = firstArgType(dictMethod) ifaceType = firstArgType(ifaceMethod) - harness.ok(dictType.isDictionary(), "Should have dictionary type"); - harness.ok(ifaceType.isInterface(), "Should have interface type"); - harness.ok(ifaceType.isCallbackInterface(), "Should have callback interface type"); + harness.ok(dictType.isDictionary(), "Should have dictionary type") + harness.ok(ifaceType.isInterface(), "Should have interface type") + harness.ok(ifaceType.isCallbackInterface(), "Should have callback interface type") - harness.ok(not dictType.isDistinguishableFrom(ifaceType), - "Dictionary not distinguishable from callback interface") - harness.ok(not ifaceType.isDistinguishableFrom(dictType), - "Callback interface not distinguishable from dictionary") + harness.ok( + not dictType.isDistinguishableFrom(ifaceType), + "Dictionary not distinguishable from callback interface", + ) + harness.ok( + not ifaceType.isDistinguishableFrom(dictType), + "Callback interface not distinguishable from dictionary", + ) nullableUnionType = firstArgType(nullableUnionMethod) nullableIfaceType = firstArgType(nullableIfaceMethod) - harness.ok(nullableUnionType.isUnion(), "Should have union type"); - harness.ok(nullableIfaceType.isInterface(), "Should have interface type"); - harness.ok(nullableIfaceType.nullable(), "Should have nullable type"); + harness.ok(nullableUnionType.isUnion(), "Should have union type") + harness.ok(nullableIfaceType.isInterface(), "Should have interface type") + harness.ok(nullableIfaceType.nullable(), "Should have nullable type") - harness.ok(not nullableUnionType.isDistinguishableFrom(nullableIfaceType), - "Nullable type not distinguishable from union with nullable " - "member type") - harness.ok(not nullableIfaceType.isDistinguishableFrom(nullableUnionType), - "Union with nullable member type not distinguishable from " - "nullable type") + harness.ok( + not nullableUnionType.isDistinguishableFrom(nullableIfaceType), + "Nullable type not distinguishable from union with nullable " "member type", + ) + harness.ok( + not nullableIfaceType.isDistinguishableFrom(nullableUnionType), + "Union with nullable member type not distinguishable from " "nullable type", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestIface { undefined passKid(Kid arg); undefined passParent(Parent arg); @@ -70,7 +83,8 @@ def WebIDLTest(parser, harness): interface Grandparent {}; interface Unrelated1 {}; interface Unrelated2 {}; - """) + """ + ) results = parser.finish() iface = results[0] @@ -80,21 +94,26 @@ def WebIDLTest(parser, harness): for type1 in argTypes: for type2 in argTypes: - distinguishable = (type1 is not type2 and - (type1 in unrelatedTypes or - type2 in unrelatedTypes)) + distinguishable = type1 is not type2 and ( + type1 in unrelatedTypes or type2 in unrelatedTypes + ) - harness.check(type1.isDistinguishableFrom(type2), - distinguishable, - "Type %s should %sbe distinguishable from type %s" % - (type1, "" if distinguishable else "not ", type2)) - harness.check(type2.isDistinguishableFrom(type1), - distinguishable, - "Type %s should %sbe distinguishable from type %s" % - (type2, "" if distinguishable else "not ", type1)) + harness.check( + type1.isDistinguishableFrom(type2), + distinguishable, + "Type %s should %sbe distinguishable from type %s" + % (type1, "" if distinguishable else "not ", type2), + ) + harness.check( + 
type2.isDistinguishableFrom(type1),
+            distinguishable,
+            "Type %s should %sbe distinguishable from type %s"
+            % (type2, "" if distinguishable else "not ", type1),
+        )
 
     parser = parser.reset()
-    parser.parse("""
+    parser.parse(
+        """
         interface Dummy {};
         interface TestIface {
           undefined method(long arg1, TestIface arg2);
@@ -102,17 +121,19 @@ def WebIDLTest(parser, harness):
           undefined method(long arg1, Dummy arg2);
           undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
         };
-    """)
+    """
+    )
     results = parser.finish()
-    harness.check(len(results[1].members), 1,
-                  "Should look like we have one method")
-    harness.check(len(results[1].members[0].signatures()), 4,
-                  "Should have four signatures")
+    harness.check(len(results[1].members), 1, "Should look like we have one method")
+    harness.check(
+        len(results[1].members[0].signatures()), 4, "Should have four signatures"
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
             interface Dummy {};
             interface TestIface {
               undefined method(long arg1, TestIface arg2);
@@ -120,19 +141,23 @@ def WebIDLTest(parser, harness):
              undefined method(any arg1, Dummy arg2);
              undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
-    harness.ok(threw,
-               "Should throw when args before the distinguishing arg are not "
-               "all the same type")
+    harness.ok(
+        threw,
+        "Should throw when args before the distinguishing arg are not "
+        "all the same type",
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
             interface Dummy {};
             interface TestIface {
               undefined method(long arg1, TestIface arg2);
@@ -140,7 +165,8 @@ def WebIDLTest(parser, harness):
              undefined method(any arg1, DOMString arg2);
              undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
@@ -148,57 +174,133 @@ def WebIDLTest(parser, harness):
     harness.ok(threw, "Should throw when there is no distinguishing index")
 
     # Now let's test our whole distinguishability table
-    argTypes = [ "long", "short", "long?", "short?", "boolean",
-                 "boolean?", "DOMString", "ByteString", "UTF8String", "Enum", "Enum2",
-                 "Interface", "Interface?",
-                 "AncestorInterface", "UnrelatedInterface", "CallbackInterface",
-                 "CallbackInterface?", "CallbackInterface2",
-                 "object", "Callback", "Callback2", "Dict",
-                 "Dict2", "sequence<long>", "sequence<short>",
-                 "record<DOMString, object>",
-                 "record<USVString, Dict>",
-                 "record<ByteString, long>",
-                 "record<UTF8String, long>",
-                 "any", "Promise<any>", "Promise<any>?",
-                 "USVString", "JSString", "ArrayBuffer", "ArrayBufferView",
-                 "Uint8Array", "Uint16Array",
-                 "(long or Callback)", "(long or Dict)",
+    argTypes = [
+        "long",
+        "short",
+        "long?",
+        "short?",
+        "boolean",
+        "boolean?",
+        "undefined",
+        "undefined?",
+        "DOMString",
+        "ByteString",
+        "UTF8String",
+        "Enum",
+        "Enum2",
+        "Interface",
+        "Interface?",
+        "AncestorInterface",
+        "UnrelatedInterface",
+        "CallbackInterface",
+        "CallbackInterface?",
+        "CallbackInterface2",
+        "object",
+        "Callback",
+        "Callback2",
+        "Dict",
+        "Dict2",
+        "sequence<long>",
+        "sequence<short>",
+        "record<DOMString, object>",
+        "record<USVString, Dict>",
+        "record<ByteString, long>",
+        "record<UTF8String, long>",
+        "any",
+        "Promise<any>",
+        "Promise<any>?",
+        "USVString",
+        "JSString",
+        "ArrayBuffer",
+        "ArrayBufferView",
+        "Uint8Array",
+        "Uint16Array",
+        "(long or Callback)",
+        "(long or Dict)",
     ]
 
     # Try to categorize things a bit to keep list lengths down
     def allBut(list1, list2):
-        return [a for a in list1 if a not in list2 and
-                (a != "any" and a != "Promise<any>" and a != "Promise<any>?")]
-    unions = [ "(long or Callback)", "(long or Dict)" ]
-    numerics = [ "long", "short", "long?", "short?" ]
-    booleans = [ "boolean", "boolean?" ]
+        return [
+            a
+            for a in list1
+            if a not in list2
+            and (a != "any" and a != "Promise<any>" and a != "Promise<any>?")
+        ]
+
+    unions = ["(long or Callback)", "(long or Dict)"]
+    numerics = ["long", "short", "long?", "short?"]
+    booleans = ["boolean", "boolean?"]
+    undefineds = ["undefined", "undefined?"]
     primitives = numerics + booleans
     nonNumerics = allBut(argTypes, numerics + unions)
     nonBooleans = allBut(argTypes, booleans)
-    strings = [ "DOMString", "ByteString", "Enum", "Enum2", "USVString", "JSString", "UTF8String" ]
+    strings = [
+        "DOMString",
+        "ByteString",
+        "Enum",
+        "Enum2",
+        "USVString",
+        "JSString",
+        "UTF8String",
+    ]
     nonStrings = allBut(argTypes, strings)
-    nonObjects = primitives + strings
-    objects = allBut(argTypes, nonObjects )
+    nonObjects = undefineds + primitives + strings
+    objects = allBut(argTypes, nonObjects)
     bufferSourceTypes = ["ArrayBuffer", "ArrayBufferView", "Uint8Array", "Uint16Array"]
-    interfaces = [ "Interface", "Interface?", "AncestorInterface",
-                   "UnrelatedInterface" ] + bufferSourceTypes
-    nullables = (["long?", "short?", "boolean?", "Interface?",
-                  "CallbackInterface?", "Dict", "Dict2",
-                  "Date?", "any", "Promise<any>?"] +
-                 allBut(unions, [ "(long or Callback)" ]))
-    sequences = [ "sequence<long>", "sequence<short>" ]
+    interfaces = [
+        "Interface",
+        "Interface?",
+        "AncestorInterface",
+        "UnrelatedInterface",
+    ] + bufferSourceTypes
+    nullables = [
+        "long?",
+        "short?",
+        "boolean?",
+        "undefined?",
+        "Interface?",
+        "CallbackInterface?",
+        "Dict",
+        "Dict2",
+        "Date?",
+        "any",
+        "Promise<any>?",
+    ] + allBut(unions, ["(long or Callback)"])
+    sequences = ["sequence<long>", "sequence<short>"]
     nonUserObjects = nonObjects + interfaces + sequences
     otherObjects = allBut(argTypes, nonUserObjects + ["object"])
-    notRelatedInterfaces = (nonObjects + ["UnrelatedInterface"] +
-                            otherObjects + sequences + bufferSourceTypes)
-    records = [ "record<DOMString, object>", "record<USVString, Dict>",
-                "record<ByteString, long>", "record<UTF8String, long>" ]  # JSString not supported in records
+    notRelatedInterfaces = (
+        nonObjects
+        + ["UnrelatedInterface"]
+        + otherObjects
+        + sequences
+        + bufferSourceTypes
+    )
+    records = [
+        "record<DOMString, object>",
+        "record<USVString, Dict>",
+        "record<ByteString, long>",
+        "record<UTF8String, long>",
+    ]  # JSString not supported in records
+    dictionaryLike = (
+        [
+            "Dict",
+            "Dict2",
+            "CallbackInterface",
+            "CallbackInterface?",
+            "CallbackInterface2",
+        ]
+        + records
+        + allBut(unions, ["(long or Callback)"])
+    )
 
     # Build a representation of the distinguishability table as a dict
     # of dicts, holding True values where needed, holes elsewhere.
-    data = dict();
+    data = dict()
     for type in argTypes:
         data[type] = dict()
+
     def setDistinguishable(type, types):
         for other in types:
             data[type][other] = True
@@ -209,6 +311,10 @@ def WebIDLTest(parser, harness):
     setDistinguishable("short?", allBut(nonNumerics, nullables))
     setDistinguishable("boolean", nonBooleans)
     setDistinguishable("boolean?", allBut(nonBooleans, nullables))
+    setDistinguishable("undefined", allBut(argTypes, undefineds + dictionaryLike))
+    setDistinguishable(
+        "undefined?", allBut(argTypes, undefineds + dictionaryLike + nullables)
+    )
     setDistinguishable("DOMString", nonStrings)
     setDistinguishable("ByteString", nonStrings)
     setDistinguishable("UTF8String", nonStrings)
@@ -219,36 +325,44 @@ def WebIDLTest(parser, harness):
     setDistinguishable("Interface", notRelatedInterfaces)
     setDistinguishable("Interface?", allBut(notRelatedInterfaces, nullables))
     setDistinguishable("AncestorInterface", notRelatedInterfaces)
-    setDistinguishable("UnrelatedInterface",
-                       allBut(argTypes, ["object", "UnrelatedInterface"]))
-    setDistinguishable("CallbackInterface", nonUserObjects)
-    setDistinguishable("CallbackInterface?", allBut(nonUserObjects, nullables))
-    setDistinguishable("CallbackInterface2", nonUserObjects)
+    setDistinguishable(
+        "UnrelatedInterface", allBut(argTypes, ["object", "UnrelatedInterface"])
+    )
+    setDistinguishable("CallbackInterface", allBut(nonUserObjects, undefineds))
+    setDistinguishable(
+        "CallbackInterface?", allBut(nonUserObjects, nullables + undefineds)
+    )
+    setDistinguishable("CallbackInterface2", allBut(nonUserObjects, undefineds))
     setDistinguishable("object", nonObjects)
     setDistinguishable("Callback", nonUserObjects)
     setDistinguishable("Callback2", nonUserObjects)
-    setDistinguishable("Dict", allBut(nonUserObjects, nullables))
-    setDistinguishable("Dict2", allBut(nonUserObjects, nullables))
-    setDistinguishable("sequence<long>",
-                       allBut(argTypes, sequences + ["object"]))
-    setDistinguishable("sequence<short>",
-                       allBut(argTypes, sequences + ["object"]))
-    setDistinguishable("record<DOMString, object>", nonUserObjects)
-    setDistinguishable("record<USVString, Dict>", nonUserObjects)
+    setDistinguishable("Dict", allBut(nonUserObjects, nullables + undefineds))
+    setDistinguishable("Dict2", allBut(nonUserObjects, nullables + undefineds))
+    setDistinguishable("sequence<long>", allBut(argTypes, sequences + ["object"]))
+    setDistinguishable("sequence<short>", allBut(argTypes, sequences + ["object"]))
+    setDistinguishable("record<DOMString, object>", allBut(nonUserObjects, undefineds))
+    setDistinguishable("record<USVString, Dict>", allBut(nonUserObjects, undefineds))
     # JSString not supported in records
-    setDistinguishable("record<ByteString, long>", nonUserObjects)
-    setDistinguishable("record<UTF8String, long>", nonUserObjects)
+    setDistinguishable("record<ByteString, long>", allBut(nonUserObjects, undefineds))
+    setDistinguishable("record<UTF8String, long>", allBut(nonUserObjects, undefineds))
     setDistinguishable("any", [])
     setDistinguishable("Promise<any>", [])
     setDistinguishable("Promise<any>?", [])
     setDistinguishable("ArrayBuffer", allBut(argTypes, ["ArrayBuffer", "object"]))
-    setDistinguishable("ArrayBufferView", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "Uint16Array", "object"]))
-    setDistinguishable("Uint8Array", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "object"]))
-    setDistinguishable("Uint16Array", allBut(argTypes, ["ArrayBufferView", "Uint16Array", "object"]))
-    setDistinguishable("(long or Callback)",
-                       allBut(nonUserObjects, numerics))
-    setDistinguishable("(long or Dict)",
-                       allBut(nonUserObjects, numerics + nullables))
+    setDistinguishable(
+        "ArrayBufferView",
+        allBut(argTypes, ["ArrayBufferView", "Uint8Array", "Uint16Array", "object"]),
+    )
+    setDistinguishable(
+        "Uint8Array", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "object"])
+    )
+    setDistinguishable(
+        "Uint16Array", allBut(argTypes, ["ArrayBufferView", "Uint16Array", "object"])
+    )
+    setDistinguishable("(long or Callback)", allBut(nonUserObjects, numerics))
+    setDistinguishable(
+        "(long or Dict)", allBut(nonUserObjects, numerics + nullables + undefineds)
+    )
 
     def areDistinguishable(type1, type2):
         return data[type1].get(type2, False)
@@ -271,10 +385,18 @@ def WebIDLTest(parser, harness):
           interface TestInterface {%s
           };
         """
-        methodTemplate = """
-            undefined myMethod(%s arg);"""
-        methods = (methodTemplate % type1) + (methodTemplate % type2)
+        if type1 in undefineds or type2 in undefineds:
+            methods = """
+            (%s or %s) myMethod();""" % (
+                type1,
+                type2,
+            )
+        else:
+            methodTemplate = """
+            undefined myMethod(%s arg);"""
+            methods = (methodTemplate % type1) + (methodTemplate % type2)
         idl = idlTemplate % methods
+
         parser = parser.reset()
         threw = False
        try:
@@ -284,11 +406,17 @@ def WebIDLTest(parser, harness):
             threw = True
 
         if areDistinguishable(type1, type2):
-            harness.ok(not threw,
-                       "Should not throw for '%s' and '%s' because they are distinguishable" % (type1, type2))
+            harness.ok(
+                not threw,
+                "Should not throw for '%s' and '%s' because they are distinguishable"
+                % (type1, type2),
+            )
         else:
-            harness.ok(threw,
-                       "Should throw for '%s' and '%s' because they are not distinguishable" % (type1, type2))
+            harness.ok(
+                threw,
+                "Should throw for '%s' and '%s' because they are not distinguishable"
+                % (type1, type2),
+            )
 
     # Enumerate over everything in both orders, since order matters in
     # terms of our implementation of distinguishability checks
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_double_null.py b/components/script/dom/bindings/codegen/parser/tests/test_double_null.py
index 700c7eade00..a8876a7fd2d 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_double_null.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_double_null.py
@@ -1,11 +1,13 @@
 def WebIDLTest(parser, harness):
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
             interface DoubleNull {
               attribute byte??
foo; }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py b/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py index 4874b3aafe6..89a4e1acf0b 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers1 { getter getter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -15,11 +17,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers2 { setter setter byte foo(unsigned long index, byte value); }; - """) + """ + ) results = parser.finish() except: @@ -29,11 +33,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers4 { deleter deleter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -43,11 +49,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers5 { getter deleter getter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py b/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py index ee0079f06da..09333a659cd 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum TestEmptyEnum { }; - """) + """ + ) harness.ok(False, "Should have thrown!") except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py b/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py index 5f04c6ae751..21837743523 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py @@ -1,13 +1,16 @@ import WebIDL + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface X { const sequence foo = []; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -17,29 +20,35 @@ def WebIDLTest(parser, harness): parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface X { undefined foo(optional sequence arg = []); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() - harness.ok(isinstance( - results[0].members[0].signatures()[0][1][0].defaultValue, - WebIDL.IDLEmptySequenceValue), - "Should have IDLEmptySequenceValue as default value of argument") + harness.ok( + isinstance( + results[0].members[0].signatures()[0][1][0].defaultValue, + WebIDL.IDLEmptySequenceValue, + ), + "Should have IDLEmptySequenceValue as default value of argument", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary X { sequence foo = []; }; - """) - results = parser.finish(); - - harness.ok(isinstance(results[0].members[0].defaultValue, - WebIDL.IDLEmptySequenceValue), - "Should have 
IDLEmptySequenceValue as default value of " - "dictionary member") + """ + ) + results = parser.finish() + harness.ok( + isinstance(results[0].members[0].defaultValue, WebIDL.IDLEmptySequenceValue), + "Should have IDLEmptySequenceValue as default value of " "dictionary member", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_enum.py b/components/script/dom/bindings/codegen/parser/tests/test_enum.py index c5617ead99a..56c6b3f64aa 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_enum.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_enum.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ enum TestEnum { "", "foo", @@ -12,16 +14,15 @@ def WebIDLTest(parser, harness): TestEnum doFoo(boolean arg); readonly attribute TestEnum foo; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestEnumInterfaces interface parsed without error.") harness.check(len(results), 2, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLEnum), - "Should be an IDLEnum") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLEnum), "Should be an IDLEnum") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") enum = results[0] harness.check(enum.identifier.QName(), "::TestEnum", "Enum has the right QName") @@ -30,32 +31,41 @@ def WebIDLTest(parser, harness): iface = results[1] - harness.check(iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName") - harness.check(iface.identifier.name, "TestEnumInterface", "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestEnumInterface", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members harness.check(len(members), 2, "Should be one production") - harness.ok(isinstance(members[0], WebIDL.IDLMethod), - "Should be an IDLMethod") + harness.ok(isinstance(members[0], WebIDL.IDLMethod), "Should be an IDLMethod") method = members[0] - harness.check(method.identifier.QName(), "::TestEnumInterface::doFoo", - "Method has correct QName") + harness.check( + method.identifier.QName(), + "::TestEnumInterface::doFoo", + "Method has correct QName", + ) harness.check(method.identifier.name, "doFoo", "Method has correct name") signatures = method.signatures() harness.check(len(signatures), 1, "Expect one signature") (returnType, arguments) = signatures[0] - harness.check(str(returnType), "TestEnum (Wrapper)", "Method type is the correct name") + harness.check( + str(returnType), "TestEnum (Wrapper)", "Method type is the correct name" + ) harness.check(len(arguments), 1, "Method has the right number of arguments") arg = arguments[0] harness.ok(isinstance(arg, WebIDL.IDLArgument), "Should be an IDLArgument") harness.check(str(arg.type), "Boolean", "Argument has the right type") attr = members[1] - harness.check(attr.identifier.QName(), "::TestEnumInterface::foo", - "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestEnumInterface::foo", "Attr has correct QName" + ) harness.check(attr.identifier.name, "foo", "Attr has correct name") harness.check(str(attr.type), "TestEnum (Wrapper)", "Attr type is the correct name") @@ -64,7 +74,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw 
= False try: - parser.parse(""" + parser.parse( + """ enum Enum { "a", "b", @@ -73,7 +84,8 @@ def WebIDLTest(parser, harness): interface TestInterface { undefined foo(optional Enum e = "d"); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -82,12 +94,14 @@ def WebIDLTest(parser, harness): # Now reset our parser parser = parser.reset() - parser.parse(""" + parser.parse( + """ enum Enum { "a", "b", "c", }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should allow trailing comma in enum") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py b/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py index 51205d209e7..8969281e1c7 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py @@ -1,13 +1,16 @@ import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum TestEnumDuplicateValue { "", "" }; - """) + """ + ) harness.ok(False, "Should have thrown!") except: harness.ok(True, "Enum TestEnumDuplicateValue should throw") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py b/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py index 7afd15513c6..1c9bb065580 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py @@ -1,20 +1,24 @@ import WebIDL + def WebIDLTest(parser, harness): # Check that error messages put the '^' in the right place. threw = False - input = 'interface ?' + input = "interface ?" try: parser.parse(input) results = parser.finish() except WebIDL.WebIDLError as e: threw = True - lines = str(e).split('\n') + lines = str(e).split("\n") - harness.check(len(lines), 3, 'Expected number of lines in error message') - harness.check(lines[1], input, 'Second line shows error') - harness.check(lines[2], ' ' * (len(input) - 1) + '^', - 'Correct column pointer in error message') + harness.check(len(lines), 3, "Expected number of lines in error message") + harness.check(lines[1], input, "Second line shows error") + harness.check( + lines[2], + " " * (len(input) - 1) + "^", + "Correct column pointer in error message", + ) harness.ok(threw, "Should have thrown.") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py b/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py index 70bb1883682..0d10e006787 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py @@ -1,5 +1,6 @@ import WebIDL + def WebIDLTest(parser, harness): # Check that error messages put the '^' in the right place. @@ -16,13 +17,22 @@ interface ?""" results = parser.finish() except WebIDL.WebIDLError as e: threw = True - lines = str(e).split('\n') + lines = str(e).split("\n") - harness.check(len(lines), 3, 'Expected number of lines in error message') - harness.ok(lines[0].endswith('line 6:10'), 'First line of error should end with "line 6:10", but was "%s".' 
% lines[0]) - harness.check(lines[1], 'interface ?', 'Second line of error message is the line which caused the error.') - harness.check(lines[2], ' ' * (len('interface ?') - 1) + '^', - 'Correct column pointer in error message.') + harness.check(len(lines), 3, "Expected number of lines in error message") + harness.ok( + lines[0].endswith("line 6:10"), + 'First line of error should end with "line 6:10", but was "%s".' % lines[0], + ) + harness.check( + lines[1], + "interface ?", + "Second line of error message is the line which caused the error.", + ) + harness.check( + lines[2], + " " * (len("interface ?") - 1) + "^", + "Correct column pointer in error message.", + ) harness.ok(threw, "Should have thrown.") - diff --git a/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py b/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py index 39993eaeae5..c5ea8e4b88b 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1,Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -18,39 +20,56 @@ def WebIDLTest(parser, harness): partial interface Iface { undefined method2(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 5, "Should know about five things"); + harness.check(len(results), 5, "Should know about five things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 3, "Should have three members") - harness.ok(members[0].exposureSet == set(["Foo", "Bar"]), - "method1 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo", "Bar1"]), - "method1 should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo", "Bar"]), + "method1 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo", "Bar1"]), + "method1 should have the right exposure global names", + ) - harness.ok(members[1].exposureSet == set(["Bar"]), - "attr should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Bar1"]), - "attr should have the right exposure global names") + harness.ok( + members[1].exposureSet == set(["Bar"]), + "attr should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Bar1"]), + "attr should have the right exposure global names", + ) - harness.ok(members[2].exposureSet == set(["Foo"]), - "method2 should have the right exposure set") - harness.ok(members[2]._exposureGlobalNames == set(["Foo"]), - "method2 should have the right exposure global names") + harness.ok( + members[2].exposureSet == set(["Foo"]), + "method2 should have the right exposure set", + ) + harness.ok( + members[2]._exposureGlobalNames == set(["Foo"]), + "method2 should have the right exposure global names", + ) - harness.ok(iface.exposureSet == set(["Foo", "Bar"]), - "Iface should have the right exposure set") - harness.ok(iface._exposureGlobalNames == set(["Foo", "Bar1"]), - "Iface should have the right exposure global names") + 
harness.ok( + iface.exposureSet == set(["Foo", "Bar"]), + "Iface should have the right exposure set", + ) + harness.ok( + iface._exposureGlobalNames == set(["Foo", "Bar1"]), + "Iface should have the right exposure global names", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -59,28 +78,36 @@ def WebIDLTest(parser, harness): interface Iface2 { undefined method3(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 4, "Should know about four things"); + harness.check(len(results), 4, "Should know about four things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 1, "Should have one member") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method3 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method3 should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method3 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method3 should have the right exposure global names", + ) - harness.ok(iface.exposureSet == set(["Foo"]), - "Iface2 should have the right exposure set") - harness.ok(iface._exposureGlobalNames == set(["Foo"]), - "Iface2 should have the right exposure global names") + harness.ok( + iface.exposureSet == set(["Foo"]), "Iface2 should have the right exposure set" + ) + harness.ok( + iface._exposureGlobalNames == set(["Foo"]), + "Iface2 should have the right exposure global names", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -96,33 +123,43 @@ def WebIDLTest(parser, harness): }; Iface3 includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 6, "Should know about six things"); + harness.check(len(results), 6, "Should know about six things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 2, "Should have two members") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method4 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method4 should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method4 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method4 should have the right exposure global names", + ) - harness.ok(members[1].exposureSet == set(["Foo", "Bar"]), - "method5 should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Foo", "Bar1"]), - "method5 should have the right exposure global names") + harness.ok( + members[1].exposureSet == set(["Foo", "Bar"]), + "method5 should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Foo", "Bar1"]), + "method5 should have the right exposure global names", 
+ ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Exposed=Foo] interface Bar { }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -133,12 +170,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] readonly attribute bool attr; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -149,12 +188,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] undefined operation(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -165,12 +206,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] const long constant = 5; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -181,7 +224,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global, Exposed=Bar] interface Bar {}; @@ -190,16 +234,20 @@ def WebIDLTest(parser, harness): [Exposed=Bar] undefined method(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should have thrown on member exposed where its interface is not.") + harness.ok( + threw, "Should have thrown on member exposed where its interface is not." + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global, Exposed=Bar] interface Bar {}; @@ -214,25 +262,122 @@ def WebIDLTest(parser, harness): }; Baz includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 5, "Should know about five things"); + harness.check(len(results), 5, "Should know about five things") iface = results[2] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 2, "Should have two members") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method should have the right exposure global names", + ) - harness.ok(members[1].exposureSet == set(["Bar"]), - "otherMethod should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Bar"]), - "otherMethod should have the right exposure global names") + harness.ok( + members[1].exposureSet == set(["Bar"]), + "otherMethod should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Bar"]), + "otherMethod should have the right exposure global names", + ) + parser = parser.reset() + parser.parse( + """ + [Global, Exposed=Foo] interface Foo {}; + [Global, Exposed=Bar] interface Bar {}; + [Exposed=*] + interface Baz { + undefined methodWild(); + }; + + [Exposed=Bar] + interface mixin Mixin { + undefined methodNotWild(); + }; + + Baz includes Mixin; + """ + ) + + results = parser.finish() + + harness.check(len(results), 5, "Should know about five things") + 
iface = results[2] + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") + members = iface.members + harness.check(len(members), 2, "Should have two members") + + harness.ok( + members[0].exposureSet == set(["Foo", "Bar"]), + "methodWild should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo", "Bar"]), + "methodWild should have the right exposure global names", + ) + + harness.ok( + members[1].exposureSet == set(["Bar"]), + "methodNotWild should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Bar"]), + "methodNotWild should have the right exposure global names", + ) + + parser = parser.reset() + threw = False + try: + parser.parse( + """ + [Global, Exposed=Foo] interface Foo {}; + [Global, Exposed=Bar] interface Bar {}; + + [Exposed=Foo] + interface Baz { + [Exposed=*] + undefined method(); + }; + """ + ) + + results = parser.finish() + except Exception as x: + threw = True + + harness.ok( + threw, "Should have thrown on member exposed where its interface is not." + ) + + parser = parser.reset() + threw = False + try: + parser.parse( + """ + [Global, Exposed=Foo] interface Foo {}; + [Global, Exposed=Bar] interface Bar {}; + + [Exposed=(Foo,*)] + interface Baz { + undefined method(); + }; + """ + ) + + results = parser.finish() + except Exception as x: + threw = True + + harness.ok(threw, "Should have thrown on a wildcard in an identifier list.") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py b/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py index 66909f322c2..423a67540c7 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py @@ -1,74 +1,91 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" - [NoInterfaceObject] + parser.parse( + """ + [LegacyNoInterfaceObject] interface TestExtendedAttr { - [Unforgeable] readonly attribute byte b; + [LegacyUnforgeable] readonly attribute byte b; }; - """) + """ + ) results = parser.finish() parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Pref="foo.bar",Pref=flop] interface TestExtendedAttr { [Pref="foo.bar"] attribute byte b; }; - """) + """ + ) results = parser.finish() parser = parser.reset() - parser.parse(""" - interface TestLenientThis { - [LenientThis] attribute byte b; + parser.parse( + """ + interface TestLegacyLenientThis { + [LegacyLenientThis] attribute byte b; }; - """) + """ + ) results = parser.finish() - harness.ok(results[0].members[0].hasLenientThis(), - "Should have a lenient this") + harness.ok( + results[0].members[0].hasLegacyLenientThis(), "Should have a lenient this" + ) parser = parser.reset() threw = False try: - parser.parse(""" - interface TestLenientThis2 { - [LenientThis=something] attribute byte b; + parser.parse( + """ + interface TestLegacyLenientThis2 { + [LegacyLenientThis=something] attribute byte b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[LenientThis] must take no arguments") + harness.ok(threw, "[LegacyLenientThis] must take no arguments") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestClamp { undefined testClamp([Clamp] long foo); undefined testNotClamp(long foo); }; - """) + """ + ) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) 
signature. - harness.ok(results[0].members[0].signatures()[0][1][0].type.hasClamp(), - "Should be clamped") - harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasClamp(), - "Should not be clamped") + harness.ok( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), "Should be clamped" + ) + harness.ok( + not results[0].members[1].signatures()[0][1][0].type.hasClamp(), + "Should not be clamped", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestClamp2 { undefined testClamp([Clamp=something] long foo); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -76,32 +93,39 @@ def WebIDLTest(parser, harness): harness.ok(threw, "[Clamp] must take no arguments") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestEnforceRange { undefined testEnforceRange([EnforceRange] long foo); undefined testNotEnforceRange(long foo); }; - """) + """ + ) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) signature. - harness.ok(results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(), - "Should be enforceRange") - harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(), - "Should not be enforceRange") + harness.ok( + results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(), + "Should be enforceRange", + ) + harness.ok( + not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(), + "Should not be enforceRange", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestEnforceRange2 { undefined testEnforceRange([EnforceRange=something] long foo); }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "[EnforceRange] must take no arguments") - diff --git a/components/script/dom/bindings/codegen/parser/tests/test_float_types.py b/components/script/dom/bindings/codegen/parser/tests/test_float_types.py index 8fbe9394042..d37443819d8 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_float_types.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_float_types.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ typedef float myFloat; typedef unrestricted float myUnrestrictedFloat; interface FloatTypes { @@ -15,14 +17,14 @@ def WebIDLTest(parser, harness): attribute double ld; undefined m1(float arg1, double arg2, float? arg3, double? arg4, - myFloat arg5, unrestricted float arg6, - unrestricted double arg7, unrestricted float? arg8, - unrestricted double? arg9, myUnrestrictedFloat arg10); + myFloat arg5, unrestricted float arg6, + unrestricted double arg7, unrestricted float? arg8, + unrestricted double? arg9, myUnrestrictedFloat arg10); [LenientFloat] undefined m2(float arg1, double arg2, float? arg3, double? arg4, - myFloat arg5, unrestricted float arg6, - unrestricted double arg7, unrestricted float? arg8, - unrestricted double? arg9, myUnrestrictedFloat arg10); + myFloat arg5, unrestricted float arg6, + unrestricted double arg7, unrestricted float? arg8, + unrestricted double? 
arg9, myUnrestrictedFloat arg10); [LenientFloat] undefined m3(float arg); [LenientFloat] @@ -32,14 +34,14 @@ def WebIDLTest(parser, harness): [LenientFloat] undefined m6(sequence arg); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 3, "Should be two typedefs and one interface.") iface = results[2] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") types = [a.type for a in iface.members if a.isAttr()] harness.ok(types[0].isFloat(), "'float' is a float") harness.ok(not types[0].isUnrestricted(), "'float' is not unrestricted") @@ -55,71 +57,89 @@ def WebIDLTest(parser, harness): argtypes = [a.type for a in method.signatures()[0][1]] for (idx, type) in enumerate(argtypes): harness.ok(type.isFloat(), "Type %d should be float" % idx) - harness.check(type.isUnrestricted(), idx >= 5, - "Type %d should %sbe unrestricted" % ( - idx, "" if idx >= 4 else "not ")) + harness.check( + type.isUnrestricted(), + idx >= 5, + "Type %d should %sbe unrestricted" % (idx, "" if idx >= 4 else "not "), + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] long m(float arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on undefined-retuning methods") + harness.ok(threw, "[LenientFloat] only allowed on methods returning undefined") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] undefined m(unrestricted float arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] undefined m(sequence arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args (2)") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args (2)" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] undefined m((unrestricted float or FloatTypes) arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args (3)") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args (3)" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] readonly attribute float foo; }; - """) + """ + ) except Exception as x: threw = True harness.ok(threw, "[LenientFloat] only allowed on writable attributes") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py b/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py index cac24c832cc..1c81718400a 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface ForwardDeclared; interface ForwardDeclared; interface TestForwardDecl 
{ attribute ForwardDeclared foo; }; - """) + """ + ) results = parser.finish() diff --git a/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py b/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py index 3958f8ce104..9ee27efbc8d 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py @@ -1,106 +1,129 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo : Bar { getter any(DOMString name); }; [Exposed=Foo] interface Bar {}; - """) + """ + ) results = parser.finish() - harness.ok(results[0].isOnGlobalProtoChain(), - "[Global] interface should be on global's proto chain") - harness.ok(results[1].isOnGlobalProtoChain(), - "[Global] interface should be on global's proto chain") + harness.ok( + results[0].isOnGlobalProtoChain(), + "[Global] interface should be on global's proto chain", + ) + harness.ok( + results[1].isOnGlobalProtoChain(), + "[Global] interface should be on global's proto chain", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { getter any(DOMString name); setter undefined(DOMString name, any arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "named setter") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "named setter", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { getter any(DOMString name); deleter undefined(DOMString name); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "named deleter") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "named deleter", + ) parser = parser.reset() threw = False try: - parser.parse(""" - [Global, OverrideBuiltins, Exposed=Foo] + parser.parse( + """ + [Global, LegacyOverrideBuiltIns, Exposed=Foo] interface Foo { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "[OverrideBuiltins]") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " + "[LegacyOverrideBuiltIns]", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo : Bar { }; - [OverrideBuiltins, Exposed=Foo] + [LegacyOverrideBuiltIns, Exposed=Foo] interface Bar { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with an " - "[OverrideBuiltins] ancestor") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with an " + "[LegacyOverrideBuiltIns] ancestor", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { }; [Exposed=Foo] interface Bar : Foo { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "descendant") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "descendant", + ) diff 
--git a/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py b/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py index 0e9a6654aa7..7404c86f944 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py @@ -1,39 +1,49 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum Foo { "a" }; interface Foo; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Name collision" in e.message, - "Should have name collision for interface") + harness.ok( + "Name collision" in str(e), "Should have name collision for interface" + ) parser = parser.reset() try: - parser.parse(""" + parser.parse( + """ dictionary Foo { long x; }; enum Foo { "a" }; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Name collision" in e.message, - "Should have name collision for dictionary") + harness.ok( + "Name collision" in str(e), "Should have name collision for dictionary" + ) parser = parser.reset() try: - parser.parse(""" + parser.parse( + """ enum Foo { "a" }; enum Foo { "b" }; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Multiple unresolvable definitions" in e.message, - "Should have name collision for dictionary") - + harness.ok( + "Multiple unresolvable definitions" in str(e), + "Should have name collision for dictionary", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py index 8f30c212d7b..ed476b8ed4c 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestIncompleteParent : NotYetDefined { undefined foo(); }; @@ -11,7 +13,8 @@ def WebIDLTest(parser, harness): interface EvenHigherOnTheChain { }; - """) + """ + ) parser.finish() diff --git a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py index fdc39604070..0d54f708bba 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestIncompleteTypes { attribute FooInterface attr1; @@ -10,35 +12,50 @@ def WebIDLTest(parser, harness): interface FooInterface { }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestIncompleteTypes interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestIncompleteTypes", "Interface has the right QName") - harness.check(iface.identifier.name, "TestIncompleteTypes", "Interface has the right name") + harness.ok(isinstance(iface, WebIDL.IDLInterface), 
"Should be an IDLInterface") + harness.check( + iface.identifier.QName(), + "::TestIncompleteTypes", + "Interface has the right QName", + ) + harness.check( + iface.identifier.name, "TestIncompleteTypes", "Interface has the right name" + ) harness.check(len(iface.members), 2, "Expect 2 members") attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") method = iface.members[1] - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") - harness.check(attr.identifier.QName(), "::TestIncompleteTypes::attr1", - "Attribute has the right QName") - harness.check(attr.type.name, "FooInterface", - "Previously unresolved type has the right name") + harness.check( + attr.identifier.QName(), + "::TestIncompleteTypes::attr1", + "Attribute has the right QName", + ) + harness.check( + attr.type.name, "FooInterface", "Previously unresolved type has the right name" + ) - harness.check(method.identifier.QName(), "::TestIncompleteTypes::method1", - "Attribute has the right QName") + harness.check( + method.identifier.QName(), + "::TestIncompleteTypes::method1", + "Attribute has the right QName", + ) (returnType, args) = method.signatures()[0] - harness.check(returnType.name, "FooInterface", - "Previously unresolved type has the right name") - harness.check(args[0].type.name, "FooInterface", - "Previously unresolved type has the right name") + harness.check( + returnType.name, "FooInterface", "Previously unresolved type has the right name" + ) + harness.check( + args[0].type.name, + "FooInterface", + "Previously unresolved type has the right name", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface.py b/components/script/dom/bindings/codegen/parser/tests/test_interface.py index a10bcd9863d..85748848e1b 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_interface.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_interface.py @@ -1,12 +1,12 @@ import WebIDL + def WebIDLTest(parser, harness): parser.parse("interface Foo { };") results = parser.finish() harness.ok(True, "Empty interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] harness.check(iface.identifier.QName(), "::Foo", "Interface has the right QName") harness.check(iface.identifier.name, "Foo", "Interface has the right name") @@ -16,16 +16,15 @@ def WebIDLTest(parser, harness): results = parser.finish() harness.ok(True, "Empty interface parsed without error.") harness.check(len(results), 2, "Should be two productions") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[1] harness.check(iface.identifier.QName(), "::Bar", "Interface has the right QName") harness.check(iface.identifier.name, "Bar", "Interface has the right name") - harness.ok(isinstance(iface.parent, WebIDL.IDLInterface), - "Interface has a parent") + harness.ok(isinstance(iface.parent, WebIDL.IDLInterface), "Interface has a parent") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface QNameBase { attribute 
long foo; }; @@ -34,32 +33,42 @@ def WebIDLTest(parser, harness): attribute long long foo; attribute byte bar; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should be two productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") harness.check(results[1].parent, results[0], "Inheritance chain is right") harness.check(len(results[0].members), 1, "Expect 1 productions") harness.check(len(results[1].members), 2, "Expect 2 productions") base = results[0] derived = results[1] - harness.check(base.members[0].identifier.QName(), "::QNameBase::foo", - "Member has the right QName") - harness.check(derived.members[0].identifier.QName(), "::QNameDerived::foo", - "Member has the right QName") - harness.check(derived.members[1].identifier.QName(), "::QNameDerived::bar", - "Member has the right QName") + harness.check( + base.members[0].identifier.QName(), + "::QNameBase::foo", + "Member has the right QName", + ) + harness.check( + derived.members[0].identifier.QName(), + "::QNameDerived::foo", + "Member has the right QName", + ) + harness.check( + derived.members[1].identifier.QName(), + "::QNameDerived::bar", + "Member has the right QName", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A : B {}; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True @@ -69,32 +78,42 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A : C {}; interface C : B {}; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow indirect cycles in interface inheritance chains") + harness.ok( + threw, "Should not allow indirect cycles in interface inheritance chains" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow inheriting from an interface that is only forward declared") + harness.ok( + threw, + "Should not allow inheriting from an interface that is only forward declared", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A { constructor(); constructor(long arg); @@ -105,26 +124,43 @@ def WebIDLTest(parser, harness): readonly attribute boolean y; undefined foo(long arg); }; - """); - results = parser.finish(); - harness.check(len(results), 2, - "Should have two results with partial interface") + """ + ) + results = parser.finish() + harness.check(len(results), 2, "Should have two results with partial interface") iface = results[0] - harness.check(len(iface.members), 3, - "Should have three members with partial interface") - harness.check(iface.members[0].identifier.name, "x", - "First member should be x with partial interface") - harness.check(iface.members[1].identifier.name, "foo", - "Second member should be foo with partial interface") - harness.check(len(iface.members[1].signatures()), 2, - "Should have two foo signatures with partial interface") - harness.check(iface.members[2].identifier.name, "y", - "Third member should be y with partial interface") - 
harness.check(len(iface.ctor().signatures()), 2, - "Should have two constructors with partial interface") + harness.check( + len(iface.members), 3, "Should have three members with partial interface" + ) + harness.check( + iface.members[0].identifier.name, + "x", + "First member should be x with partial interface", + ) + harness.check( + iface.members[1].identifier.name, + "foo", + "Second member should be foo with partial interface", + ) + harness.check( + len(iface.members[1].signatures()), + 2, + "Should have two foo signatures with partial interface", + ) + harness.check( + iface.members[2].identifier.name, + "y", + "Third member should be y with partial interface", + ) + harness.check( + len(iface.ctor().signatures()), + 2, + "Should have two constructors with partial interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface A { readonly attribute boolean y; undefined foo(long arg); @@ -135,236 +171,289 @@ def WebIDLTest(parser, harness): readonly attribute boolean x; undefined foo(); }; - """); - results = parser.finish(); - harness.check(len(results), 2, - "Should have two results with reversed partial interface") + """ + ) + results = parser.finish() + harness.check( + len(results), 2, "Should have two results with reversed partial interface" + ) iface = results[1] - harness.check(len(iface.members), 3, - "Should have three members with reversed partial interface") - harness.check(iface.members[0].identifier.name, "x", - "First member should be x with reversed partial interface") - harness.check(iface.members[1].identifier.name, "foo", - "Second member should be foo with reversed partial interface") - harness.check(len(iface.members[1].signatures()), 2, - "Should have two foo signatures with reversed partial interface") - harness.check(iface.members[2].identifier.name, "y", - "Third member should be y with reversed partial interface") - harness.check(len(iface.ctor().signatures()), 2, - "Should have two constructors with reversed partial interface") + harness.check( + len(iface.members), + 3, + "Should have three members with reversed partial interface", + ) + harness.check( + iface.members[0].identifier.name, + "x", + "First member should be x with reversed partial interface", + ) + harness.check( + iface.members[1].identifier.name, + "foo", + "Second member should be foo with reversed partial interface", + ) + harness.check( + len(iface.members[1].signatures()), + 2, + "Should have two foo signatures with reversed partial interface", + ) + harness.check( + iface.members[2].identifier.name, + "y", + "Third member should be y with reversed partial interface", + ) + harness.check( + len(iface.ctor().signatures()), + 2, + "Should have two constructors with reversed partial interface", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { readonly attribute boolean x; }; interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow two non-partial interfaces with the same name") + harness.ok(threw, "Should not allow two non-partial interfaces with the same name") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ partial interface A { readonly attribute boolean x; }; partial interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Must have a non-partial interface for a given name") + harness.ok(threw, 
"Must have a non-partial interface for a given name") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; partial interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between partial interface " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between partial interface " + "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between interface " - "and other object") + harness.ok( + threw, "Should not allow a name collision between interface " "and other object" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between external interface " + "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { readonly attribute boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and interface") + harness.ok( + threw, + "Should not allow a name collision between external interface " "and interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A; interface A; - """) + """ + ) results = parser.finish() - harness.ok(len(results) == 1 and - isinstance(results[0], WebIDL.IDLExternalInterface), - "Should allow name collisions between external interface " - "declarations") + harness.ok( + len(results) == 1 and isinstance(results[0], WebIDL.IDLExternalInterface), + "Should allow name collisions between external interface " "declarations", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SomeRandomAnnotation] interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow unknown extended attributes on interfaces") + harness.ok(threw, "Should not allow unknown extended attributes on interfaces") parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window, LegacyWindowAlias=(C, D)] interface E {}; - """); - results = parser.finish(); - harness.check(results[1].legacyWindowAliases, ["A"], - "Should support a single identifier") - harness.check(results[2].legacyWindowAliases, ["C", "D"], - "Should support an identifier list") + """ + ) + results = parser.finish() + harness.check( + results[1].legacyWindowAliases, ["A"], "Should support a single identifier" + ) + harness.check( + results[2].legacyWindowAliases, ["C", "D"], "Should support an identifier list" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyWindowAlias] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow 
[LegacyWindowAlias] with no value") + harness.ok(threw, "Should not allow [LegacyWindowAlias] with no value") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Exposed=Worker, LegacyWindowAlias=B] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] without Window exposure") + harness.ok(threw, "Should not allow [LegacyWindowAlias] without Window exposure") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window] interface A {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window, LegacyWindowAlias=A] interface C {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py index db944e7aaf7..5750f87a6fc 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflict { const byte thing1 = 1; const unsigned long thing1 = 1; }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py index 1a73fb917ed..c1a544ce718 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers1 { const byte thing1 = 1; readonly attribute long thing1; }; - """) + """ + ) results = parser.finish() except: @@ -16,12 +18,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers2 
{ readonly attribute long thing1; const byte thing1 = 1; }; - """) + """ + ) results = parser.finish() except: @@ -31,12 +35,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers3 { getter boolean thing1(DOMString name); readonly attribute long thing1; }; - """) + """ + ) results = parser.finish() except: @@ -46,12 +52,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers1 { const byte thing1 = 1; long thing1(); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py index 835212d2965..18c6023dd3b 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py @@ -1,33 +1,42 @@ import WebIDL import traceback -def WebIDLTest(parser, harness): + +def WebIDLTest(parser, harness): def shouldPass(prefix, iface, expectedMembers, numProductions=1): p = parser.reset() p.parse(iface) results = p.finish() - harness.check(len(results), numProductions, - "%s - Should have production count %d" % (prefix, numProductions)) - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "%s - Should be an IDLInterface" % (prefix)) + harness.check( + len(results), + numProductions, + "%s - Should have production count %d" % (prefix, numProductions), + ) + harness.ok( + isinstance(results[0], WebIDL.IDLInterface), + "%s - Should be an IDLInterface" % (prefix), + ) # Make a copy, since we plan to modify it expectedMembers = list(expectedMembers) for m in results[0].members: name = m.identifier.name if (name, type(m)) in expectedMembers: - harness.ok(True, "%s - %s - Should be a %s" % (prefix, name, - type(m))) + harness.ok(True, "%s - %s - Should be a %s" % (prefix, name, type(m))) expectedMembers.remove((name, type(m))) else: - harness.ok(False, "%s - %s - Unknown symbol of type %s" % - (prefix, name, type(m))) + harness.ok( + False, + "%s - %s - Unknown symbol of type %s" % (prefix, name, type(m)), + ) # A bit of a hoop because we can't generate the error string if we pass if len(expectedMembers) == 0: harness.ok(True, "Found all the members") else: - harness.ok(False, - "Expected member not found: %s of type %s" % - (expectedMembers[0][0], expectedMembers[0][1])) + harness.ok( + False, + "Expected member not found: %s of type %s" + % (expectedMembers[0][0], expectedMembers[0][1]), + ) return results def shouldFail(prefix, iface): @@ -35,68 +44,81 @@ def WebIDLTest(parser, harness): p = parser.reset() p.parse(iface) p.finish() - harness.ok(False, - prefix + " - Interface passed when should've failed") + harness.ok(False, prefix + " - Interface passed when should've failed") except WebIDL.WebIDLError as e: - harness.ok(True, - prefix + " - Interface failed as expected") + harness.ok(True, prefix + " - Interface failed as expected") except Exception as e: - harness.ok(False, - prefix + " - Interface failed but not as a WebIDLError exception: %s" % e) + harness.ok( + False, + prefix + + " - Interface failed but not as a WebIDLError exception: %s" % e, + ) - iterableMembers = [(x, WebIDL.IDLMethod) for x in ["entries", "keys", - "values", "forEach"]] - setROMembers = ([(x, WebIDL.IDLMethod) for x in ["has"]] + 
- [("__setlike", WebIDL.IDLMaplikeOrSetlike)] + - iterableMembers) + iterableMembers = [ + (x, WebIDL.IDLMethod) for x in ["entries", "keys", "values", "forEach"] + ] + setROMembers = ( + [(x, WebIDL.IDLMethod) for x in ["has"]] + + [("__setlike", WebIDL.IDLMaplikeOrSetlike)] + + iterableMembers + ) setROMembers.extend([("size", WebIDL.IDLAttribute)]) - setRWMembers = ([(x, WebIDL.IDLMethod) for x in ["add", - "clear", - "delete"]] + - setROMembers) - setROChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__add", - "__clear", - "__delete"]] + - setROMembers) - setRWChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__add", - "__clear", - "__delete"]] + - setRWMembers) - mapROMembers = ([(x, WebIDL.IDLMethod) for x in ["get", "has"]] + - [("__maplike", WebIDL.IDLMaplikeOrSetlike)] + - iterableMembers) + setRWMembers = [ + (x, WebIDL.IDLMethod) for x in ["add", "clear", "delete"] + ] + setROMembers + setROChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"] + ] + setROMembers + setRWChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"] + ] + setRWMembers + mapROMembers = ( + [(x, WebIDL.IDLMethod) for x in ["get", "has"]] + + [("__maplike", WebIDL.IDLMaplikeOrSetlike)] + + iterableMembers + ) mapROMembers.extend([("size", WebIDL.IDLAttribute)]) - mapRWMembers = ([(x, WebIDL.IDLMethod) for x in ["set", - "clear", - "delete"]] + mapROMembers) - mapRWChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__set", - "__clear", - "__delete"]] + - mapRWMembers) + mapRWMembers = [ + (x, WebIDL.IDLMethod) for x in ["set", "clear", "delete"] + ] + mapROMembers + mapRWChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__set", "__clear", "__delete"] + ] + mapRWMembers # OK, now that we've used iterableMembers to set up the above, append # __iterable to it for the iterable<> case. 
iterableMembers.append(("__iterable", WebIDL.IDLIterable)) + asyncIterableMembers = [ + (x, WebIDL.IDLMethod) for x in ["entries", "keys", "values"] + ] + asyncIterableMembers.append(("__iterable", WebIDL.IDLAsyncIterable)) + valueIterableMembers = [("__iterable", WebIDL.IDLIterable)] valueIterableMembers.append(("__indexedgetter", WebIDL.IDLMethod)) valueIterableMembers.append(("length", WebIDL.IDLAttribute)) + valueAsyncIterableMembers = [("__iterable", WebIDL.IDLAsyncIterable)] + valueAsyncIterableMembers.append(("values", WebIDL.IDLMethod)) + disallowedIterableNames = ["keys", "entries", "values"] disallowedMemberNames = ["forEach", "has", "size"] + disallowedIterableNames mapDisallowedMemberNames = ["get"] + disallowedMemberNames disallowedNonMethodNames = ["clear", "delete"] mapDisallowedNonMethodNames = ["set"] + disallowedNonMethodNames setDisallowedNonMethodNames = ["add"] + disallowedNonMethodNames - unrelatedMembers = [("unrelatedAttribute", WebIDL.IDLAttribute), - ("unrelatedMethod", WebIDL.IDLMethod)] + unrelatedMembers = [ + ("unrelatedAttribute", WebIDL.IDLAttribute), + ("unrelatedMethod", WebIDL.IDLMethod), + ] # # Simple Usage Tests # - shouldPass("Iterable (key only)", - """ + shouldPass( + "Iterable (key only)", + """ interface Foo1 { iterable; readonly attribute unsigned long length; @@ -104,10 +126,13 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, valueIterableMembers + unrelatedMembers) + """, + valueIterableMembers + unrelatedMembers, + ) - shouldPass("Iterable (key only) inheriting from parent", - """ + shouldPass( + "Iterable (key only) inheriting from parent", + """ interface Foo1 : Foo2 { iterable; readonly attribute unsigned long length; @@ -117,21 +142,28 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, valueIterableMembers, numProductions=2) + """, + valueIterableMembers, + numProductions=2, + ) - shouldPass("Iterable (key and value)", - """ + shouldPass( + "Iterable (key and value)", + """ interface Foo1 { iterable; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, iterableMembers + unrelatedMembers, - # numProductions == 2 because of the generated iterator iface, - numProductions=2) + """, + iterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) - shouldPass("Iterable (key and value) inheriting from parent", - """ + shouldPass( + "Iterable (key and value) inheriting from parent", + """ interface Foo1 : Foo2 { iterable; }; @@ -139,21 +171,115 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, iterableMembers, - # numProductions == 3 because of the generated iterator iface, - numProductions=3) + """, + iterableMembers, + # numProductions == 3 because of the generated iterator iface, + numProductions=3, + ) - shouldPass("Maplike (readwrite)", - """ + shouldPass( + "Async iterable (key only)", + """ + interface Foo1 { + async iterable; + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + valueAsyncIterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) + + shouldPass( + "Async iterable (key only) inheriting from parent", + """ + interface Foo1 : Foo2 { + async iterable; + }; + interface Foo2 { + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + valueAsyncIterableMembers, + # numProductions == 3 
because of the generated iterator iface, + numProductions=3, + ) + + shouldPass( + "Async iterable with argument (key only)", + """ + interface Foo1 { + async iterable(optional long foo); + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + valueAsyncIterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) + + shouldPass( + "Async iterable (key and value)", + """ + interface Foo1 { + async iterable; + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + asyncIterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) + + shouldPass( + "Async iterable (key and value) inheriting from parent", + """ + interface Foo1 : Foo2 { + async iterable; + }; + interface Foo2 { + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + asyncIterableMembers, + # numProductions == 3 because of the generated iterator iface, + numProductions=3, + ) + + shouldPass( + "Async iterable with argument (key and value)", + """ + interface Foo1 { + async iterable(optional long foo); + attribute long unrelatedAttribute; + long unrelatedMethod(); + }; + """, + asyncIterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) + + shouldPass( + "Maplike (readwrite)", + """ interface Foo1 { maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers + unrelatedMembers) + """, + mapRWMembers + unrelatedMembers, + ) - shouldPass("Maplike (readwrite) inheriting from parent", - """ + shouldPass( + "Maplike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { maplike; }; @@ -161,19 +287,26 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Maplike (readwrite)", - """ + shouldPass( + "Maplike (readwrite)", + """ interface Foo1 { maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers + unrelatedMembers) + """, + mapRWMembers + unrelatedMembers, + ) - shouldPass("Maplike (readwrite) inheriting from parent", - """ + shouldPass( + "Maplike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { maplike; }; @@ -181,19 +314,26 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Maplike (readonly)", - """ + shouldPass( + "Maplike (readonly)", + """ interface Foo1 { readonly maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapROMembers + unrelatedMembers) + """, + mapROMembers + unrelatedMembers, + ) - shouldPass("Maplike (readonly) inheriting from parent", - """ + shouldPass( + "Maplike (readonly) inheriting from parent", + """ interface Foo1 : Foo2 { readonly maplike; }; @@ -201,19 +341,26 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapROMembers, numProductions=2) + """, + mapROMembers, + numProductions=2, + ) - shouldPass("Setlike (readwrite)", - """ + shouldPass( + "Setlike (readwrite)", + """ interface Foo1 { setlike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setRWMembers + unrelatedMembers) + """, + setRWMembers + unrelatedMembers, + ) - shouldPass("Setlike (readwrite) inheriting 
from parent", - """ + shouldPass( + "Setlike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { setlike; }; @@ -221,19 +368,26 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setRWMembers, numProductions=2) + """, + setRWMembers, + numProductions=2, + ) - shouldPass("Setlike (readonly)", - """ + shouldPass( + "Setlike (readonly)", + """ interface Foo1 { readonly setlike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setROMembers + unrelatedMembers) + """, + setROMembers + unrelatedMembers, + ) - shouldPass("Setlike (readonly) inheriting from parent", - """ + shouldPass( + "Setlike (readonly) inheriting from parent", + """ interface Foo1 : Foo2 { readonly setlike; }; @@ -241,95 +395,166 @@ def WebIDLTest(parser, harness): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setROMembers, numProductions=2) + """, + setROMembers, + numProductions=2, + ) - shouldPass("Inheritance of maplike/setlike", - """ + shouldPass( + "Inheritance of maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("JS Implemented maplike interface", - """ + shouldFail( + "JS Implemented maplike interface", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); setlike; }; - """, setRWChromeMembers) + """, + ) - shouldPass("JS Implemented maplike interface", - """ + shouldFail( + "JS Implemented maplike interface", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); maplike; }; - """, mapRWChromeMembers) + """, + ) # # Multiple maplike/setlike tests # - shouldFail("Two maplike/setlikes on same interface", - """ + shouldFail( + "Two maplike/setlikes on same interface", + """ interface Foo1 { setlike; maplike; }; - """) + """, + ) - shouldFail("Two iterable/setlikes on same interface", - """ + shouldFail( + "Two iterable/setlikes on same interface", + """ interface Foo1 { iterable; maplike; }; - """) + """, + ) - shouldFail("Two iterables on same interface", - """ + shouldFail( + "Two iterables on same interface", + """ interface Foo1 { iterable; iterable; }; - """) + """, + ) - shouldFail("Two maplike/setlikes in partials", - """ + shouldFail( + "Two iterables on same interface", + """ + interface Foo1 { + iterable; + async iterable; + }; + """, + ) + + shouldFail( + "Two iterables on same interface", + """ + interface Foo1 { + async iterable; + async iterable; + }; + """, + ) + + shouldFail( + "Async iterable with non-optional arguments", + """ + interface Foo1 { + async iterable(long foo); + }; + """, + ) + + shouldFail( + "Async iterable with non-optional arguments", + """ + interface Foo1 { + async iterable(optional long foo, long bar); + }; + """, + ) + + shouldFail( + "Async iterable with non-optional arguments", + """ + interface Foo1 { + async iterable(long foo); + }; + """, + ) + + shouldFail( + "Two maplike/setlikes in partials", + """ interface Foo1 { maplike; }; partial interface Foo1 { setlike; }; - """) + """, + ) - shouldFail("Conflicting maplike/setlikes across inheritance", - """ + shouldFail( + "Conflicting maplike/setlikes across inheritance", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { setlike; }; - """) + """, + ) - shouldFail("Conflicting maplike/iterable across inheritance", - """ + shouldFail( + "Conflicting maplike/iterable across inheritance", + 
""" interface Foo1 { maplike; }; interface Foo2 : Foo1 { iterable; }; - """) + """, + ) - shouldFail("Conflicting maplike/setlikes across multistep inheritance", - """ + shouldFail( + "Conflicting maplike/setlikes across multistep inheritance", + """ interface Foo1 { maplike; }; @@ -338,7 +563,8 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { setlike; }; - """) + """, + ) # # Member name collision tests @@ -353,61 +579,83 @@ def WebIDLTest(parser, harness): """ if methodPasses: - shouldPass("Conflicting method: %s and %s" % (likeMember, conflictName), - """ + shouldPass( + "Conflicting method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; [Throws] undefined %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName), expectedMembers) + """ + % (likeMember, conflictName), + expectedMembers, + ) else: - shouldFail("Conflicting method: %s and %s" % (likeMember, conflictName), - """ + shouldFail( + "Conflicting method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; [Throws] undefined %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName)) + """ + % (likeMember, conflictName), + ) # Inherited conflicting methods should ALWAYS fail - shouldFail("Conflicting inherited method: %s and %s" % (likeMember, conflictName), - """ + shouldFail( + "Conflicting inherited method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { undefined %s(long test1, double test2, double test3); }; interface Foo2 : Foo1 { %s; }; - """ % (conflictName, likeMember)) - shouldFail("Conflicting static method: %s and %s" % (likeMember, conflictName), """ + % (conflictName, likeMember), + ) + shouldFail( + "Conflicting static method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; static undefined %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting attribute: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting attribute: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s attribute double %s; }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting const: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting const: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; const double %s = 0; }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting static attribute: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting static attribute: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; static attribute long %s; }; - """ % (likeMember, conflictName)) + """ + % (likeMember, conflictName), + ) for member in disallowedIterableNames: testConflictingMembers("iterable", member, iterableMembers, False) @@ -420,18 +668,23 @@ def WebIDLTest(parser, harness): for member in setDisallowedNonMethodNames: testConflictingMembers("setlike", member, setRWMembers, True) - shouldPass("Inheritance of maplike/setlike with child member collision", - """ + shouldPass( + "Inheritance of maplike/setlike with child member collision", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { undefined entries(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Inheritance of multi-level maplike/setlike with child member collision", - """ + shouldPass( + "Inheritance of multi-level 
maplike/setlike with child member collision", + """ interface Foo1 { maplike; }; @@ -440,10 +693,14 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { undefined entries(); }; - """, mapRWMembers, numProductions=3) + """, + mapRWMembers, + numProductions=3, + ) - shouldFail("Maplike interface with mixin member collision", - """ + shouldFail( + "Maplike interface with mixin member collision", + """ interface Foo1 { maplike; }; @@ -451,10 +708,12 @@ def WebIDLTest(parser, harness): undefined entries(); }; Foo1 includes Foo2; - """) + """, + ) - shouldPass("Inherited Maplike interface with consequential interface member collision", - """ + shouldPass( + "Inherited Maplike interface with consequential interface member collision", + """ interface Foo1 { maplike; }; @@ -464,20 +723,26 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo1 { }; Foo3 includes Foo2; - """, mapRWMembers, numProductions=4) + """, + mapRWMembers, + numProductions=4, + ) - shouldFail("Inheritance of name collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of name collision with child maplike/setlike", + """ interface Foo1 { undefined entries(); }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level name collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level name collision with child maplike/setlike", + """ interface Foo1 { undefined entries(); }; @@ -486,20 +751,26 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - shouldPass("Inheritance of attribute collision with parent maplike/setlike", - """ + shouldPass( + "Inheritance of attribute collision with parent maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { attribute double size; }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Inheritance of multi-level attribute collision with parent maplike/setlike", - """ + shouldPass( + "Inheritance of multi-level attribute collision with parent maplike/setlike", + """ interface Foo1 { maplike; }; @@ -508,20 +779,26 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { attribute double size; }; - """, mapRWMembers, numProductions=3) + """, + mapRWMembers, + numProductions=3, + ) - shouldFail("Inheritance of attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of attribute collision with child maplike/setlike", + """ interface Foo1 { attribute double size; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level attribute collision with child maplike/setlike", + """ interface Foo1 { attribute double size; }; @@ -530,53 +807,65 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of attribute/rw function collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of attribute/rw function collision with child maplike/setlike", + """ interface Foo1 { attribute double set; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of const/rw function collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of const/rw function collision with child maplike/setlike", + """ interface Foo1 { const double set = 0; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldPass("Inheritance of rw function with same 
name in child maplike/setlike", - """ + shouldPass( + "Inheritance of rw function with same name in child maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { undefined clear(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldFail("Inheritance of unforgeable attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of unforgeable attribute collision with child maplike/setlike", + """ interface Foo1 { - [Unforgeable] + [LegacyUnforgeable] attribute double size; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level unforgeable attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level unforgeable attribute collision with child maplike/setlike", + """ interface Foo1 { - [Unforgeable] + [LegacyUnforgeable] attribute double size; }; interface Foo2 : Foo1 { @@ -584,49 +873,40 @@ def WebIDLTest(parser, harness): interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - shouldPass("Interface with readonly allowable overrides", - """ + shouldPass( + "Interface with readonly allowable overrides", + """ interface Foo1 { readonly setlike; readonly attribute boolean clear; }; - """, setROMembers + [("clear", WebIDL.IDLAttribute)]) + """, + setROMembers + [("clear", WebIDL.IDLAttribute)], + ) - shouldPass("JS Implemented read-only interface with readonly allowable overrides", - """ - [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] - interface Foo1 { - constructor(); - readonly setlike; - readonly attribute boolean clear; - }; - """, setROChromeMembers + [("clear", WebIDL.IDLAttribute)]) - - shouldFail("JS Implemented read-write interface with non-readwrite allowable overrides", - """ - [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] - interface Foo1 { - constructor(); - setlike; - readonly attribute boolean clear; - }; - """) - - r = shouldPass("Check proper override of clear/delete/set", - """ + r = shouldPass( + "Check proper override of clear/delete/set", + """ interface Foo1 { maplike; long clear(long a, long b, double c, double d); long set(long a, long b, double c, double d); long delete(long a, long b, double c, double d); }; - """, mapRWMembers) + """, + mapRWMembers, + ) for m in r[0].members: if m.identifier.name in ["clear", "set", "delete"]: harness.ok(m.isMethod(), "%s should be a method" % m.identifier.name) - harness.check(m.maxArgCount, 4, "%s should have 4 arguments" % m.identifier.name) - harness.ok(not m.isMaplikeOrSetlikeOrIterableMethod(), - "%s should not be a maplike/setlike function" % m.identifier.name) + harness.check( + m.maxArgCount, 4, "%s should have 4 arguments" % m.identifier.name + ) + harness.ok( + not m.isMaplikeOrSetlikeOrIterableMethod(), + "%s should not be a maplike/setlike function" % m.identifier.name, + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py b/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py index 79cf1f6780d..b3c8573fa59 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py @@ -1,33 +1,46 @@ import WebIDL + def WebIDLTest(parser, harness): parser.parse("interface mixin Foo { };") results = parser.finish() harness.ok(True, "Empty interface mixin parsed without error.") harness.check(len(results), 1, "Should be one production") - 
harness.ok(isinstance(results[0], WebIDL.IDLInterfaceMixin), - "Should be an IDLInterfaceMixin") + harness.ok( + isinstance(results[0], WebIDL.IDLInterfaceMixin), + "Should be an IDLInterfaceMixin", + ) mixin = results[0] - harness.check(mixin.identifier.QName(), "::Foo", "Interface mixin has the right QName") + harness.check( + mixin.identifier.QName(), "::Foo", "Interface mixin has the right QName" + ) harness.check(mixin.identifier.name, "Foo", "Interface mixin has the right name") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface mixin QNameBase { const long foo = 3; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterfaceMixin), - "Should be an IDLInterfaceMixin") + harness.ok( + isinstance(results[0], WebIDL.IDLInterfaceMixin), + "Should be an IDLInterfaceMixin", + ) harness.check(len(results[0].members), 1, "Expect 1 productions") mixin = results[0] - harness.check(mixin.members[0].identifier.QName(), "::QNameBase::foo", - "Member has the right QName") + harness.check( + mixin.members[0].identifier.QName(), + "::QNameBase::foo", + "Member has the right QName", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface mixin A { readonly attribute boolean x; undefined foo(); @@ -36,24 +49,40 @@ def WebIDLTest(parser, harness): readonly attribute boolean y; undefined foo(long arg); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should have two results with partial interface mixin") + harness.check( + len(results), 2, "Should have two results with partial interface mixin" + ) mixin = results[0] - harness.check(len(mixin.members), 3, - "Should have three members with partial interface mixin") - harness.check(mixin.members[0].identifier.name, "x", - "First member should be x with partial interface mixin") - harness.check(mixin.members[1].identifier.name, "foo", - "Second member should be foo with partial interface mixin") - harness.check(len(mixin.members[1].signatures()), 2, - "Should have two foo signatures with partial interface mixin") - harness.check(mixin.members[2].identifier.name, "y", - "Third member should be y with partial interface mixin") + harness.check( + len(mixin.members), 3, "Should have three members with partial interface mixin" + ) + harness.check( + mixin.members[0].identifier.name, + "x", + "First member should be x with partial interface mixin", + ) + harness.check( + mixin.members[1].identifier.name, + "foo", + "Second member should be foo with partial interface mixin", + ) + harness.check( + len(mixin.members[1].signatures()), + 2, + "Should have two foo signatures with partial interface mixin", + ) + harness.check( + mixin.members[2].identifier.name, + "y", + "Third member should be y with partial interface mixin", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface mixin A { readonly attribute boolean y; undefined foo(long arg); @@ -62,285 +91,334 @@ def WebIDLTest(parser, harness): readonly attribute boolean x; undefined foo(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should have two results with reversed partial interface mixin") + harness.check( + len(results), 2, "Should have two results with reversed partial interface mixin" + ) mixin = results[1] - harness.check(len(mixin.members), 3, - "Should have three members with reversed partial interface mixin") - 
harness.check(mixin.members[0].identifier.name, "x", - "First member should be x with reversed partial interface mixin") - harness.check(mixin.members[1].identifier.name, "foo", - "Second member should be foo with reversed partial interface mixin") - harness.check(len(mixin.members[1].signatures()), 2, - "Should have two foo signatures with reversed partial interface mixin") - harness.check(mixin.members[2].identifier.name, "y", - "Third member should be y with reversed partial interface mixin") + harness.check( + len(mixin.members), + 3, + "Should have three members with reversed partial interface mixin", + ) + harness.check( + mixin.members[0].identifier.name, + "x", + "First member should be x with reversed partial interface mixin", + ) + harness.check( + mixin.members[1].identifier.name, + "foo", + "Second member should be foo with reversed partial interface mixin", + ) + harness.check( + len(mixin.members[1].signatures()), + 2, + "Should have two foo signatures with reversed partial interface mixin", + ) + harness.check( + mixin.members[2].identifier.name, + "y", + "Third member should be y with reversed partial interface mixin", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface Interface {}; interface mixin Mixin { attribute short x; }; Interface includes Mixin; - """) + """ + ) results = parser.finish() iface = results[0] harness.check(len(iface.members), 1, "Should merge members from mixins") - harness.check(iface.members[0].identifier.name, "x", - "Should merge members from mixins") + harness.check( + iface.members[0].identifier.name, "x", "Should merge members from mixins" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { readonly attribute boolean x; }; interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow two non-partial interface mixins with the same name") + harness.ok( + threw, "Should not allow two non-partial interface mixins with the same name" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ partial interface mixin A { readonly attribute boolean x; }; partial interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Must have a non-partial interface mixin for a given name") + harness.ok(threw, "Must have a non-partial interface mixin for a given name") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; partial interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between partial interface " - "mixin and other object") + harness.ok( + threw, + "Should not allow a name collision between partial interface " + "mixin and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between interface mixin " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between interface mixin " "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { readonly 
attribute boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and interface mixin") + harness.ok( + threw, + "Should not allow a name collision between external interface " + "and interface mixin", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SomeRandomAnnotation] interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow unknown extended attributes on interface mixins") + harness.ok( + threw, "Should not allow unknown extended attributes on interface mixins" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { getter double (DOMString propertyName); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow getters on interface mixins") + harness.ok(threw, "Should not allow getters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { setter undefined (DOMString propertyName, double propertyValue); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow setters on interface mixins") + harness.ok(threw, "Should not allow setters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { deleter undefined (DOMString propertyName); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow deleters on interface mixins") + harness.ok(threw, "Should not allow deleters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { legacycaller double compute(double x); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow legacycallers on interface mixins") + harness.ok(threw, "Should not allow legacycallers on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { inherit attribute x; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow inherited attribute on interface mixins") + harness.ok(threw, "Should not allow inherited attribute on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface {}; interface NotMixin { attribute short x; }; Interface includes NotMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the right side does not point an interface mixin") + harness.ok(threw, "Should fail if the right side does not point an interface mixin") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin NotInterface {}; interface mixin Mixin { attribute short x; }; NotInterface includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the left side does not point an interface") + harness.ok(threw, "Should fail if the left side does not point an interface") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { iterable; }; - """) + """ + ) results = parser.finish() except: threw = True - 
harness.ok(threw, - "Should fail if an interface mixin includes iterable") + harness.ok(threw, "Should fail if an interface mixin includes iterable") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { setlike; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if an interface mixin includes setlike") + harness.ok(threw, "Should fail if an interface mixin includes setlike") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { maplike; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if an interface mixin includes maplike") + harness.ok(threw, "Should fail if an interface mixin includes maplike") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface { attribute short attr; }; @@ -348,17 +426,20 @@ def WebIDLTest(parser, harness): attribute short attr; }; Interface includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the included mixin interface has duplicated member") + harness.ok( + threw, "Should fail if the included mixin interface has duplicated member" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface {}; interface mixin Mixin1 { attribute short attr; @@ -368,15 +449,18 @@ def WebIDLTest(parser, harness): }; Interface includes Mixin1; Interface includes Mixin2; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the included mixin interfaces have duplicated member") + harness.ok( + threw, "Should fail if the included mixin interfaces have duplicated member" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -385,15 +469,20 @@ def WebIDLTest(parser, harness): Base returnSelf(); }; Base includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window"]), - "Should expose on globals where the base interfaces are exposed") + harness.check( + attr.exposureSet, + set(["Window"]), + "Should expose on globals where the base interfaces are exposed", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -403,15 +492,18 @@ def WebIDLTest(parser, harness): attribute short a; }; Base includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window"]), - "Should follow [Exposed] on interface mixin") + harness.check( + attr.exposureSet, set(["Window"]), "Should follow [Exposed] on interface mixin" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -423,15 +515,20 @@ def WebIDLTest(parser, harness): }; Base1 includes Mixin; Base2 includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window", "Worker"]), - "Should expose on all globals where including interfaces are " - "exposed") + harness.check( + attr.exposureSet, + set(["Window", "Worker"]), + 
"Should expose on all globals where including interfaces are " "exposed", + ) base = results[3] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window", "Worker"]), - "Should expose on all globals where including interfaces are " - "exposed") + harness.check( + attr.exposureSet, + set(["Window", "Worker"]), + "Should expose on all globals where including interfaces are " "exposed", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py b/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py index 78a9ffe9eaa..9d2230c3bec 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py @@ -2,8 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. + def should_throw(parser, harness, message, code): - parser = parser.reset(); + parser = parser.reset() threw = False try: parser.parse(code) @@ -15,44 +16,69 @@ def should_throw(parser, harness, message, code): def WebIDLTest(parser, harness): - # The [LenientSetter] extended attribute MUST take no arguments. - should_throw(parser, harness, "no arguments", """ + # The [LegacyLenientSetter] extended attribute MUST take no arguments. + should_throw( + parser, + harness, + "no arguments", + """ interface I { - [LenientSetter=X] readonly attribute long A; + [LegacyLenientSetter=X] readonly attribute long A; }; - """) + """, + ) - # An attribute with the [LenientSetter] extended attribute MUST NOT + # An attribute with the [LegacyLenientSetter] extended attribute MUST NOT # also be declared with the [PutForwards] extended attribute. - should_throw(parser, harness, "PutForwards", """ + should_throw( + parser, + harness, + "PutForwards", + """ interface I { - [PutForwards=B, LenientSetter] readonly attribute J A; + [PutForwards=B, LegacyLenientSetter] readonly attribute J A; }; interface J { attribute long B; }; - """) + """, + ) - # An attribute with the [LenientSetter] extended attribute MUST NOT + # An attribute with the [LegacyLenientSetter] extended attribute MUST NOT # also be declared with the [Replaceable] extended attribute. - should_throw(parser, harness, "Replaceable", """ + should_throw( + parser, + harness, + "Replaceable", + """ interface I { - [Replaceable, LenientSetter] readonly attribute J A; + [Replaceable, LegacyLenientSetter] readonly attribute J A; }; - """) + """, + ) - # The [LenientSetter] extended attribute MUST NOT be used on an + # The [LegacyLenientSetter] extended attribute MUST NOT be used on an # attribute that is not read only. - should_throw(parser, harness, "writable attribute", """ + should_throw( + parser, + harness, + "writable attribute", + """ interface I { - [LenientSetter] attribute long A; + [LegacyLenientSetter] attribute long A; }; - """) + """, + ) - # The [LenientSetter] extended attribute MUST NOT be used on a + # The [LegacyLenientSetter] extended attribute MUST NOT be used on a # static attribute. 
- should_throw(parser, harness, "static attribute", """ + should_throw( + parser, + harness, + "static attribute", + """ interface I { - [LenientSetter] static readonly attribute long A; + [LegacyLenientSetter] static readonly attribute long A; }; - """) + """, + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_method.py b/components/script/dom/bindings/codegen/parser/tests/test_method.py index ff1f087c861..0ddfada28ac 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_method.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_method.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestMethods { undefined basic(); static undefined basicStatic(); @@ -16,35 +18,50 @@ def WebIDLTest(parser, harness): undefined setAny(any arg1); float doFloats(float arg1); }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestMethods interface parsed without error.") harness.check(len(results), 1, "Should be one production.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestMethods", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestMethods", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestMethods", "Interface has the right name") harness.check(len(iface.members), 12, "Expect 12 members") methods = iface.members def checkArgument(argument, QName, name, type, optional, variadic): - harness.ok(isinstance(argument, WebIDL.IDLArgument), - "Should be an IDLArgument") - harness.check(argument.identifier.QName(), QName, "Argument has the right QName") + harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument") + harness.check( + argument.identifier.QName(), QName, "Argument has the right QName" + ) harness.check(argument.identifier.name, name, "Argument has the right name") harness.check(str(argument.type), type, "Argument has the right return type") - harness.check(argument.optional, optional, "Argument has the right optional value") - harness.check(argument.variadic, variadic, "Argument has the right variadic value") + harness.check( + argument.optional, optional, "Argument has the right optional value" + ) + harness.check( + argument.variadic, variadic, "Argument has the right variadic value" + ) - def checkMethod(method, QName, name, signatures, - static=False, getter=False, setter=False, - deleter=False, legacycaller=False, stringifier=False): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + def checkMethod( + method, + QName, + name, + signatures, + static=False, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.ok(method.isMethod(), "Method is a method") harness.ok(not method.isAttr(), "Method is not an attr") harness.ok(not method.isConst(), "Method is not a const") @@ -53,72 +70,202 @@ def WebIDLTest(parser, harness): harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - 
harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value") - harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures") + harness.check( + method.isDeleter(), deleter, "Method has the correct deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, + "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) + harness.check( + len(method.signatures()), + len(signatures), + "Method has the correct number of signatures", + ) sigpairs = zip(method.signatures(), signatures) for (gotSignature, expectedSignature) in sigpairs: (gotRetType, gotArgs) = gotSignature (expectedRetType, expectedArgs) = expectedSignature - harness.check(str(gotRetType), expectedRetType, - "Method has the expected return type.") + harness.check( + str(gotRetType), expectedRetType, "Method has the expected return type." + ) for i in range(0, len(gotArgs)): (QName, name, type, optional, variadic) = expectedArgs[i] checkArgument(gotArgs[i], QName, name, type, optional, variadic) checkMethod(methods[0], "::TestMethods::basic", "basic", [("Undefined", [])]) - checkMethod(methods[1], "::TestMethods::basicStatic", "basicStatic", - [("Undefined", [])], static=True) - checkMethod(methods[2], "::TestMethods::basicWithSimpleArgs", - "basicWithSimpleArgs", - [("Undefined", - [("::TestMethods::basicWithSimpleArgs::arg1", "arg1", "Boolean", False, False), - ("::TestMethods::basicWithSimpleArgs::arg2", "arg2", "Byte", False, False), - ("::TestMethods::basicWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])]) - checkMethod(methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])]) - checkMethod(methods[4], "::TestMethods::basicStaticBoolean", "basicStaticBoolean", [("Boolean", [])], static=True) - checkMethod(methods[5], "::TestMethods::basicBooleanWithSimpleArgs", - "basicBooleanWithSimpleArgs", - [("Boolean", - [("::TestMethods::basicBooleanWithSimpleArgs::arg1", "arg1", "Boolean", False, False), - ("::TestMethods::basicBooleanWithSimpleArgs::arg2", "arg2", "Byte", False, False), - ("::TestMethods::basicBooleanWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])]) - checkMethod(methods[6], "::TestMethods::optionalArg", - "optionalArg", - [("Undefined", - [("::TestMethods::optionalArg::arg1", "arg1", "ByteOrNull", True, False), - ("::TestMethods::optionalArg::arg2", "arg2", "ByteSequence", True, False)])]) - checkMethod(methods[7], "::TestMethods::variadicArg", - "variadicArg", - [("Undefined", - [("::TestMethods::variadicArg::arg1", "arg1", "ByteOrNull", True, True)])]) - checkMethod(methods[8], "::TestMethods::getObject", - "getObject", [("Object", [])]) - checkMethod(methods[9], "::TestMethods::setObject", - "setObject", - [("Undefined", - [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)])]) - checkMethod(methods[10], "::TestMethods::setAny", - "setAny", - [("Undefined", - [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])]) - checkMethod(methods[11], "::TestMethods::doFloats", - "doFloats", - [("Float", - [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])]) + checkMethod( + methods[1], + "::TestMethods::basicStatic", + "basicStatic", + [("Undefined", [])], + static=True, + ) + checkMethod( + methods[2], + "::TestMethods::basicWithSimpleArgs", + 
"basicWithSimpleArgs", + [ + ( + "Undefined", + [ + ( + "::TestMethods::basicWithSimpleArgs::arg1", + "arg1", + "Boolean", + False, + False, + ), + ( + "::TestMethods::basicWithSimpleArgs::arg2", + "arg2", + "Byte", + False, + False, + ), + ( + "::TestMethods::basicWithSimpleArgs::arg3", + "arg3", + "UnsignedLong", + False, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])] + ) + checkMethod( + methods[4], + "::TestMethods::basicStaticBoolean", + "basicStaticBoolean", + [("Boolean", [])], + static=True, + ) + checkMethod( + methods[5], + "::TestMethods::basicBooleanWithSimpleArgs", + "basicBooleanWithSimpleArgs", + [ + ( + "Boolean", + [ + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg1", + "arg1", + "Boolean", + False, + False, + ), + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg2", + "arg2", + "Byte", + False, + False, + ), + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg3", + "arg3", + "UnsignedLong", + False, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[6], + "::TestMethods::optionalArg", + "optionalArg", + [ + ( + "Undefined", + [ + ( + "::TestMethods::optionalArg::arg1", + "arg1", + "ByteOrNull", + True, + False, + ), + ( + "::TestMethods::optionalArg::arg2", + "arg2", + "ByteSequence", + True, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[7], + "::TestMethods::variadicArg", + "variadicArg", + [ + ( + "Undefined", + [ + ( + "::TestMethods::variadicArg::arg1", + "arg1", + "ByteOrNull", + True, + True, + ) + ], + ) + ], + ) + checkMethod(methods[8], "::TestMethods::getObject", "getObject", [("Object", [])]) + checkMethod( + methods[9], + "::TestMethods::setObject", + "setObject", + [ + ( + "Undefined", + [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)], + ) + ], + ) + checkMethod( + methods[10], + "::TestMethods::setAny", + "setAny", + [("Undefined", [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])], + ) + checkMethod( + methods[11], + "::TestMethods::doFloats", + "doFloats", + [("Float", [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])], + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { undefined foo(optional float bar = 1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -127,11 +274,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [GetterThrows] undefined foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -140,11 +289,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SetterThrows] undefined foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -153,11 +304,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throw] undefined foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -166,11 +319,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { undefined __noSuchMethod__(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -179,77 +334,96 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + 
parser.parse( + """ interface A { [Throws, LenientFloat] undefined foo(float myFloat); [Throws] undefined foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True harness.ok(not threw, "Should allow LenientFloat to be only in a specific overload") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A { [Throws] undefined foo(); [Throws, LenientFloat] undefined foo(float myFloat); }; - """) + """ + ) results = parser.finish() iface = results[0] methods = iface.members lenientFloat = methods[0].getExtendedAttribute("LenientFloat") - harness.ok(lenientFloat is not None, "LenientFloat in overloads must be added to the method") + harness.ok( + lenientFloat is not None, + "LenientFloat in overloads must be added to the method", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws, LenientFloat] undefined foo(float myFloat); [Throws] undefined foo(float myFloat, float yourFloat); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should prevent overloads from getting different restricted float behavior") + harness.ok( + threw, + "Should prevent overloads from getting different restricted float behavior", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws] undefined foo(float myFloat, float yourFloat); [Throws, LenientFloat] undefined foo(float myFloat); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should prevent overloads from getting different restricted float behavior (2)") + harness.ok( + threw, + "Should prevent overloads from getting different restricted float behavior (2)", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws, LenientFloat] undefined foo(float myFloat); [Throws, LenientFloat] undefined foo(short myShort); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_namespace.py b/components/script/dom/bindings/codegen/parser/tests/test_namespace.py index 62edb270c63..247c5b22232 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_namespace.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_namespace.py @@ -5,13 +5,13 @@ def WebIDLTest(parser, harness): attribute any foo; any bar(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should have a thing.") - harness.ok(results[0].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[0].members), 2, - "Should have two things in our namespace") + harness.ok(results[0].isNamespace(), "Our thing should be a namespace") + harness.check(len(results[0].members), 2, "Should have two things in our namespace") harness.ok(results[0].members[0].isAttr(), "First member is attribute") harness.ok(results[0].members[0].isStatic(), "Attribute should be static") harness.ok(results[0].members[1].isMethod(), "Second member is method") @@ -26,17 +26,17 @@ def WebIDLTest(parser, harness): partial namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have things.") - harness.ok(results[0].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[0].members), 2, - "Should have two things in our namespace") + harness.ok(results[0].isNamespace(), "Our thing 
should be a namespace") + harness.check(len(results[0].members), 2, "Should have two things in our namespace") harness.ok(results[0].members[0].isAttr(), "First member is attribute") - harness.ok(results[0].members[0].isStatic(), "Attribute should be static"); + harness.ok(results[0].members[0].isStatic(), "Attribute should be static") harness.ok(results[0].members[1].isMethod(), "Second member is method") - harness.ok(results[0].members[1].isStatic(), "Operation should be static"); + harness.ok(results[0].members[1].isStatic(), "Operation should be static") parser = parser.reset() parser.parse( @@ -47,17 +47,17 @@ def WebIDLTest(parser, harness): namespace MyNamespace { attribute any foo; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have things.") - harness.ok(results[1].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[1].members), 2, - "Should have two things in our namespace") + harness.ok(results[1].isNamespace(), "Our thing should be a namespace") + harness.check(len(results[1].members), 2, "Should have two things in our namespace") harness.ok(results[1].members[0].isAttr(), "First member is attribute") - harness.ok(results[1].members[0].isStatic(), "Attribute should be static"); + harness.ok(results[1].members[0].isStatic(), "Attribute should be static") harness.ok(results[1].members[1].isMethod(), "Second member is method") - harness.ok(results[1].members[1].isStatic(), "Operation should be static"); + harness.ok(results[1].members[1].isStatic(), "Operation should be static") parser = parser.reset() threw = False @@ -67,7 +67,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { static attribute any foo; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -82,7 +83,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { static any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -101,7 +103,8 @@ def WebIDLTest(parser, harness): interface MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -120,7 +123,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -139,7 +143,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -158,7 +163,8 @@ def WebIDLTest(parser, harness): interface MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -177,7 +183,8 @@ def WebIDLTest(parser, harness): partial interface MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -196,7 +203,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -215,7 +223,8 @@ def WebIDLTest(parser, harness): partial namespace MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_newobject.py b/components/script/dom/bindings/codegen/parser/tests/test_newobject.py index 26785c6a270..c12995a0e86 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_newobject.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_newobject.py @@ -1,6 +1,7 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): # Basic 
functionality parser.parse( @@ -9,7 +10,8 @@ def WebIDLTest(parser, harness): [NewObject] readonly attribute Iface attr; [NewObject] Iface method(); }; - """) + """ + ) results = parser.finish() harness.ok(results, "Should not have thrown on basic [NewObject] usage") @@ -21,7 +23,8 @@ def WebIDLTest(parser, harness): interface Iface { [Pure, NewObject] readonly attribute Iface attr; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -35,7 +38,8 @@ def WebIDLTest(parser, harness): interface Iface { [Pure, NewObject] Iface method(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,7 +53,8 @@ def WebIDLTest(parser, harness): interface Iface { [Cached, NewObject, Affects=Nothing] readonly attribute Iface attr; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -63,7 +68,8 @@ def WebIDLTest(parser, harness): interface Iface { [StoreInSlot, NewObject, Affects=Nothing] readonly attribute Iface attr; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py b/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py index 8ba6771677a..012c5fcff7c 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestNullableEquivalency1 { attribute long a; attribute long? b; @@ -53,22 +55,24 @@ def WebIDLTest(parser, harness): attribute object a; attribute object? b; }; - """) + """ + ) for decl in parser.finish(): if decl.isInterface(): checkEquivalent(decl, harness) + def checkEquivalent(iface, harness): type1 = iface.members[0].type type2 = iface.members[1].type - harness.check(type1.nullable(), False, 'attr1 should not be nullable') - harness.check(type2.nullable(), True, 'attr2 should be nullable') + harness.check(type1.nullable(), False, "attr1 should not be nullable") + harness.check(type2.nullable(), True, "attr2 should be nullable") # We don't know about type1, but type2, the nullable type, definitely # shouldn't be builtin. - harness.check(type2.builtin, False, 'attr2 should not be builtin') + harness.check(type2.builtin, False, "attr2 should not be builtin") # Ensure that all attributes of type2 match those in type1, except for: # - names on an ignore list, @@ -78,10 +82,22 @@ def checkEquivalent(iface, harness): # # Yes, this is an ugly, fragile hack. But it finds bugs... 
for attr in dir(type1): - if attr.startswith('_') or \ - attr in ['nullable', 'builtin', 'filename', 'location', - 'inner', 'QName', 'getDeps', 'name', 'prettyName'] or \ - (hasattr(type(type1), attr) and not callable(getattr(type1, attr))): + if ( + attr.startswith("_") + or attr + in [ + "nullable", + "builtin", + "filename", + "location", + "inner", + "QName", + "getDeps", + "name", + "prettyName", + ] + or (hasattr(type(type1), attr) and not callable(getattr(type1, attr))) + ): continue a1 = getattr(type1, attr) @@ -96,20 +112,30 @@ def checkEquivalent(iface, harness): try: a2 = getattr(type2, attr) except: - harness.ok(False, 'Missing %s attribute on type %s in %s' % (attr, type2, iface)) + harness.ok( + False, + "Missing %s attribute on type %s in %s" % (attr, type2, iface), + ) continue if not callable(a2): - harness.ok(False, "%s attribute on type %s in %s wasn't callable" % (attr, type2, iface)) + harness.ok( + False, + "%s attribute on type %s in %s wasn't callable" + % (attr, type2, iface), + ) continue v2 = a2() - harness.check(v2, v1, '%s method return value' % attr) + harness.check(v2, v1, "%s method return value" % attr) else: try: a2 = getattr(type2, attr) except: - harness.ok(False, 'Missing %s attribute on type %s in %s' % (attr, type2, iface)) + harness.ok( + False, + "Missing %s attribute on type %s in %s" % (attr, type2, iface), + ) continue - harness.check(a2, a1, '%s attribute should match' % attr) + harness.check(a2, a1, "%s attribute should match" % attr) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_nullable_void.py b/components/script/dom/bindings/codegen/parser/tests/test_nullable_void.py deleted file mode 100644 index ebf841a5205..00000000000 --- a/components/script/dom/bindings/codegen/parser/tests/test_nullable_void.py +++ /dev/null @@ -1,14 +0,0 @@ -def WebIDLTest(parser, harness): - threw = False - try: - parser.parse(""" - interface NullableUndefined { - undefined? foo(); - }; - """) - - results = parser.finish() - except: - threw = True - - harness.ok(threw, "Should have thrown.") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py b/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py new file mode 100644 index 00000000000..601f626bcf4 --- /dev/null +++ b/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py @@ -0,0 +1,288 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+
+
+def WebIDLTest(parser, harness):
+
+    # Test dictionary as inner type
+    harness.should_throw(
+        parser,
+        """
+        dictionary A {
+          boolean member;
+        };
+        interface B {
+          attribute ObservableArray<A> foo;
+        };
+        """,
+        "use dictionary as inner type",
+    )
+
+    # Test sequence as inner type
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          attribute ObservableArray<sequence<boolean>> foo;
+        };
+        """,
+        "use sequence as inner type",
+    )
+
+    # Test sequence as inner type
+    harness.should_throw(
+        parser,
+        """
+        dictionary A {
+          boolean member;
+        };
+        interface B {
+          attribute ObservableArray<sequence<A>> foo;
+        };
+        """,
+        "use sequence as inner type",
+    )
+
+    # Test record as inner type
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          attribute ObservableArray<record<DOMString, boolean>> foo;
+        };
+        """,
+        "use record as inner type",
+    )
+
+    # Test record as inner type
+    harness.should_throw(
+        parser,
+        """
+        dictionary A {
+          boolean member;
+        };
+        interface B {
+          attribute ObservableArray<record<DOMString, A>> foo;
+        };
+        """,
+        "use record as inner type",
+    )
+
+    # Test observable array as inner type
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          attribute ObservableArray<ObservableArray<boolean>> foo;
+        };
+        """,
+        "use ObservableArray as inner type",
+    )
+
+    # Test nullable attribute
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          attribute ObservableArray<boolean>? foo;
+        };
+        """,
+        "nullable",
+    )
+
+    # Test sequence
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          undefined foo(sequence<ObservableArray<boolean>> foo);
+        };
+        """,
+        "used in sequence",
+    )
+
+    # Test record
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          undefined foo(record<DOMString, ObservableArray<boolean>> foo);
+        };
+        """,
+        "used in record",
+    )
+
+    # Test promise
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          Promise<ObservableArray<boolean>> foo();
+        };
+        """,
+        "used in promise",
+    )
+
+    # Test union
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          attribute (DOMString or ObservableArray<boolean>) foo;
+        };
+        """,
+        "used in union",
+    )
+
+    # Test dictionary member
+    harness.should_throw(
+        parser,
+        """
+        dictionary A {
+          ObservableArray<boolean> foo;
+        };
+        """,
+        "used on dictionary member type",
+    )
+
+    # Test argument
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          undefined foo(ObservableArray<boolean> foo);
+        };
+        """,
+        "used on argument",
+    )
+
+    # Test static attribute
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          static attribute ObservableArray<boolean> foo;
+        };
+        """,
+        "used on static attribute type",
+    )
+
+    # Test iterable
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          iterable<ObservableArray<boolean>>;
+        };
+        """,
+        "used in iterable",
+    )
+
+    # Test maplike
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          maplike<DOMString, ObservableArray<boolean>>;
+        };
+        """,
+        "used in maplike",
+    )
+
+    # Test setlike
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          setlike<ObservableArray<boolean>>;
+        };
+        """,
+        "used in setlike",
+    )
+
+    # Test JS implemented interface
+    harness.should_throw(
+        parser,
+        """
+        [JSImplementation="@mozilla.org/dom/test-interface-js;1"]
+        interface A {
+          readonly attribute ObservableArray<boolean> foo;
+        };
+        """,
+        "used in JS implemented interface",
+    )
+
+    # Test namespace
+    harness.should_throw(
+        parser,
+        """
+        namespace A {
+          readonly attribute ObservableArray<boolean> foo;
+        };
+        """,
+        "used in namespaces",
+    )
+
+    # Test [Cached] extended attribute
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          [Cached, Pure]
+          readonly attribute ObservableArray<boolean> foo;
+        };
+        """,
+        "have Cached extended attribute",
+    )
+
+    # Test [StoreInSlot] extended attribute
+    harness.should_throw(
+        parser,
+        """
+        interface A {
+          [StoreInSlot, Pure]
+          readonly attribute ObservableArray<boolean> foo;
+        };
+        """,
+        "have StoreInSlot extended attribute",
+    )
+
+    # Test regular attribute
+    parser = parser.reset()
+    parser.parse(
+        """
+        interface A {
+          readonly attribute ObservableArray<boolean> foo;
+          attribute ObservableArray<[Clamp] octet> bar;
+          attribute ObservableArray<long?> baz;
+          attribute ObservableArray<(boolean or long)> qux;
+        };
+        """
+    )
+    results = parser.finish()
+    A = results[0]
+    foo = A.members[0]
+    harness.ok(foo.readonly, "A.foo is readonly attribute")
+    harness.ok(foo.type.isObservableArray(), "A.foo is ObservableArray type")
+    harness.check(
+        foo.slotIndices[A.identifier.name], 0, "A.foo should be stored in slot"
+    )
+    bar = A.members[1]
+    harness.ok(bar.type.isObservableArray(), "A.bar is ObservableArray type")
+    harness.check(
+        bar.slotIndices[A.identifier.name], 1, "A.bar should be stored in slot"
+    )
+    harness.ok(bar.type.inner.hasClamp(), "A.bar's inner type should be clamped")
+    baz = A.members[2]
+    harness.ok(baz.type.isObservableArray(), "A.baz is ObservableArray type")
+    harness.check(
+        baz.slotIndices[A.identifier.name], 2, "A.baz should be stored in slot"
+    )
+    harness.ok(baz.type.inner.nullable(), "A.baz's inner type should be nullable")
+    qux = A.members[3]
+    harness.ok(qux.type.isObservableArray(), "A.qux is ObservableArray type")
+    harness.check(
+        qux.slotIndices[A.identifier.name], 3, "A.qux should be stored in slot"
+    )
+    harness.ok(qux.type.inner.isUnion(), "A.qux's inner type should be union")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py b/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py
index ad012b633d5..2044c6362c3 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py
@@ -1,30 +1,35 @@
 def WebIDLTest(parser, harness):
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface OptionalConstraints1 {
              undefined foo(optional byte arg1, byte arg2);
            };
-        """)
+        """
+        )
 
         results = parser.finish()
     except:
         threw = True
 
-    harness.ok(not threw,
-               "Should not have thrown on non-optional argument following "
-               "optional argument.")
+    harness.ok(
+        not threw,
+        "Should not have thrown on non-optional argument following "
+        "optional argument.",
+    )
 
     parser = parser.reset()
-    parser.parse("""
+    parser.parse(
+        """
        interface OptionalConstraints2 {
          undefined foo(optional byte arg1 = 1, optional byte arg2 = 2,
-                   optional byte arg3, optional byte arg4 = 4,
-                   optional byte arg5, optional byte arg6 = 9);
+                       optional byte arg3, optional byte arg4 = 4,
+                       optional byte arg5, optional byte arg6 = 9);
        };
-    """)
+    """
+    )
     results = parser.finish()
     args = results[0].members[0].signatures()[0][1]
     harness.check(len(args), 6, "Should have 6 arguments")
-    harness.check(args[5].defaultValue.value, 9,
-                  "Should have correct default value")
+    harness.check(args[5].defaultValue.value, 9, "Should have correct default value")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_overload.py b/components/script/dom/bindings/codegen/parser/tests/test_overload.py
index 8e02f64d6c9..7816276aa6d 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_overload.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_overload.py
@@ -1,7 +1,9 @@
 import WebIDL
 
+
 def WebIDLTest(parser, harness):
-    parser.parse("""
+    parser.parse(
+        """
         interface TestOverloads {
           undefined basic();
           undefined basic(long arg1);
@@ -14,21 +16,29 @@ def WebIDLTest(parser, harness):
           undefined optionalTest();
           undefined optionalTest(optional long num1, long num2);
         };
-    """)
+    """
+    )
 
     results = parser.finish()
 
     harness.ok(True, "TestOverloads interface parsed without error.")
     harness.check(len(results), 1, "Should be one production.")
     iface = results[0]
-    harness.ok(isinstance(iface, WebIDL.IDLInterface),
-               "Should be an IDLInterface")
-    harness.check(iface.identifier.QName(), "::TestOverloads", "Interface has the right QName")
-    harness.check(iface.identifier.name, "TestOverloads", "Interface has the right name")
+    harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
+    harness.check(
+        iface.identifier.QName(), "::TestOverloads", "Interface has the right QName"
+    )
+    harness.check(
+        iface.identifier.name, "TestOverloads", "Interface has the right name"
+    )
     harness.check(len(iface.members), 4, "Expect %s members" % 4)
 
     member = iface.members[0]
-    harness.check(member.identifier.QName(), "::TestOverloads::basic", "Method has the right QName")
+    harness.check(
+        member.identifier.QName(),
+        "::TestOverloads::basic",
+        "Method has the right QName",
+    )
     harness.check(member.identifier.name, "basic", "Method has the right name")
     harness.check(member.hasOverloads(), True, "Method has overloads")
 
@@ -37,24 +47,28 @@ def WebIDLTest(parser, harness):
 
     (retval, argumentSet) = signatures[0]
 
-    harness.check(str(retval), "Undefined", "Expect a undefined retval")
+    harness.check(str(retval), "Undefined", "Expect an undefined retval")
     harness.check(len(argumentSet), 0, "Expect an empty argument set")
 
     (retval, argumentSet) = signatures[1]
-    harness.check(str(retval), "Undefined", "Expect a undefined retval")
+    harness.check(str(retval), "Undefined", "Expect an undefined retval")
     harness.check(len(argumentSet), 1, "Expect an argument set with one argument")
 
     argument = argumentSet[0]
-    harness.ok(isinstance(argument, WebIDL.IDLArgument),
-               "Should be an IDLArgument")
-    harness.check(argument.identifier.QName(), "::TestOverloads::basic::arg1", "Argument has the right QName")
+    harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument")
+    harness.check(
+        argument.identifier.QName(),
+        "::TestOverloads::basic::arg1",
+        "Argument has the right QName",
+    )
     harness.check(argument.identifier.name, "arg1", "Argument has the right name")
     harness.check(str(argument.type), "Long", "Argument has the right type")
 
     member = iface.members[3]
-    harness.check(len(member.overloadsForArgCount(0)), 1,
-                  "Only one overload for no args")
-    harness.check(len(member.overloadsForArgCount(1)), 0,
-                  "No overloads for one arg")
-    harness.check(len(member.overloadsForArgCount(2)), 1,
-                  "Only one overload for two args")
+    harness.check(
+        len(member.overloadsForArgCount(0)), 1, "Only one overload for no args"
+    )
+    harness.check(len(member.overloadsForArgCount(1)), 0, "No overloads for one arg")
+    harness.check(
+        len(member.overloadsForArgCount(2)), 1, "Only one overload for two args"
+    )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_promise.py b/components/script/dom/bindings/codegen/parser/tests/test_promise.py
index ef44a216d10..9b418d51afe 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_promise.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_promise.py
@@ -1,157 +1,177 @@
 def WebIDLTest(parser, harness):
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              legacycaller Promise<any> foo();
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow Promise return values for legacycaller.")
+    harness.ok(threw, "Should not allow Promise return values for legacycaller.")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              Promise<any> foo();
              long foo(long arg);
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow overloads which have both Promise and "
-               "non-Promise return types.")
+    harness.ok(
+        threw,
+        "Should not allow overloads which have both Promise and "
+        "non-Promise return types.",
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              long foo(long arg);
              Promise<any> foo();
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow overloads which have both Promise and "
-               "non-Promise return types.")
+    harness.ok(
+        threw,
+        "Should not allow overloads which have both Promise and "
+        "non-Promise return types.",
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              Promise<any>? foo();
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow nullable Promise return values.")
+    harness.ok(threw, "Should not allow nullable Promise return values.")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              undefined foo(Promise<any>? arg);
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow nullable Promise arguments.")
+    harness.ok(threw, "Should not allow nullable Promise arguments.")
 
     parser = parser.reset()
-    parser.parse("""
+    parser.parse(
+        """
        interface A {
          Promise<any> foo();
          Promise<any> foo(long arg);
        };
-    """)
-    results = parser.finish();
+    """
+    )
+    results = parser.finish()
 
-    harness.ok(True,
-               "Should allow overloads which only have Promise and return "
-               "types.")
+    harness.ok(
+        True, "Should allow overloads which only have Promise and return " "types."
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              attribute Promise<any> attr;
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow writable Promise-typed attributes.")
+    harness.ok(threw, "Should not allow writable Promise-typed attributes.")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
-             [LenientSetter] readonly attribute Promise<any> attr;
+             [LegacyLenientSetter] readonly attribute Promise<any> attr;
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow [LenientSetter] Promise-typed attributes.")
+    harness.ok(
+        threw, "Should not allow [LegacyLenientSetter] Promise-typed attributes."
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              [PutForwards=bar] readonly attribute Promise<any> attr;
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow [PutForwards] Promise-typed attributes.")
+    harness.ok(threw, "Should not allow [PutForwards] Promise-typed attributes.")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              [Replaceable] readonly attribute Promise<any> attr;
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow [Replaceable] Promise-typed attributes.")
+    harness.ok(threw, "Should not allow [Replaceable] Promise-typed attributes.")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface A {
              [SameObject] readonly attribute Promise<any> attr;
            };
-        """)
-        results = parser.finish();
+        """
+        )
+        results = parser.finish()
     except:
         threw = True
 
-    harness.ok(threw,
-               "Should not allow [SameObject] Promise-typed attributes.")
+    harness.ok(threw, "Should not allow [SameObject] Promise-typed attributes.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py b/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py
index d3932b54f8b..5a806bf2a2d 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py
@@ -1,11 +1,13 @@
 def WebIDLTest(parser, harness):
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface TestIface {
              static attribute boolean prototype;
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
@@ -15,11 +17,13 @@ def WebIDLTest(parser, harness):
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface TestIface {
              static boolean prototype();
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
@@ -29,11 +33,13 @@ def WebIDLTest(parser, harness):
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface TestIface {
              const boolean prototype = true;
            };
-        """)
+        """
+        )
         results = parser.finish()
     except:
         threw = True
@@ -42,39 +48,60 @@ def WebIDLTest(parser, harness):
 
     # Make sure that we can parse non-static attributes with 'prototype' as identifier.
     parser = parser.reset()
-    parser.parse("""
+    parser.parse(
+        """
        interface TestIface {
          attribute boolean prototype;
        };
-    """)
+    """
+    )
     results = parser.finish()
 
-    testIface = results[0];
-    harness.check(testIface.members[0].isStatic(), False, "Attribute should not be static")
-    harness.check(testIface.members[0].identifier.name, "prototype", "Attribute identifier should be 'prototype'")
+    testIface = results[0]
+    harness.check(
+        testIface.members[0].isStatic(), False, "Attribute should not be static"
+    )
+    harness.check(
+        testIface.members[0].identifier.name,
+        "prototype",
+        "Attribute identifier should be 'prototype'",
+    )
 
     # Make sure that we can parse non-static operations with 'prototype' as identifier.
parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestIface { boolean prototype(); }; - """) + """ + ) results = parser.finish() - testIface = results[0]; - harness.check(testIface.members[0].isStatic(), False, "Operation should not be static") - harness.check(testIface.members[0].identifier.name, "prototype", "Operation identifier should be 'prototype'") + testIface = results[0] + harness.check( + testIface.members[0].isStatic(), False, "Operation should not be static" + ) + harness.check( + testIface.members[0].identifier.name, + "prototype", + "Operation identifier should be 'prototype'", + ) # Make sure that we can parse dictionary members with 'prototype' as identifier. parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary TestDict { boolean prototype; }; - """) + """ + ) results = parser.finish() - testDict = results[0]; - harness.check(testDict.members[0].identifier.name, "prototype", "Dictionary member should be 'prototype'") - + testDict = results[0] + harness.check( + testDict.members[0].identifier.name, + "prototype", + "Dictionary member should be 'prototype'", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py b/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py index 86a1bf115b6..5ec4dde280e 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] readonly attribute long A; }; - """) + """ + ) results = parser.finish() except: @@ -13,16 +15,18 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] readonly attribute J A; }; interface J { }; - """) + """ + ) results = parser.finish() except: @@ -30,17 +34,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -48,17 +54,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] static readonly attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -66,17 +74,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ callback interface I { [PutForwards=B] readonly attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -84,10 +94,11 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=C] readonly attribute J A; [PutForwards=C] readonly attribute J B; @@ -98,7 +109,8 @@ def WebIDLTest(parser, harness): interface K { [PutForwards=A] readonly attribute I D; }; - """) + """ + ) results = 
parser.finish()
     except:
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_record.py b/components/script/dom/bindings/codegen/parser/tests/test_record.py
index 3d83e249be8..3a31d721b27 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_record.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_record.py
@@ -1,18 +1,22 @@
 import WebIDL
 
+
 def WebIDLTest(parser, harness):
-    parser.parse("""
+    parser.parse(
+        """
        dictionary Dict {};
        interface RecordArg {
          undefined foo(record<DOMString, Dict> arg);
        };
-    """)
+    """
+    )
 
     results = parser.finish()
 
-    harness.check(len(results), 2, "Should know about two things");
-    harness.ok(isinstance(results[1], WebIDL.IDLInterface),
-               "Should have an interface here");
+    harness.check(len(results), 2, "Should know about two things")
+    harness.ok(
+        isinstance(results[1], WebIDL.IDLInterface), "Should have an interface here"
+    )
     members = results[1].members
     harness.check(len(members), 1, "Should have one member")
     harness.ok(members[0].isMethod(), "Should have method")
@@ -20,34 +24,38 @@ def WebIDLTest(parser, harness):
     args = signature[1]
     harness.check(len(args), 1, "Should have one arg")
     harness.ok(args[0].type.isRecord(), "Should have a record type here")
-    harness.ok(args[0].type.inner.isDictionary(),
-               "Should have a dictionary inner type")
+    harness.ok(args[0].type.inner.isDictionary(), "Should have a dictionary inner type")
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            interface RecordUndefinedArg {
              undefined foo(record<DOMString, undefined> arg);
            };
-        """)
+        """
+        )
         results = parser.finish()
     except Exception as x:
         threw = True
-    harness.ok(threw, "Should have thrown because record can't have undefined as value type.")
+    harness.ok(
+        threw, "Should have thrown because record can't have undefined as value type."
+    )
 
     parser = parser.reset()
     threw = False
     try:
-        parser.parse("""
+        parser.parse(
+            """
            dictionary Dict {
              record<DOMString, Dict> val;
            };
-        """)
+        """
+        )
         results = parser.finish()
     except Exception as x:
         threw = True
 
-    harness.ok(threw,
-               "Should have thrown on dictionary containing itself via record.")
+    harness.ok(threw, "Should have thrown on dictionary containing itself via record.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py b/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
index 93ee42ed919..06ea6a47239 100644
--- a/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
+++ b/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
@@ -2,8 +2,9 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
+
 def should_throw(parser, harness, message, code):
-    parser = parser.reset();
+    parser = parser.reset()
     threw = False
     try:
         parser.parse(code)
@@ -16,43 +17,68 @@ def WebIDLTest(parser, harness):
     # The [Replaceable] extended attribute MUST take no arguments.
-    should_throw(parser, harness, "no arguments", """
+    should_throw(
+        parser,
+        harness,
+        "no arguments",
+        """
        interface I {
          [Replaceable=X] readonly attribute long A;
        };
-    """)
+    """,
+    )
 
     # An attribute with the [Replaceable] extended attribute MUST NOT also be
     # declared with the [PutForwards] extended attribute.
- should_throw(parser, harness, "PutForwards", """ + should_throw( + parser, + harness, + "PutForwards", + """ interface I { [PutForwards=B, Replaceable] readonly attribute J A; }; interface J { attribute long B; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on an attribute # that is not read only. - should_throw(parser, harness, "writable attribute", """ + should_throw( + parser, + harness, + "writable attribute", + """ interface I { [Replaceable] attribute long A; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on a static # attribute. - should_throw(parser, harness, "static attribute", """ + should_throw( + parser, + harness, + "static attribute", + """ interface I { [Replaceable] static readonly attribute long A; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on an attribute # declared on a callback interface. - should_throw(parser, harness, "callback interface", """ + should_throw( + parser, + harness, + "callback interface", + """ callback interface I { [Replaceable] readonly attribute long A; }; - """) + """, + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py b/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py index 5af0c22803c..e0e967dd420 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterface { const octet TEST_CONSTANT = 0; @@ -13,27 +15,47 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute2; undefined testMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 6, "TestSecureContextOnInterface should have six members") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members from partial interface") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members from partial interface") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members from partial interface") + harness.check( + len(results[0].members), + 6, + "TestSecureContextOnInterface should have six members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members", + ) + harness.ok( + 
results[0].members[1].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members from partial interface", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members from partial interface", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members from partial interface", + ) # Same thing, but with the partial interface specified first: parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface TestSecureContextOnInterfaceAfterPartialInterface { const octet TEST_CONSTANT_2 = 0; readonly attribute byte testAttribute2; @@ -45,26 +67,46 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute; undefined testMethod(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[1].members), 6, "TestSecureContextOnInterfaceAfterPartialInterface should have six members") - harness.ok(results[1].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[1].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members") - harness.ok(results[1].members[1].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members") - harness.ok(results[1].members[2].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members") - harness.ok(results[1].members[3].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members from partial interface") - harness.ok(results[1].members[4].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members from partial interface") - harness.ok(results[1].members[5].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members from partial interface") + harness.check( + len(results[1].members), + 6, + "TestSecureContextOnInterfaceAfterPartialInterface should have six members", + ) + harness.ok( + results[1].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[1].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members", + ) + harness.ok( + results[1].members[1].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members", + ) + harness.ok( + results[1].members[2].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members", + ) + harness.ok( + results[1].members[3].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members from partial interface", + ) + harness.ok( + results[1].members[4].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members from partial interface", + ) + harness.ok( + 
results[1].members[5].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members from partial interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterface { const octet TEST_CONSTANT = 0; readonly attribute byte testAttribute; @@ -76,26 +118,46 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute2; undefined testMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 6, "TestSecureContextOnPartialInterface should have six members") - harness.ok(results[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's constant members") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's attribute members") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's method members") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext"), - "Constant members from [SecureContext] partial interface should be [SecureContext]") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute members from [SecureContext] partial interface should be [SecureContext]") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext"), - "Method members from [SecureContext] partial interface should be [SecureContext]") + harness.check( + len(results[0].members), + 6, + "TestSecureContextOnPartialInterface should have six members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's constant members", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's attribute members", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's method members", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext"), + "Constant members from [SecureContext] partial interface should be [SecureContext]", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute members from [SecureContext] partial interface should be [SecureContext]", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext"), + "Method members from [SecureContext] partial interface should be [SecureContext]", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnInterfaceMembers { const octet TEST_NON_SECURE_CONSTANT_1 = 0; [SecureContext] @@ -110,32 +172,58 @@ def WebIDLTest(parser, harness): undefined testSecureMethod(byte foo); undefined testNonSecureMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - 
harness.check(len(results[0].members), 9, "TestSecureContextOnInterfaceMembers should have nine members") - harness.ok(results[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] on members should not propagate up to the interface") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "Constant should not have [SecureContext] extended attribute") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "Constant should have [SecureContext] extended attribute") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Constant should not have [SecureContext] extended attribute") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Attribute should not have [SecureContext] extended attribute") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute should have [SecureContext] extended attribute") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext") is None, - "Attribute should not have [SecureContext] extended attribute") - harness.ok(results[0].members[6].getExtendedAttribute("SecureContext") is None, - "Method should not have [SecureContext] extended attribute") - harness.ok(results[0].members[7].getExtendedAttribute("SecureContext"), - "Method should have [SecureContext] extended attribute") - harness.ok(results[0].members[8].getExtendedAttribute("SecureContext") is None, - "Method should not have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 9, + "TestSecureContextOnInterfaceMembers should have nine members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] on members should not propagate up to the interface", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "Constant should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext"), + "Constant should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Constant should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Attribute should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext") is None, + "Attribute should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[6].getExtendedAttribute("SecureContext") is None, + "Method should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[7].getExtendedAttribute("SecureContext"), + "Method should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[8].getExtendedAttribute("SecureContext") is None, + "Method should not have [SecureContext] extended attribute", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterfaceMembers { }; partial interface TestSecureContextOnPartialInterfaceMembers { @@ -152,37 +240,62 @@ def WebIDLTest(parser, harness): undefined testSecureMethod(byte foo); undefined testNonSecureMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - 
harness.check(len(results[0].members), 9, "TestSecureContextOnPartialInterfaceMembers should have nine members") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "Constant from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "Constant from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Constant from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Attribute from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext") is None, - "Attribute from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[6].getExtendedAttribute("SecureContext") is None, - "Method from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[7].getExtendedAttribute("SecureContext"), - "Method from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[8].getExtendedAttribute("SecureContext") is None, - "Method from partial interface should not have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 9, + "TestSecureContextOnPartialInterfaceMembers should have nine members", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "Constant from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext"), + "Constant from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Constant from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Attribute from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext") is None, + "Attribute from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[6].getExtendedAttribute("SecureContext") is None, + "Method from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[7].getExtendedAttribute("SecureContext"), + "Method from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[8].getExtendedAttribute("SecureContext") is None, + "Method from partial interface should not have [SecureContext] extended attribute", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext=something] interface TestSecureContextTakesNoValue1 { const octet TEST_SECURE_CONSTANT = 0; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -191,7 
+304,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextForOverloads1 { [SecureContext] undefined testSecureMethod(byte foo); @@ -199,16 +313,21 @@ def WebIDLTest(parser, harness): partial interface TestSecureContextForOverloads1 { undefined testSecureMethod(byte foo, byte bar); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "If [SecureContext] appears on an overloaded operation, then it MUST appear on all overloads") + harness.ok( + threw, + "If [SecureContext] appears on an overloaded operation, then it MUST appear on all overloads", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextForOverloads2 { [SecureContext] undefined testSecureMethod(byte foo); @@ -217,31 +336,40 @@ def WebIDLTest(parser, harness): [SecureContext] undefined testSecureMethod(byte foo, byte bar); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, "[SecureContext] can appear on an overloaded operation if it appears on all overloads") + harness.ok( + not threw, + "[SecureContext] can appear on an overloaded operation if it appears on all overloads", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterfaceAndMember { [SecureContext] undefined testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on an interface and interface member") + harness.ok( + threw, "[SecureContext] must not appear on an interface and interface member" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterfaceAndMember { }; [SecureContext] @@ -249,16 +377,21 @@ def WebIDLTest(parser, harness): [SecureContext] undefined testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on a partial interface and one of the partial interface's member's") + harness.ok( + threw, + "[SecureContext] must not appear on a partial interface and one of the partial interface's member's", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterfaceAndPartialInterfaceMember { }; @@ -266,31 +399,41 @@ def WebIDLTest(parser, harness): [SecureContext] undefined testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on an interface and one of its partial interface's member's") + harness.ok( + threw, + "[SecureContext] must not appear on an interface and one of its partial interface's member's", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInheritedInterface { }; interface TestSecureContextNotOnInheritingInterface : TestSecureContextOnInheritedInterface { undefined testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must appear on interfaces that inherit from another [SecureContext] interface") + harness.ok( + threw, + "[SecureContext] must appear on interfaces that inherit from another [SecureContext] interface", + ) # Test 'includes'. 
parser = parser.reset() - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextInterfaceThatIncludesNonSecureContextMixin { const octet TEST_CONSTANT = 0; @@ -301,31 +444,56 @@ def WebIDLTest(parser, harness): undefined testMethod2(byte foo); }; TestSecureContextInterfaceThatIncludesNonSecureContextMixin includes TestNonSecureContextMixin; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 4, "TestSecureContextInterfaceThatImplementsNonSecureContextInterface should have four members") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members even when other members are copied from a non-[SecureContext] interface") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext") is None, - "Constants copied from non-[SecureContext] mixin should not be [SecureContext]") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Attributes copied from non-[SecureContext] mixin should not be [SecureContext]") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Methods copied from non-[SecureContext] mixin should not be [SecureContext]") + harness.check( + len(results[0].members), + 4, + "TestSecureContextInterfaceThatImplementsNonSecureContextInterface should have four members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members even when other members are copied from a non-[SecureContext] interface", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext") is None, + "Constants copied from non-[SecureContext] mixin should not be [SecureContext]", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Attributes copied from non-[SecureContext] mixin should not be [SecureContext]", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Methods copied from non-[SecureContext] mixin should not be [SecureContext]", + ) - # Test SecureContext and NoInterfaceObject + # Test SecureContext and LegacyNoInterfaceObject parser = parser.reset() - parser.parse(""" - [NoInterfaceObject, SecureContext] - interface TestSecureContextNoInterfaceObject { + parser.parse( + """ + [LegacyNoInterfaceObject, SecureContext] + interface TestSecureContextLegacyNoInterfaceObject { undefined testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 1, "TestSecureContextNoInterfaceObject should have only one member") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "Interface member should have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 1, + "TestSecureContextLegacyNoInterfaceObject should have only one member", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), 
+ "Interface member should have [SecureContext] extended attribute", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py b/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py index b209c850d6b..a11860b3728 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch1 { getter long long foo(long index); }; - """) + """ + ) results = parser.finish() except: @@ -15,11 +17,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch2 { getter undefined foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -29,11 +33,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch3 { getter boolean foo(unsigned long index, boolean extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -43,11 +49,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch4 { getter boolean foo(unsigned long... index); }; - """) + """ + ) results = parser.finish() except: @@ -57,11 +65,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch5 { getter boolean foo(optional unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -71,11 +81,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch6 { getter boolean foo(); }; - """) + """ + ) results = parser.finish() except: @@ -85,11 +97,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch7 { deleter long long foo(long index); }; - """) + """ + ) results = parser.finish() except: @@ -99,11 +113,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch9 { deleter boolean foo(unsigned long index, boolean extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -113,11 +129,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch10 { deleter boolean foo(unsigned long... 
index); }; - """) + """ + ) results = parser.finish() except: @@ -127,11 +145,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch11 { deleter boolean foo(optional unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -141,11 +161,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch12 { deleter boolean foo(); }; - """) + """ + ) results = parser.finish() except: @@ -155,11 +177,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch13 { setter long long foo(long index, long long value); }; - """) + """ + ) results = parser.finish() except: @@ -169,11 +193,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch15 { setter boolean foo(unsigned long index, boolean value, long long extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -183,11 +209,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch16 { setter boolean foo(unsigned long index, boolean... value); }; - """) + """ + ) results = parser.finish() except: @@ -197,11 +225,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch17 { setter boolean foo(unsigned long index, optional boolean value); }; - """) + """ + ) results = parser.finish() except: @@ -211,11 +241,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch18 { setter boolean foo(); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py b/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py index c657c9c797d..9601a0a968f 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface SpecialMethods { getter long long (unsigned long index); setter long long (unsigned long index, long long value); @@ -14,47 +16,90 @@ def WebIDLTest(parser, harness): interface SpecialMethodsCombination { getter deleter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() - def checkMethod(method, QName, name, - static=False, getter=False, setter=False, - deleter=False, legacycaller=False, stringifier=False): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + def checkMethod( + method, + QName, + name, + static=False, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.check(method.identifier.QName(), QName, "Method has the right QName") harness.check(method.identifier.name, name, "Method has the right name") harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - 
harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value") + harness.check( + method.isDeleter(), deleter, "Method has the correct deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, + "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) harness.check(len(results), 2, "Expect 2 interfaces") iface = results[0] harness.check(len(iface.members), 6, "Expect 6 members") - checkMethod(iface.members[0], "::SpecialMethods::__indexedgetter", "__indexedgetter", - getter=True) - checkMethod(iface.members[1], "::SpecialMethods::__indexedsetter", "__indexedsetter", - setter=True) - checkMethod(iface.members[2], "::SpecialMethods::__namedgetter", "__namedgetter", - getter=True) - checkMethod(iface.members[3], "::SpecialMethods::__namedsetter", "__namedsetter", - setter=True) - checkMethod(iface.members[4], "::SpecialMethods::__nameddeleter", "__nameddeleter", - deleter=True) + checkMethod( + iface.members[0], + "::SpecialMethods::__indexedgetter", + "__indexedgetter", + getter=True, + ) + checkMethod( + iface.members[1], + "::SpecialMethods::__indexedsetter", + "__indexedsetter", + setter=True, + ) + checkMethod( + iface.members[2], + "::SpecialMethods::__namedgetter", + "__namedgetter", + getter=True, + ) + checkMethod( + iface.members[3], + "::SpecialMethods::__namedsetter", + "__namedsetter", + setter=True, + ) + checkMethod( + iface.members[4], + "::SpecialMethods::__nameddeleter", + "__nameddeleter", + deleter=True, + ) iface = results[1] harness.check(len(iface.members), 1, "Expect 1 member") - checkMethod(iface.members[0], "::SpecialMethodsCombination::__namedgetterdeleter", - "__namedgetterdeleter", getter=True, deleter=True) + checkMethod( + iface.members[0], + "::SpecialMethodsCombination::__namedgetterdeleter", + "__namedgetterdeleter", + getter=True, + deleter=True, + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: @@ -63,11 +108,10 @@ def WebIDLTest(parser, harness): interface IndexedDeleter { deleter undefined(unsigned long index); }; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "There are no indexed deleters") - - diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py b/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py index 9bf3d903463..014737e8168 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodUniqueness1 { getter deleter boolean (DOMString name); getter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: @@ -18,12 +21,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodUniqueness1 { deleter boolean (DOMString name); getter deleter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: @@ -33,12 +38,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ 
interface SpecialMethodUniqueness1 { setter boolean (DOMString name); setter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py b/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py index deabdc5ec81..948be71e4dd 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py @@ -1,27 +1,34 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; }; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLMethod), - "Stringifer should be method") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLMethod), + "Stringifer should be method", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -32,12 +39,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier DOMString foo(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -45,70 +54,107 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow a 'stringifier;' and a 'stringifier()'") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute DOMString foo; }; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLAttribute), - "Stringifier attribute should be an attribute") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLAttribute), + "Stringifier attribute should be an attribute", + ) stringifier = results[0].members[1] - harness.ok(isinstance(stringifier, WebIDL.IDLMethod), - "Stringifier attribute should insert a method") - harness.ok(stringifier.isStringifier(), - "Inserted method should be a stringifier") + harness.ok( + isinstance(stringifier, WebIDL.IDLMethod), + "Stringifier attribute should insert a method", + ) + harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier {}; interface mixin TestStringifierMixin { stringifier attribute DOMString foo; }; TestStringifier includes TestStringifierMixin; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLAttribute), - "Stringifier attribute should be an attribute") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLAttribute), + "Stringifier attribute should be an attribute", + ) stringifier = results[0].members[1] - harness.ok(isinstance(stringifier, WebIDL.IDLMethod), - "Stringifier attribute should insert a method") - harness.ok(stringifier.isStringifier(), - "Inserted method should be a stringifier") + harness.ok( + isinstance(stringifier, WebIDL.IDLMethod), + "Stringifier attribute should insert a method", + ) + harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute USVString foo; }; - """) + """ + ) results = parser.finish() stringifier = results[0].members[1] - harness.ok(stringifier.signatures()[0][0].isUSVString(), - 
"Stringifier attributes should allow USVString") + harness.ok( + stringifier.signatures()[0][0].isUSVString(), + "Stringifier attributes should allow USVString", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { [Throws, NeedsSubjectPrincipal] stringifier attribute USVString foo; }; - """) + """ + ) results = parser.finish() stringifier = results[0].members[1] - harness.ok(stringifier.getExtendedAttribute("Throws"), - "Stringifier attributes should support [Throws]") - harness.ok(stringifier.getExtendedAttribute("NeedsSubjectPrincipal"), - "Stringifier attributes should support [NeedsSubjectPrincipal]") + harness.ok( + stringifier.getExtendedAttribute("Throws"), + "Stringifier attributes should support [Throws]", + ) + harness.ok( + stringifier.getExtendedAttribute("NeedsSubjectPrincipal"), + "Stringifier attributes should support [NeedsSubjectPrincipal]", + ) + + parser = parser.reset() + parser.parse( + """ + interface TestStringifier { + stringifier attribute UTF8String foo; + }; + """ + ) + results = parser.finish() + stringifier = results[0].members[1] + harness.ok( + stringifier.signatures()[0][0].isUTF8String(), + "Stringifier attributes should allow UTF8String", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute ByteString foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -118,12 +164,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier attribute DOMString foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -133,12 +181,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute DOMString foo; stringifier attribute DOMString bar; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py b/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py index ad01330e65a..f312667ec4d 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py @@ -6,7 +6,8 @@ def WebIDLTest(parser, harness): interface Test { object toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -21,7 +22,8 @@ def WebIDLTest(parser, harness): object toJSON(object arg); object toJSON(long arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -35,7 +37,8 @@ def WebIDLTest(parser, harness): interface Test { object toJSON(object arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,7 +52,8 @@ def WebIDLTest(parser, harness): interface Test { long toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -63,11 +67,14 @@ def WebIDLTest(parser, harness): interface Test { [Default] object toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, "Should allow a default toJSON method with 'object' as return type.") + harness.ok( + not threw, "Should allow a default toJSON method with 'object' as return type." 
+ ) parser = parser.reset() threw = False @@ -77,119 +84,226 @@ def WebIDLTest(parser, harness): interface Test { [Default] long toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow a default toJSON method with non-'object' as return type.") + harness.ok( + threw, + "Should not allow a default toJSON method with non-'object' as return type.", + ) - JsonTypes = [ "byte", "octet", "short", "unsigned short", "long", "unsigned long", "long long", - "unsigned long long", "float", "unrestricted float", "double", "unrestricted double", "boolean", - "DOMString", "ByteString", "UTF8String", "USVString", "Enum", "InterfaceWithToJSON", "object" ] + JsonTypes = [ + "byte", + "octet", + "short", + "unsigned short", + "long", + "unsigned long", + "long long", + "unsigned long long", + "float", + "unrestricted float", + "double", + "unrestricted double", + "boolean", + "DOMString", + "ByteString", + "UTF8String", + "USVString", + "Enum", + "InterfaceWithToJSON", + "object", + ] - nonJsonTypes = [ "InterfaceWithoutToJSON", "any", "Int8Array", "Int16Array", "Int32Array","Uint8Array", - "Uint16Array", "Uint32Array", "Uint8ClampedArray", "Float32Array", "Float64Array", "ArrayBuffer" ] + nonJsonTypes = [ + "InterfaceWithoutToJSON", + "any", + "Int8Array", + "Int16Array", + "Int32Array", + "Uint8Array", + "Uint16Array", + "Uint32Array", + "Uint8ClampedArray", + "Float32Array", + "Float64Array", + "ArrayBuffer", + ] def doTest(testIDL, shouldThrow, description): p = parser.reset() threw = False try: - p.parse(testIDL + - """ + p.parse( + testIDL + + """ enum Enum { "a", "b", "c" }; interface InterfaceWithToJSON { long toJSON(); }; interface InterfaceWithoutToJSON {}; - """); - p.finish(); + """ + ) + p.finish() except Exception as x: threw = True harness.ok(x.message == "toJSON method has non-JSON return type", x) harness.check(threw, shouldThrow, description) - for type in JsonTypes: - doTest("interface Test { %s toJSON(); };" % type, False, - "%s should be a JSON type" % type) + doTest( + "interface Test { %s toJSON(); };" % type, + False, + "%s should be a JSON type" % type, + ) - doTest("interface Test { sequence<%s> toJSON(); };" % type, False, - "sequence<%s> should be a JSON type" % type) + doTest( + "interface Test { sequence<%s> toJSON(); };" % type, + False, + "sequence<%s> should be a JSON type" % type, + ) - doTest("dictionary Foo { %s foo; }; " - "interface Test { Foo toJSON(); }; " % type, False, - "dictionary containing only JSON type (%s) should be a JSON type" % type) + doTest( + "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type, + False, + "dictionary containing only JSON type (%s) should be a JSON type" % type, + ) - doTest("dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " - "interface Test { Bar toJSON(); }; " % type, False, - "dictionary whose ancestors only contain JSON types should be a JSON type") + doTest( + "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " + "interface Test { Bar toJSON(); }; " % type, + False, + "dictionary whose ancestors only contain JSON types should be a JSON type", + ) - doTest("dictionary Foo { any foo; }; dictionary Bar : Foo { %s bar; };" - "interface Test { Bar toJSON(); };" % type, True, - "dictionary whose ancestors contain non-JSON types should not be a JSON type") + doTest( + "dictionary Foo { any foo; }; dictionary Bar : Foo { %s bar; };" + "interface Test { Bar toJSON(); };" % type, + True, + "dictionary whose ancestors contain non-JSON types should not 
be a JSON type", + ) - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) otherUnionType = "Foo" if type != "object" else "long" - doTest("interface Foo { object toJSON(); };" - "interface Test { (%s or %s) toJSON(); };" % (otherUnionType, type), False, - "union containing only JSON types (%s or %s) should be a JSON type" %(otherUnionType, type)) + doTest( + "interface Foo { object toJSON(); };" + "interface Test { (%s or %s) toJSON(); };" % (otherUnionType, type), + False, + "union containing only JSON types (%s or %s) should be a JSON type" + % (otherUnionType, type), + ) - doTest("interface test { %s? toJSON(); };" % type, False, - "Nullable type (%s) should be a JSON type" % type) + doTest( + "interface test { %s? toJSON(); };" % type, + False, + "Nullable type (%s) should be a JSON type" % type, + ) - doTest("interface Foo : InterfaceWithoutToJSON { %s toJSON(); };" - "interface Test { Foo toJSON(); };" % type, False, - "interface with toJSON should be a JSON type") + doTest( + "interface Foo : InterfaceWithoutToJSON { %s toJSON(); };" + "interface Test { Foo toJSON(); };" % type, + False, + "interface with toJSON should be a JSON type", + ) - doTest("interface Foo : InterfaceWithToJSON { };" - "interface Test { Foo toJSON(); };", False, - "inherited interface with toJSON should be a JSON type") + doTest( + "interface Foo : InterfaceWithToJSON { };" "interface Test { Foo toJSON(); };", + False, + "inherited interface with toJSON should be a JSON type", + ) for type in nonJsonTypes: - doTest("interface Test { %s toJSON(); };" % type, True, - "%s should not be a JSON type" % type) + doTest( + "interface Test { %s toJSON(); };" % type, + True, + "%s should not be a JSON type" % type, + ) - doTest("interface Test { sequence<%s> toJSON(); };" % type, True, - "sequence<%s> should not be a JSON type" % type) + doTest( + "interface Test { sequence<%s> toJSON(); };" % type, + True, + "sequence<%s> should not be a JSON type" % type, + ) - doTest("dictionary Foo { %s foo; }; " - "interface Test { Foo toJSON(); }; " % type, True, - "Dictionary containing a non-JSON type (%s) should not be a JSON type" % type) + doTest( + "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type, + True, + "Dictionary containing a non-JSON type (%s) should not be a JSON type" + % type, + ) - doTest("dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " - "interface Test { Bar toJSON(); }; " % type, True, - "dictionary whose ancestors only contain non-JSON types should not be a JSON type") + doTest( + "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " + "interface Test { Bar toJSON(); }; " % type, + True, + "dictionary whose ancestors only contain non-JSON types should not be a JSON type", + 
) - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) if type != "any": - doTest("interface Foo { object toJSON(); }; " - "interface Test { (Foo or %s) toJSON(); };" % type, True, - "union containing a non-JSON type (%s) should not be a JSON type" % type) + doTest( + "interface Foo { object toJSON(); }; " + "interface Test { (Foo or %s) toJSON(); };" % type, + True, + "union containing a non-JSON type (%s) should not be a JSON type" + % type, + ) - doTest("interface test { %s? toJSON(); };" % type, True, - "Nullable type (%s) should not be a JSON type" % type) + doTest( + "interface test { %s? toJSON(); };" % type, + True, + "Nullable type (%s) should not be a JSON type" % type, + ) - doTest("dictionary Foo { long foo; any bar; };" - "interface Test { Foo toJSON(); };", True, - "dictionary containing a non-JSON type should not be a JSON type") + doTest( + "dictionary Foo { long foo; any bar; };" "interface Test { Foo toJSON(); };", + True, + "dictionary containing a non-JSON type should not be a JSON type", + ) - doTest("interface Foo : InterfaceWithoutToJSON { }; " - "interface Test { Foo toJSON(); };", True, - "interface without toJSON should not be a JSON type") + doTest( + "interface Foo : InterfaceWithoutToJSON { }; " + "interface Test { Foo toJSON(); };", + True, + "interface without toJSON should not be a JSON type", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py b/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py index 7a0bde8a6dc..7becfdca1f3 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [TreatNonCallableAsNull] callback Function = any(any... arguments); interface TestTreatNonCallableAsNull1 { attribute Function? onfoo; attribute Function onbar; }; - """) + """ + ) results = parser.finish() @@ -22,13 +25,15 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ callback Function = any(any... arguments); interface TestTreatNonCallableAsNull2 { [TreatNonCallableAsNull] attribute Function onfoo; }; - """) + """ + ) results = parser.finish() except: @@ -40,14 +45,16 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ callback Function = any(any... arguments); [TreatNonCallableAsNull] interface TestTreatNonCallableAsNull3 { attribute Function onfoo; }; - """) + """ + ) results = parser.finish() except: @@ -59,10 +66,12 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" - [TreatNonCallableAsNull, TreatNonObjectAsNull] + parser.parse( + """ + [TreatNonCallableAsNull, LegacyTreatNonObjectAsNull] callback Function = any(any... 
arguments); - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_typedef.py b/components/script/dom/bindings/codegen/parser/tests/test_typedef.py index d98088380ba..c19d064efff 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_typedef.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_typedef.py @@ -1,5 +1,6 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ typedef long mylong; typedef long? mynullablelong; interface Foo { @@ -8,22 +9,28 @@ def WebIDLTest(parser, harness): undefined bar(optional mynullablelong arg = null); undefined baz(mylong arg); }; - """) + """ + ) results = parser.finish() - harness.check(results[2].members[1].signatures()[0][1][0].type.name, "LongOrNull", - "Should expand typedefs") + harness.check( + results[2].members[1].signatures()[0][1][0].type.name, + "LongOrNull", + "Should expand typedefs", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef long? mynullablelong; interface Foo { undefined foo(mynullablelong? Y); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -33,12 +40,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef long? mynullablelong; interface Foo { const mynullablelong? X = 5; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -48,29 +57,38 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { const mynullablelong? X = 5; }; typedef long? mynullablelong; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on nullable inside nullable const typedef " - "after interface.") + harness.ok( + threw, + "Should have thrown on nullable inside nullable const typedef " + "after interface.", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface Foo { const mylong X = 5; }; typedef long mylong; - """) + """ + ) results = parser.finish() - harness.check(results[0].members[0].type.name, "Long", - "Should expand typedefs that come before interface") + harness.check( + results[0].members[0].type.name, + "Long", + "Should expand typedefs that come before interface", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py b/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py index 0ea38ce437b..2aab3a8a91f 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py @@ -5,12 +5,15 @@ def WebIDLTest(parser, harness): """ typedef long foo; typedef long foo; - """) + """ + ) results = parser.finish() except Exception as e: exception = e harness.ok(exception, "Should have thrown.") - harness.ok("Multiple unresolvable definitions of identifier 'foo'" in str(exception), - "Should have a sane exception message") + harness.ok( + "Multiple unresolvable definitions of identifier 'foo'" in str(exception), + "Should have a sane exception message", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_undefined.py b/components/script/dom/bindings/codegen/parser/tests/test_undefined.py new file mode 100644 index 00000000000..4731ee1bcd7 --- /dev/null +++ 
b/components/script/dom/bindings/codegen/parser/tests/test_undefined.py @@ -0,0 +1,246 @@ +import WebIDL + + +def WebIDLTest(parser, harness): + try: + parser.parse( + """ + dictionary Dict { + undefined undefinedMember; + double bar; + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok(threw, "undefined must not be used as the type of a dictionary member") + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + dictionary Dict { + (undefined or double) undefinedMemberOfUnionInDict; + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of a dictionary member, " + "whether directly or in a union", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + double bar(undefined foo); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a regular operation)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + getter double(undefined name); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a getter)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + setter undefined(DOMString name, undefined value); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a setter)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + deleter undefined (undefined name); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a deleter)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + constructor (undefined foo); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a constructor)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + callback Callback = undefined (undefined foo); + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a callback)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + async iterable(undefined name); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of an async iterable " + "iterator)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + static double bar(undefined foo); + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined must not be used as the type of an argument in any " + "circumstance (so not as the argument of a static 
operation)", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + const undefined FOO = undefined; + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined is not a valid type for a constant", + ) + + parser = parser.reset() + threw = False + + try: + parser.parse( + """ + interface Foo { + const any FOO = undefined; + }; + """ + ) + results = parser.finish() + except: + threw = True + + harness.ok( + threw, + "undefined is not a valid value for a constant", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py b/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py index d28cc1ec052..b024d317492 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py @@ -10,18 +10,21 @@ def WebIDLTest(parser, harness): interface Baz : Bar { getter long(DOMString name); }; - """); - results = parser.finish(); + """ + ) + results = parser.finish() harness.check(len(results), 3, "Should have three interfaces") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties] interface NoNamedGetter { }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -31,12 +34,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties=Foo] interface ShouldNotHaveArg { getter long(DOMString name); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -46,7 +51,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties] interface Foo { getter long(DOMString name); @@ -56,7 +62,8 @@ def WebIDLTest(parser, harness): interface Baz : Bar { getter long(DOMString name); }; - """) + """ + ) results = parser.finish() except Exception as x: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py b/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py index e72548f637f..500d123ddb2 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py @@ -1,253 +1,311 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[LegacyUnforgeable] properties.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { const short foo = 10; }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a constant with " - "the same name.") + harness.check( + len(results), + 2, + "Should be able to 
inherit from an interface with " + "[LegacyUnforgeable] properties even if we have a constant with " + "the same name.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { static attribute short foo; }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a static attribute " - "with the same name.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[LegacyUnforgeable] properties even if we have a static attribute " + "with the same name.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { static undefined foo(); }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a static operation " - "with the same name.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[LegacyUnforgeable] properties even if we have a static operation " + "with the same name.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { undefined foo(); }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute on " - "parent with operation.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute on " + "parent with operation.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { undefined foo(); }; interface Parent { - [Unforgeable] undefined foo(); + [LegacyUnforgeable] undefined foo(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable operation on " - "parent with operation.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable operation on " + "parent with operation.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { attribute short foo; }; interface Parent { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute on " - "parent with attribute.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute on " + "parent with attribute.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { attribute short foo; }; interface Parent { - [Unforgeable] undefined foo(); + [LegacyUnforgeable] undefined foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - 
harness.ok(threw, - "Should have thrown when shadowing unforgeable operation on " - "parent with attribute.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable operation on " + "parent with attribute.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { }; interface Parent {}; interface mixin Mixin { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; Parent includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 4, - "Should be able to inherit from an interface with a " - "mixin with [Unforgeable] properties.") + harness.check( + len(results), + 4, + "Should be able to inherit from an interface with a " + "mixin with [LegacyUnforgeable] properties.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { undefined foo(); }; interface Parent {}; interface mixin Mixin { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; Parent includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute " - "of parent's consequential interface.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute " + "of parent's consequential interface.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent : GrandParent {}; interface GrandParent {}; interface mixin Mixin { - [Unforgeable] readonly attribute long foo; + [LegacyUnforgeable] readonly attribute long foo; }; GrandParent includes Mixin; interface mixin ChildMixin { undefined foo(); }; Child includes ChildMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when our consequential interface shadows unforgeable attribute " - "of ancestor's consequential interface.") + harness.ok( + threw, + "Should have thrown when our consequential interface shadows unforgeable attribute " + "of ancestor's consequential interface.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent : GrandParent {}; interface GrandParent {}; interface mixin Mixin { - [Unforgeable] undefined foo(); + [LegacyUnforgeable] undefined foo(); }; GrandParent includes Mixin; interface mixin ChildMixin { undefined foo(); }; Child includes ChildMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when our consequential interface shadows unforgeable operation " - "of ancestor's consequential interface.") + harness.ok( + threw, + "Should have thrown when our consequential interface shadows unforgeable operation " + "of ancestor's consequential interface.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface iface { - [Unforgeable] attribute long foo; + [LegacyUnforgeable] attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 1, - "Should allow writable [Unforgeable] attribute.") + harness.check( + len(results), 1, "Should allow writable [LegacyUnforgeable] attribute." 
+ ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface iface { - [Unforgeable] static readonly attribute long foo; + [LegacyUnforgeable] static readonly attribute long foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should have thrown for static [Unforgeable] attribute.") + harness.ok(threw, "Should have thrown for static [LegacyUnforgeable] attribute.") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union.py b/components/script/dom/bindings/codegen/parser/tests/test_union.py index 469208b264d..7fc1236d54e 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_union.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_union.py @@ -3,12 +3,15 @@ import itertools import string # We'd like to use itertools.chain but it's 2.6 or higher. + + def chain(*iterables): # chain('ABC', 'DEF') --> A B C D E F for it in iterables: for element in it: yield element + # We'd like to use itertools.combinations but it's 2.6 or higher. def combinations(iterable, r): # combinations('ABCD', 2) --> AB AC AD BC BD CD @@ -26,10 +29,11 @@ def combinations(iterable, r): else: return indices[i] += 1 - for j in range(i+1, r): - indices[j] = indices[j-1] + 1 + for j in range(i + 1, r): + indices[j] = indices[j - 1] + 1 yield tuple(pool[i] for i in indices) + # We'd like to use itertools.combinations_with_replacement but it's 2.7 or # higher. def combinations_with_replacement(iterable, r): @@ -49,27 +53,30 @@ def combinations_with_replacement(iterable, r): indices[i:] = [indices[i] + 1] * (r - i) yield tuple(pool[i] for i in indices) + def WebIDLTest(parser, harness): - types = ["float", - "double", - "short", - "unsigned short", - "long", - "unsigned long", - "long long", - "unsigned long long", - "boolean", - "byte", - "octet", - "DOMString", - "ByteString", - "USVString", - #"sequence", - "object", - "ArrayBuffer", - #"Date", - "TestInterface1", - "TestInterface2"] + types = [ + "float", + "double", + "short", + "unsigned short", + "long", + "unsigned long", + "long long", + "unsigned long long", + "boolean", + "byte", + "octet", + "DOMString", + "ByteString", + "USVString", + # "sequence", + "object", + "ArrayBuffer", + # "Date", + "TestInterface1", + "TestInterface2", + ] testPre = """ interface TestInterface1 { @@ -78,13 +85,18 @@ def WebIDLTest(parser, harness): }; """ - interface = testPre + """ + interface = ( + testPre + + """ interface PrepareForTest { """ + ) for (i, type) in enumerate(types): - interface += string.Template(""" + interface += string.Template( + """ readonly attribute ${type} attr${i}; - """).substitute(i=i, type=type) + """ + ).substitute(i=i, type=type) interface += """ }; """ @@ -98,8 +110,10 @@ def WebIDLTest(parser, harness): def typesAreDistinguishable(t): return all(u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2)) + def typesAreNotDistinguishable(t): return any(not u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2)) + def unionTypeName(t): if len(t) > 2: t[0:2] = [unionTypeName(t[0:2])] @@ -118,29 +132,40 @@ def WebIDLTest(parser, harness): # as a string and the parsed IDL type. 
def invalidUnionWithUnion(typeCombinations): for c in typeCombinations: - if (typesAreNotDistinguishable((c[0][1], c[1][1])) and - typesAreDistinguishable((c[1][1], c[2][1])) and - typesAreDistinguishable((c[0][1], c[2][1]))): + if ( + typesAreNotDistinguishable((c[0][1], c[1][1])) + and typesAreDistinguishable((c[1][1], c[2][1])) + and typesAreDistinguishable((c[0][1], c[2][1])) + ): yield unionTypeName([t[0] for t in c]) # Create a list of tuples containing the name of the type as a string and # the parsed IDL type. types = zip(types, (a.type for a in iface.members)) - validUnionTypes = chain(unionTypes(combinations(types, 2), typesAreDistinguishable), - unionTypes(combinations(types, 3), typesAreDistinguishable)) - invalidUnionTypes = chain(unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable), - invalidUnionWithUnion(combinations(types, 3))) - interface = testPre + """ + validUnionTypes = chain( + unionTypes(combinations(types, 2), typesAreDistinguishable), + unionTypes(combinations(types, 3), typesAreDistinguishable), + ) + invalidUnionTypes = chain( + unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable), + invalidUnionWithUnion(combinations(types, 3)), + ) + interface = ( + testPre + + """ interface TestUnion { """ + ) for (i, type) in enumerate(validUnionTypes): - interface += string.Template(""" + interface += string.Template( + """ undefined method${i}(${type} arg); ${type} returnMethod${i}(); attribute ${type} attr${i}; undefined optionalMethod${i}(${type}? arg); - """).substitute(i=i, type=type) + """ + ).substitute(i=i, type=type) interface += """ }; """ @@ -150,11 +175,16 @@ def WebIDLTest(parser, harness): parser = parser.reset() for invalid in invalidUnionTypes: - interface = testPre + string.Template(""" + interface = ( + testPre + + string.Template( + """ interface TestUnion { undefined method(${type} arg); }; - """).substitute(type=invalid) + """ + ).substitute(type=invalid) + ) threw = False try: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union_any.py b/components/script/dom/bindings/codegen/parser/tests/test_union_any.py index 3eb67648d56..caba44b55f9 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_union_any.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_union_any.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AnyNotInUnion { undefined foo((any or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py b/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py index 71da4349e6e..d15ed4cfb54 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py @@ -1,53 +1,60 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface OneNullableInUnion { undefined foo((object? or DOMString?) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Two nullable member types of a union should have thrown.") + harness.ok(threw, "Two nullable member types of a union should have thrown.") parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface NullableInNullableUnion { undefined foo((object? or DOMString)? 
arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable union type with a nullable member type should have " - "thrown.") + harness.ok( + threw, + "A nullable union type with a nullable member type should have " "thrown.", + ) parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface NullableInUnionNullableUnionHelper { }; interface NullableInUnionNullableUnion { undefined foo(((object? or DOMString) or NullableInUnionNullableUnionHelper)? arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable union type with a nullable member type should have " - "thrown.") + harness.ok( + threw, + "A nullable union type with a nullable member type should have " "thrown.", + ) diff --git a/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py b/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py index 3a1369abd02..effede391cb 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py @@ -2,23 +2,27 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestUSVString { attribute USVString svs; }; - """) + """ + ) - results = parser.finish(); + results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestUSVString", - "Interface has the right QName") - harness.check(iface.identifier.name, "TestUSVString", - "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestUSVString", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestUSVString", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members @@ -26,11 +30,11 @@ def WebIDLTest(parser, harness): attr = members[0] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestUSVString::svs", - "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestUSVString::svs", "Attr has correct QName" + ) harness.check(attr.identifier.name, "svs", "Attr has correct name") - harness.check(str(attr.type), "USVString", - "Attr type is the correct name") + harness.check(str(attr.type), "USVString", "Attr type is the correct name") harness.ok(attr.type.isUSVString(), "Should be USVString type") harness.ok(attr.type.isString(), "Should be String collective type") harness.ok(not attr.type.isDOMString(), "Should be not be DOMString type") diff --git a/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py b/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py index d9a78db2043..3fd3dccd37a 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py @@ -1,9 +1,12 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ callback TestVariadicCallback = any(any... 
arguments); - """) + """ + ) results = parser.finish() diff --git a/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py b/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py index e36eff8b476..06ce09d8236 100644 --- a/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py +++ b/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py @@ -1,61 +1,72 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints1 { undefined foo(byte... arg1, byte arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument followed by required " - "argument.") + harness.ok( + threw, + "Should have thrown on variadic argument followed by required " "argument.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints2 { undefined foo(byte... arg1, optional byte arg2); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument followed by optional " - "argument.") + harness.ok( + threw, + "Should have thrown on variadic argument followed by optional " "argument.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints3 { undefined foo(optional byte... arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument explicitly flagged as " - "optional.") + harness.ok( + threw, + "Should have thrown on variadic argument explicitly flagged as " "optional.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints4 { undefined foo(byte... arg1 = 0); }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/components/script/dom/bindings/codegen/parser/update.sh b/components/script/dom/bindings/codegen/parser/update.sh index dd7803c940c..cec4d6a378e 100755 --- a/components/script/dom/bindings/codegen/parser/update.sh +++ b/components/script/dom/bindings/codegen/parser/update.sh @@ -4,6 +4,7 @@ patch < debug.patch patch < callback-location.patch patch < union-typedef.patch patch < inline.patch +patch < readable-stream.patch wget https://hg.mozilla.org/mozilla-central/archive/tip.zip/dom/bindings/parser/tests/ -O tests.zip rm -r tests diff --git a/components/script/dom/bindings/codegen/ply/ANNOUNCE b/components/script/dom/bindings/codegen/ply/ANNOUNCE new file mode 100644 index 00000000000..c430051cf45 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ANNOUNCE @@ -0,0 +1,40 @@ +January 31, 2017 + + Announcing : PLY-3.10 (Python Lex-Yacc) + + http://www.dabeaz.com/ply + +I'm pleased to announce PLY-3.10--a pure Python implementation of the +common parsing tools lex and yacc. PLY-3.10 is a minor bug fix +release. It supports both Python 2 and Python 3. + +If you are new to PLY, here are a few highlights: + +- PLY is closely modeled after traditional lex/yacc. If you know how + to use these or similar tools in other languages, you will find + PLY to be comparable. + +- PLY provides very extensive error reporting and diagnostic + information to assist in parser construction. The original + implementation was developed for instructional purposes. 
As + a result, the system tries to identify the most common types + of errors made by novice users. + +- PLY provides full support for empty productions, error recovery, + precedence rules, and ambiguous grammars. + +- Parsing is based on LR-parsing which is fast, memory efficient, + better suited to large grammars, and which has a number of nice + properties when dealing with syntax errors and other parsing + problems. Currently, PLY can build its parsing tables using + either SLR or LALR(1) algorithms. + +More information about PLY can be obtained on the PLY webpage at: + + http://www.dabeaz.com/ply + +PLY is freely available. + +Cheers, + +David Beazley (http://www.dabeaz.com) \ No newline at end of file diff --git a/components/script/dom/bindings/codegen/ply/CHANGES b/components/script/dom/bindings/codegen/ply/CHANGES new file mode 100644 index 00000000000..815c23184e4 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/CHANGES @@ -0,0 +1,1394 @@ +Version 3.10 +--------------------- +01/31/17: beazley + Changed grammar signature computation to not involve hashing + functions. Parts are just combined into a big string. + +10/07/16: beazley + Fixed Issue #101: Incorrect shift-reduce conflict resolution with + precedence specifier. + + PLY was incorrectly resolving shift-reduce conflicts in certain + cases. For example, in the example/calc/calc.py example, you + could trigger it doing this: + + calc > -3 - 4 + 1 (correct answer should be -7) + calc > + + Issue and suggested patch contributed by https://github.com/RomaVis + +Version 3.9 +--------------------- +08/30/16: beazley + Exposed the parser state number as the parser.state attribute + in productions and error functions. For example: + + def p_somerule(p): + ''' + rule : A B C + ''' + print('State:', p.parser.state) + + May address issue #65 (publish current state in error callback). + +08/30/16: beazley + Fixed Issue #88. Python3 compatibility with ply/cpp. + +08/30/16: beazley + Fixed Issue #93. Ply can crash if SyntaxError is raised inside + a production. Not actually sure if the original implementation + worked as documented at all. Yacc has been modified to follow + the spec as outlined in the CHANGES noted for 11/27/07 below. + +08/30/16: beazley + Fixed Issue #97. Failure with code validation when the original + source files aren't present. Validation step now ignores + the missing file. + +08/30/16: beazley + Minor fixes to version numbers. + +Version 3.8 +--------------------- +10/02/15: beazley + Fixed issues related to Python 3.5. Patch contributed by Barry Warsaw. + +Version 3.7 +--------------------- +08/25/15: beazley + Fixed problems when reading table files from pickled data. + +05/07/15: beazley + Fixed regression in handling of table modules if specified as module + objects. See https://github.com/dabeaz/ply/issues/63 + +Version 3.6 +--------------------- +04/25/15: beazley + If PLY is unable to create the 'parser.out' or 'parsetab.py' files due + to permission issues, it now just issues a warning message and + continues to operate. This could happen if a module using PLY + is installed in a funny way where tables have to be regenerated, but + for whatever reason, the user doesn't have write permission on + the directory where PLY wants to put them. + +04/24/15: beazley + Fixed some issues related to use of packages and table file + modules. 
Just to emphasize, PLY now generates its special + files such as 'parsetab.py' and 'lextab.py' in the *SAME* + directory as the source file that uses lex() and yacc(). + + If for some reason, you want to change the name of the table + module, use the tabmodule and lextab options: + + lexer = lex.lex(lextab='spamlextab') + parser = yacc.yacc(tabmodule='spamparsetab') + + If you specify a simple name as shown, the module will still be + created in the same directory as the file invoking lex() or yacc(). + If you want the table files to be placed into a different package, + then give a fully qualified package name. For example: + + lexer = lex.lex(lextab='pkgname.files.lextab') + parser = yacc.yacc(tabmodule='pkgname.files.parsetab') + + For this to work, 'pkgname.files' must already exist as a valid + Python package (i.e., the directories must already exist and be + set up with the proper __init__.py files, etc.). + +Version 3.5 +--------------------- +04/21/15: beazley + Added support for defaulted_states in the parser. A + defaulted_state is a state where the only legal action is a + reduction of a single grammar rule across all valid input + tokens. For such states, the rule is reduced and the + reading of the next lookahead token is delayed until it is + actually needed at a later point in time. + + This delay in consuming the next lookahead token is a + potentially important feature in advanced parsing + applications that require tight interaction between the + lexer and the parser. For example, a grammar rule change + modify the lexer state upon reduction and have such changes + take effect before the next input token is read. + + *** POTENTIAL INCOMPATIBILITY *** + One potential danger of defaulted_states is that syntax + errors might be deferred to a a later point of processing + than where they were detected in past versions of PLY. + Thus, it's possible that your error handling could change + slightly on the same inputs. defaulted_states do not change + the overall parsing of the input (i.e., the same grammar is + accepted). + + If for some reason, you need to disable defaulted states, + you can do this: + + parser = yacc.yacc() + parser.defaulted_states = {} + +04/21/15: beazley + Fixed debug logging in the parser. It wasn't properly reporting goto states + on grammar rule reductions. + +04/20/15: beazley + Added actions to be defined to character literals (Issue #32). For example: + + literals = [ '{', '}' ] + + def t_lbrace(t): + r'\{' + # Some action + t.type = '{' + return t + + def t_rbrace(t): + r'\}' + # Some action + t.type = '}' + return t + +04/19/15: beazley + Import of the 'parsetab.py' file is now constrained to only consider the + directory specified by the outputdir argument to yacc(). If not supplied, + the import will only consider the directory in which the grammar is defined. + This should greatly reduce problems with the wrong parsetab.py file being + imported by mistake. For example, if it's found somewhere else on the path + by accident. + + *** POTENTIAL INCOMPATIBILITY *** It's possible that this might break some + packaging/deployment setup if PLY was instructed to place its parsetab.py + in a different location. You'll have to specify a proper outputdir= argument + to yacc() to fix this if needed. + +04/19/15: beazley + Changed default output directory to be the same as that in which the + yacc grammar is defined. If your grammar is in a file 'calc.py', + then the parsetab.py and parser.out files should be generated in the + same directory as that file. 
The destination directory can be changed + using the outputdir= argument to yacc(). + +04/19/15: beazley + Changed the parsetab.py file signature slightly so that the parsetab won't + regenerate if created on a different major version of Python (ie., a + parsetab created on Python 2 will work with Python 3). + +04/16/15: beazley + Fixed Issue #44 call_errorfunc() should return the result of errorfunc() + +04/16/15: beazley + Support for versions of Python <2.7 is officially dropped. PLY may work, but + the unit tests requires Python 2.7 or newer. + +04/16/15: beazley + Fixed bug related to calling yacc(start=...). PLY wasn't regenerating the + table file correctly for this case. + +04/16/15: beazley + Added skipped tests for PyPy and Java. Related to use of Python's -O option. + +05/29/13: beazley + Added filter to make unit tests pass under 'python -3'. + Reported by Neil Muller. + +05/29/13: beazley + Fixed CPP_INTEGER regex in ply/cpp.py (Issue 21). + Reported by @vbraun. + +05/29/13: beazley + Fixed yacc validation bugs when from __future__ import unicode_literals + is being used. Reported by Kenn Knowles. + +05/29/13: beazley + Added support for Travis-CI. Contributed by Kenn Knowles. + +05/29/13: beazley + Added a .gitignore file. Suggested by Kenn Knowles. + +05/29/13: beazley + Fixed validation problems for source files that include a + different source code encoding specifier. Fix relies on + the inspect module. Should work on Python 2.6 and newer. + Not sure about older versions of Python. + Contributed by Michael Droettboom + +05/21/13: beazley + Fixed unit tests for yacc to eliminate random failures due to dict hash value + randomization in Python 3.3 + Reported by Arfrever + +10/15/12: beazley + Fixed comment whitespace processing bugs in ply/cpp.py. + Reported by Alexei Pososin. + +10/15/12: beazley + Fixed token names in ply/ctokens.py to match rule names. + Reported by Alexei Pososin. + +04/26/12: beazley + Changes to functions available in panic mode error recover. In previous versions + of PLY, the following global functions were available for use in the p_error() rule: + + yacc.errok() # Reset error state + yacc.token() # Get the next token + yacc.restart() # Reset the parsing stack + + The use of global variables was problematic for code involving multiple parsers + and frankly was a poor design overall. These functions have been moved to methods + of the parser instance created by the yacc() function. You should write code like + this: + + def p_error(p): + ... + parser.errok() + + parser = yacc.yacc() + + *** POTENTIAL INCOMPATIBILITY *** The original global functions now issue a + DeprecationWarning. + +04/19/12: beazley + Fixed some problems with line and position tracking and the use of error + symbols. If you have a grammar rule involving an error rule like this: + + def p_assignment_bad(p): + '''assignment : location EQUALS error SEMI''' + ... + + You can now do line and position tracking on the error token. For example: + + def p_assignment_bad(p): + '''assignment : location EQUALS error SEMI''' + start_line = p.lineno(3) + start_pos = p.lexpos(3) + + If the trackng=True option is supplied to parse(), you can additionally get + spans: + + def p_assignment_bad(p): + '''assignment : location EQUALS error SEMI''' + start_line, end_line = p.linespan(3) + start_pos, end_pos = p.lexspan(3) + + Note that error handling is still a hairy thing in PLY. This won't work + unless your lexer is providing accurate information. Please report bugs. 
+ Suggested by a bug reported by Davis Herring. + +04/18/12: beazley + Change to doc string handling in lex module. Regex patterns are now first + pulled from a function's .regex attribute. If that doesn't exist, then + .doc is checked as a fallback. The @TOKEN decorator now sets the .regex + attribute of a function instead of its doc string. + Changed suggested by Kristoffer Ellersgaard Koch. + +04/18/12: beazley + Fixed issue #1: Fixed _tabversion. It should use __tabversion__ instead of __version__ + Reported by Daniele Tricoli + +04/18/12: beazley + Fixed issue #8: Literals empty list causes IndexError + Reported by Walter Nissen. + +04/18/12: beazley + Fixed issue #12: Typo in code snippet in documentation + Reported by florianschanda. + +04/18/12: beazley + Fixed issue #10: Correctly escape t_XOREQUAL pattern. + Reported by Andy Kittner. + +Version 3.4 +--------------------- +02/17/11: beazley + Minor patch to make cpp.py compatible with Python 3. Note: This + is an experimental file not currently used by the rest of PLY. + +02/17/11: beazley + Fixed setup.py trove classifiers to properly list PLY as + Python 3 compatible. + +01/02/11: beazley + Migration of repository to github. + +Version 3.3 +----------------------------- +08/25/09: beazley + Fixed issue 15 related to the set_lineno() method in yacc. Reported by + mdsherry. + +08/25/09: beazley + Fixed a bug related to regular expression compilation flags not being + properly stored in lextab.py files created by the lexer when running + in optimize mode. Reported by Bruce Frederiksen. + + +Version 3.2 +----------------------------- +03/24/09: beazley + Added an extra check to not print duplicated warning messages + about reduce/reduce conflicts. + +03/24/09: beazley + Switched PLY over to a BSD-license. + +03/23/09: beazley + Performance optimization. Discovered a few places to make + speedups in LR table generation. + +03/23/09: beazley + New warning message. PLY now warns about rules never + reduced due to reduce/reduce conflicts. Suggested by + Bruce Frederiksen. + +03/23/09: beazley + Some clean-up of warning messages related to reduce/reduce errors. + +03/23/09: beazley + Added a new picklefile option to yacc() to write the parsing + tables to a filename using the pickle module. Here is how + it works: + + yacc(picklefile="parsetab.p") + + This option can be used if the normal parsetab.py file is + extremely large. For example, on jython, it is impossible + to read parsing tables if the parsetab.py exceeds a certain + threshold. + + The filename supplied to the picklefile option is opened + relative to the current working directory of the Python + interpreter. If you need to refer to the file elsewhere, + you will need to supply an absolute or relative path. + + For maximum portability, the pickle file is written + using protocol 0. + +03/13/09: beazley + Fixed a bug in parser.out generation where the rule numbers + where off by one. + +03/13/09: beazley + Fixed a string formatting bug with one of the error messages. + Reported by Richard Reitmeyer + +Version 3.1 +----------------------------- +02/28/09: beazley + Fixed broken start argument to yacc(). PLY-3.0 broke this + feature by accident. + +02/28/09: beazley + Fixed debugging output. yacc() no longer reports shift/reduce + or reduce/reduce conflicts if debugging is turned off. This + restores similar behavior in PLY-2.5. Reported by Andrew Waters. 
+ +Version 3.0 +----------------------------- +02/03/09: beazley + Fixed missing lexer attribute on certain tokens when + invoking the parser p_error() function. Reported by + Bart Whiteley. + +02/02/09: beazley + The lex() command now does all error-reporting and diagonistics + using the logging module interface. Pass in a Logger object + using the errorlog parameter to specify a different logger. + +02/02/09: beazley + Refactored ply.lex to use a more object-oriented and organized + approach to collecting lexer information. + +02/01/09: beazley + Removed the nowarn option from lex(). All output is controlled + by passing in a logger object. Just pass in a logger with a high + level setting to suppress output. This argument was never + documented to begin with so hopefully no one was relying upon it. + +02/01/09: beazley + Discovered and removed a dead if-statement in the lexer. This + resulted in a 6-7% speedup in lexing when I tested it. + +01/13/09: beazley + Minor change to the procedure for signalling a syntax error in a + production rule. A normal SyntaxError exception should be raised + instead of yacc.SyntaxError. + +01/13/09: beazley + Added a new method p.set_lineno(n,lineno) that can be used to set the + line number of symbol n in grammar rules. This simplifies manual + tracking of line numbers. + +01/11/09: beazley + Vastly improved debugging support for yacc.parse(). Instead of passing + debug as an integer, you can supply a Logging object (see the logging + module). Messages will be generated at the ERROR, INFO, and DEBUG + logging levels, each level providing progressively more information. + The debugging trace also shows states, grammar rule, values passed + into grammar rules, and the result of each reduction. + +01/09/09: beazley + The yacc() command now does all error-reporting and diagnostics using + the interface of the logging module. Use the errorlog parameter to + specify a logging object for error messages. Use the debuglog parameter + to specify a logging object for the 'parser.out' output. + +01/09/09: beazley + *HUGE* refactoring of the the ply.yacc() implementation. The high-level + user interface is backwards compatible, but the internals are completely + reorganized into classes. No more global variables. The internals + are also more extensible. For example, you can use the classes to + construct a LALR(1) parser in an entirely different manner than + what is currently the case. Documentation is forthcoming. + +01/07/09: beazley + Various cleanup and refactoring of yacc internals. + +01/06/09: beazley + Fixed a bug with precedence assignment. yacc was assigning the precedence + each rule based on the left-most token, when in fact, it should have been + using the right-most token. Reported by Bruce Frederiksen. + +11/27/08: beazley + Numerous changes to support Python 3.0 including removal of deprecated + statements (e.g., has_key) and the additional of compatibility code + to emulate features from Python 2 that have been removed, but which + are needed. Fixed the unit testing suite to work with Python 3.0. + The code should be backwards compatible with Python 2. + +11/26/08: beazley + Loosened the rules on what kind of objects can be passed in as the + "module" parameter to lex() and yacc(). Previously, you could only use + a module or an instance. Now, PLY just uses dir() to get a list of + symbols on whatever the object is without regard for its type. + +11/26/08: beazley + Changed all except: statements to be compatible with Python2.x/3.x syntax. 
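The 02/02/09, 01/11/09, and 01/09/09 entries above describe routing lex/yacc diagnostics through the standard logging module via the errorlog, debuglog, and debug parameters. A rough illustrative sketch of how those hooks fit together (the toy grammar and the "parse.log" filename are invented for this example):

    import logging
    import ply.lex as lex
    import ply.yacc as yacc

    logging.basicConfig(level=logging.DEBUG, filename="parse.log")
    log = logging.getLogger()

    tokens = ('NUMBER', 'PLUS')
    t_PLUS = r'\+'
    t_ignore = ' \t'

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(t):
        t.lexer.skip(1)

    def p_expr_plus(p):
        'expr : expr PLUS NUMBER'
        p[0] = p[1] + p[3]

    def p_expr_number(p):
        'expr : NUMBER'
        p[0] = p[1]

    def p_error(p):
        log.error("Syntax error at %r", p)

    # Route errors and table-construction diagnostics to the logger,
    # and get a per-reduction parse trace at the DEBUG level.
    lexer = lex.lex(errorlog=log)
    parser = yacc.yacc(errorlog=log, debuglog=log)
    result = parser.parse("1 + 2 + 3", debug=log)

Per the 02/01/09 note above, passing a logger with a high level setting is also the intended way to suppress output entirely.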
+ +11/26/08: beazley + Changed all raise Exception, value statements to raise Exception(value) for + forward compatibility. + +11/26/08: beazley + Removed all print statements from lex and yacc, using sys.stdout and sys.stderr + directly. Preparation for Python 3.0 support. + +11/04/08: beazley + Fixed a bug with referring to symbols on the the parsing stack using negative + indices. + +05/29/08: beazley + Completely revamped the testing system to use the unittest module for everything. + Added additional tests to cover new errors/warnings. + +Version 2.5 +----------------------------- +05/28/08: beazley + Fixed a bug with writing lex-tables in optimized mode and start states. + Reported by Kevin Henry. + +Version 2.4 +----------------------------- +05/04/08: beazley + A version number is now embedded in the table file signature so that + yacc can more gracefully accomodate changes to the output format + in the future. + +05/04/08: beazley + Removed undocumented .pushback() method on grammar productions. I'm + not sure this ever worked and can't recall ever using it. Might have + been an abandoned idea that never really got fleshed out. This + feature was never described or tested so removing it is hopefully + harmless. + +05/04/08: beazley + Added extra error checking to yacc() to detect precedence rules defined + for undefined terminal symbols. This allows yacc() to detect a potential + problem that can be really tricky to debug if no warning message or error + message is generated about it. + +05/04/08: beazley + lex() now has an outputdir that can specify the output directory for + tables when running in optimize mode. For example: + + lexer = lex.lex(optimize=True, lextab="ltab", outputdir="foo/bar") + + The behavior of specifying a table module and output directory are + more aligned with the behavior of yacc(). + +05/04/08: beazley + [Issue 9] + Fixed filename bug in when specifying the modulename in lex() and yacc(). + If you specified options such as the following: + + parser = yacc.yacc(tabmodule="foo.bar.parsetab",outputdir="foo/bar") + + yacc would create a file "foo.bar.parsetab.py" in the given directory. + Now, it simply generates a file "parsetab.py" in that directory. + Bug reported by cptbinho. + +05/04/08: beazley + Slight modification to lex() and yacc() to allow their table files + to be loaded from a previously loaded module. This might make + it easier to load the parsing tables from a complicated package + structure. For example: + + import foo.bar.spam.parsetab as parsetab + parser = yacc.yacc(tabmodule=parsetab) + + Note: lex and yacc will never regenerate the table file if used + in the form---you will get a warning message instead. + This idea suggested by Brian Clapper. + + +04/28/08: beazley + Fixed a big with p_error() functions being picked up correctly + when running in yacc(optimize=1) mode. Patch contributed by + Bart Whiteley. + +02/28/08: beazley + Fixed a bug with 'nonassoc' precedence rules. Basically the + non-precedence was being ignored and not producing the correct + run-time behavior in the parser. + +02/16/08: beazley + Slight relaxation of what the input() method to a lexer will + accept as a string. Instead of testing the input to see + if the input is a string or unicode string, it checks to see + if the input object looks like it contains string data. + This change makes it possible to pass string-like objects + in as input. For example, the object returned by mmap. 
+ + import mmap, os + data = mmap.mmap(os.open(filename,os.O_RDONLY), + os.path.getsize(filename), + access=mmap.ACCESS_READ) + lexer.input(data) + + +11/29/07: beazley + Modification of ply.lex to allow token functions to aliased. + This is subtle, but it makes it easier to create libraries and + to reuse token specifications. For example, suppose you defined + a function like this: + + def number(t): + r'\d+' + t.value = int(t.value) + return t + + This change would allow you to define a token rule as follows: + + t_NUMBER = number + + In this case, the token type will be set to 'NUMBER' and use + the associated number() function to process tokens. + +11/28/07: beazley + Slight modification to lex and yacc to grab symbols from both + the local and global dictionaries of the caller. This + modification allows lexers and parsers to be defined using + inner functions and closures. + +11/28/07: beazley + Performance optimization: The lexer.lexmatch and t.lexer + attributes are no longer set for lexer tokens that are not + defined by functions. The only normal use of these attributes + would be in lexer rules that need to perform some kind of + special processing. Thus, it doesn't make any sense to set + them on every token. + + *** POTENTIAL INCOMPATIBILITY *** This might break code + that is mucking around with internal lexer state in some + sort of magical way. + +11/27/07: beazley + Added the ability to put the parser into error-handling mode + from within a normal production. To do this, simply raise + a yacc.SyntaxError exception like this: + + def p_some_production(p): + 'some_production : prod1 prod2' + ... + raise yacc.SyntaxError # Signal an error + + A number of things happen after this occurs: + + - The last symbol shifted onto the symbol stack is discarded + and parser state backed up to what it was before the + the rule reduction. + + - The current lookahead symbol is saved and replaced by + the 'error' symbol. + + - The parser enters error recovery mode where it tries + to either reduce the 'error' rule or it starts + discarding items off of the stack until the parser + resets. + + When an error is manually set, the parser does *not* call + the p_error() function (if any is defined). + *** NEW FEATURE *** Suggested on the mailing list + +11/27/07: beazley + Fixed structure bug in examples/ansic. Reported by Dion Blazakis. + +11/27/07: beazley + Fixed a bug in the lexer related to start conditions and ignored + token rules. If a rule was defined that changed state, but + returned no token, the lexer could be left in an inconsistent + state. Reported by + +11/27/07: beazley + Modified setup.py to support Python Eggs. Patch contributed by + Simon Cross. + +11/09/07: beazely + Fixed a bug in error handling in yacc. If a syntax error occurred and the + parser rolled the entire parse stack back, the parser would be left in in + inconsistent state that would cause it to trigger incorrect actions on + subsequent input. Reported by Ton Biegstraaten, Justin King, and others. + +11/09/07: beazley + Fixed a bug when passing empty input strings to yacc.parse(). This + would result in an error message about "No input given". Reported + by Andrew Dalke. + +Version 2.3 +----------------------------- +02/20/07: beazley + Fixed a bug with character literals if the literal '.' appeared as the + last symbol of a grammar rule. Reported by Ales Smrcka. + +02/19/07: beazley + Warning messages are now redirected to stderr instead of being printed + to standard output. 
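The 11/28/07 entry above notes that lex() and yacc() now pick up rule definitions from the caller's local namespace as well, so a lexer can be built inside a function and close over its arguments. A minimal sketch of that pattern (the reserved-word table and token names are made up for this example):

    import ply.lex as lex

    def make_lexer(reserved):
        tokens = ('ID', 'NUMBER') + tuple(reserved.values())
        t_ignore = ' \t'

        def t_ID(t):
            r'[A-Za-z_][A-Za-z0-9_]*'
            t.type = reserved.get(t.value, 'ID')   # closes over 'reserved'
            return t

        def t_NUMBER(t):
            r'\d+'
            t.value = int(t.value)
            return t

        def t_error(t):
            t.lexer.skip(1)

        # lex() finds the rules above in this function's local scope.
        return lex.lex()

    lexer = make_lexer({'if': 'IF', 'else': 'ELSE'})
    lexer.input("if x else 42")
    for tok in lexer:
        print(tok.type, tok.value)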
+ +02/19/07: beazley + Added a warning message to lex.py if it detects a literal backslash + character inside the t_ignore declaration. This is to help + problems that might occur if someone accidentally defines t_ignore + as a Python raw string. For example: + + t_ignore = r' \t' + + The idea for this is from an email I received from David Cimimi who + reported bizarre behavior in lexing as a result of defining t_ignore + as a raw string by accident. + +02/18/07: beazley + Performance improvements. Made some changes to the internal + table organization and LR parser to improve parsing performance. + +02/18/07: beazley + Automatic tracking of line number and position information must now be + enabled by a special flag to parse(). For example: + + yacc.parse(data,tracking=True) + + In many applications, it's just not that important to have the + parser automatically track all line numbers. By making this an + optional feature, it allows the parser to run significantly faster + (more than a 20% speed increase in many cases). Note: positional + information is always available for raw tokens---this change only + applies to positional information associated with nonterminal + grammar symbols. + *** POTENTIAL INCOMPATIBILITY *** + +02/18/07: beazley + Yacc no longer supports extended slices of grammar productions. + However, it does support regular slices. For example: + + def p_foo(p): + '''foo: a b c d e''' + p[0] = p[1:3] + + This change is a performance improvement to the parser--it streamlines + normal access to the grammar values since slices are now handled in + a __getslice__() method as opposed to __getitem__(). + +02/12/07: beazley + Fixed a bug in the handling of token names when combined with + start conditions. Bug reported by Todd O'Bryan. + +Version 2.2 +------------------------------ +11/01/06: beazley + Added lexpos() and lexspan() methods to grammar symbols. These + mirror the same functionality of lineno() and linespan(). For + example: + + def p_expr(p): + 'expr : expr PLUS expr' + p.lexpos(1) # Lexing position of left-hand-expression + p.lexpos(1) # Lexing position of PLUS + start,end = p.lexspan(3) # Lexing range of right hand expression + +11/01/06: beazley + Minor change to error handling. The recommended way to skip characters + in the input is to use t.lexer.skip() as shown here: + + def t_error(t): + print "Illegal character '%s'" % t.value[0] + t.lexer.skip(1) + + The old approach of just using t.skip(1) will still work, but won't + be documented. + +10/31/06: beazley + Discarded tokens can now be specified as simple strings instead of + functions. To do this, simply include the text "ignore_" in the + token declaration. For example: + + t_ignore_cppcomment = r'//.*' + + Previously, this had to be done with a function. For example: + + def t_ignore_cppcomment(t): + r'//.*' + pass + + If start conditions/states are being used, state names should appear + before the "ignore_" text. + +10/19/06: beazley + The Lex module now provides support for flex-style start conditions + as described at http://www.gnu.org/software/flex/manual/html_chapter/flex_11.html. + Please refer to this document to understand this change note. Refer to + the PLY documentation for PLY-specific explanation of how this works. + + To use start conditions, you first need to declare a set of states in + your lexer file: + + states = ( + ('foo','exclusive'), + ('bar','inclusive') + ) + + This serves the same role as the %s and %x specifiers in flex. 
+ + One a state has been declared, tokens for that state can be + declared by defining rules of the form t_state_TOK. For example: + + t_PLUS = '\+' # Rule defined in INITIAL state + t_foo_NUM = '\d+' # Rule defined in foo state + t_bar_NUM = '\d+' # Rule defined in bar state + + t_foo_bar_NUM = '\d+' # Rule defined in both foo and bar + t_ANY_NUM = '\d+' # Rule defined in all states + + In addition to defining tokens for each state, the t_ignore and t_error + specifications can be customized for specific states. For example: + + t_foo_ignore = " " # Ignored characters for foo state + def t_bar_error(t): + # Handle errors in bar state + + With token rules, the following methods can be used to change states + + def t_TOKNAME(t): + t.lexer.begin('foo') # Begin state 'foo' + t.lexer.push_state('foo') # Begin state 'foo', push old state + # onto a stack + t.lexer.pop_state() # Restore previous state + t.lexer.current_state() # Returns name of current state + + These methods mirror the BEGIN(), yy_push_state(), yy_pop_state(), and + yy_top_state() functions in flex. + + The use of start states can be used as one way to write sub-lexers. + For example, the lexer or parser might instruct the lexer to start + generating a different set of tokens depending on the context. + + example/yply/ylex.py shows the use of start states to grab C/C++ + code fragments out of traditional yacc specification files. + + *** NEW FEATURE *** Suggested by Daniel Larraz with whom I also + discussed various aspects of the design. + +10/19/06: beazley + Minor change to the way in which yacc.py was reporting shift/reduce + conflicts. Although the underlying LALR(1) algorithm was correct, + PLY was under-reporting the number of conflicts compared to yacc/bison + when precedence rules were in effect. This change should make PLY + report the same number of conflicts as yacc. + +10/19/06: beazley + Modified yacc so that grammar rules could also include the '-' + character. For example: + + def p_expr_list(p): + 'expression-list : expression-list expression' + + Suggested by Oldrich Jedlicka. + +10/18/06: beazley + Attribute lexer.lexmatch added so that token rules can access the re + match object that was generated. For example: + + def t_FOO(t): + r'some regex' + m = t.lexer.lexmatch + # Do something with m + + + This may be useful if you want to access named groups specified within + the regex for a specific token. Suggested by Oldrich Jedlicka. + +10/16/06: beazley + Changed the error message that results if an illegal character + is encountered and no default error function is defined in lex. + The exception is now more informative about the actual cause of + the error. + +Version 2.1 +------------------------------ +10/02/06: beazley + The last Lexer object built by lex() can be found in lex.lexer. + The last Parser object built by yacc() can be found in yacc.parser. + +10/02/06: beazley + New example added: examples/yply + + This example uses PLY to convert Unix-yacc specification files to + PLY programs with the same grammar. This may be useful if you + want to convert a grammar from bison/yacc to use with PLY. + +10/02/06: beazley + Added support for a start symbol to be specified in the yacc + input file itself. Just do this: + + start = 'name' + + where 'name' matches some grammar rule. For example: + + def p_name(p): + 'name : A B C' + ... + + This mirrors the functionality of the yacc %start specifier. 
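The 10/18/06 entry above exposes the lexer.lexmatch attribute but stops short of showing the named-group use it mentions. A small sketch of pulling named groups out of a token's match object (the ASSIGN rule and group names are invented for this example; group names should be unique across rules, since all rules are combined into one master regular expression):

    import ply.lex as lex

    tokens = ('ASSIGN',)
    t_ignore = ' \t'

    def t_ASSIGN(t):
        r'(?P<name>[A-Za-z_]\w*)=(?P<value>\d+)'
        m = t.lexer.lexmatch                       # re match object for this rule
        t.value = (m.group('name'), int(m.group('value')))
        return t

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input("answer=42")
    print(lexer.token().value)                     # ('answer', 42)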
+ +09/30/06: beazley + Some new examples added.: + + examples/GardenSnake : A simple indentation based language similar + to Python. Shows how you might handle + whitespace. Contributed by Andrew Dalke. + + examples/BASIC : An implementation of 1964 Dartmouth BASIC. + Contributed by Dave against his better + judgement. + +09/28/06: beazley + Minor patch to allow named groups to be used in lex regular + expression rules. For example: + + t_QSTRING = r'''(?P['"]).*?(?P=quote)''' + + Patch submitted by Adam Ring. + +09/28/06: beazley + LALR(1) is now the default parsing method. To use SLR, use + yacc.yacc(method="SLR"). Note: there is no performance impact + on parsing when using LALR(1) instead of SLR. However, constructing + the parsing tables will take a little longer. + +09/26/06: beazley + Change to line number tracking. To modify line numbers, modify + the line number of the lexer itself. For example: + + def t_NEWLINE(t): + r'\n' + t.lexer.lineno += 1 + + This modification is both cleanup and a performance optimization. + In past versions, lex was monitoring every token for changes in + the line number. This extra processing is unnecessary for a vast + majority of tokens. Thus, this new approach cleans it up a bit. + + *** POTENTIAL INCOMPATIBILITY *** + You will need to change code in your lexer that updates the line + number. For example, "t.lineno += 1" becomes "t.lexer.lineno += 1" + +09/26/06: beazley + Added the lexing position to tokens as an attribute lexpos. This + is the raw index into the input text at which a token appears. + This information can be used to compute column numbers and other + details (e.g., scan backwards from lexpos to the first newline + to get a column position). + +09/25/06: beazley + Changed the name of the __copy__() method on the Lexer class + to clone(). This is used to clone a Lexer object (e.g., if + you're running different lexers at the same time). + +09/21/06: beazley + Limitations related to the use of the re module have been eliminated. + Several users reported problems with regular expressions exceeding + more than 100 named groups. To solve this, lex.py is now capable + of automatically splitting its master regular regular expression into + smaller expressions as needed. This should, in theory, make it + possible to specify an arbitrarily large number of tokens. + +09/21/06: beazley + Improved error checking in lex.py. Rules that match the empty string + are now rejected (otherwise they cause the lexer to enter an infinite + loop). An extra check for rules containing '#' has also been added. + Since lex compiles regular expressions in verbose mode, '#' is interpreted + as a regex comment, it is critical to use '\#' instead. + +09/18/06: beazley + Added a @TOKEN decorator function to lex.py that can be used to + define token rules where the documentation string might be computed + in some way. + + digit = r'([0-9])' + nondigit = r'([_A-Za-z])' + identifier = r'(' + nondigit + r'(' + digit + r'|' + nondigit + r')*)' + + from ply.lex import TOKEN + + @TOKEN(identifier) + def t_ID(t): + # Do whatever + + The @TOKEN decorator merely sets the documentation string of the + associated token function as needed for lex to work. + + Note: An alternative solution is the following: + + def t_ID(t): + # Do whatever + + t_ID.__doc__ = identifier + + Note: Decorators require the use of Python 2.4 or later. If compatibility + with old versions is needed, use the latter solution. + + The need for this feature was suggested by Cem Karan. 
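The 09/26/06 entry above suggests computing column numbers by scanning backwards from a token's lexpos to the previous newline. A small helper along those lines (the function name and the 1-based column convention are just choices made for this sketch):

    def find_column(text, token):
        # 'text' is the full input string (e.g. lexer.lexdata);
        # scan back from the token to the most recent newline.
        line_start = text.rfind('\n', 0, token.lexpos) + 1
        return (token.lexpos - line_start) + 1

    # e.g. inside an error rule:  col = find_column(t.lexer.lexdata, t)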
+ +09/14/06: beazley + Support for single-character literal tokens has been added to yacc. + These literals must be enclosed in quotes. For example: + + def p_expr(p): + "expr : expr '+' expr" + ... + + def p_expr(p): + 'expr : expr "-" expr' + ... + + In addition to this, it is necessary to tell the lexer module about + literal characters. This is done by defining the variable 'literals' + as a list of characters. This should be defined in the module that + invokes the lex.lex() function. For example: + + literals = ['+','-','*','/','(',')','='] + + or simply + + literals = '+=*/()=' + + It is important to note that literals can only be a single character. + When the lexer fails to match a token using its normal regular expression + rules, it will check the current character against the literal list. + If found, it will be returned with a token type set to match the literal + character. Otherwise, an illegal character will be signalled. + + +09/14/06: beazley + Modified PLY to install itself as a proper Python package called 'ply'. + This will make it a little more friendly to other modules. This + changes the usage of PLY only slightly. Just do this to import the + modules + + import ply.lex as lex + import ply.yacc as yacc + + Alternatively, you can do this: + + from ply import * + + Which imports both the lex and yacc modules. + Change suggested by Lee June. + +09/13/06: beazley + Changed the handling of negative indices when used in production rules. + A negative production index now accesses already parsed symbols on the + parsing stack. For example, + + def p_foo(p): + "foo: A B C D" + print p[1] # Value of 'A' symbol + print p[2] # Value of 'B' symbol + print p[-1] # Value of whatever symbol appears before A + # on the parsing stack. + + p[0] = some_val # Sets the value of the 'foo' grammer symbol + + This behavior makes it easier to work with embedded actions within the + parsing rules. For example, in C-yacc, it is possible to write code like + this: + + bar: A { printf("seen an A = %d\n", $1); } B { do_stuff; } + + In this example, the printf() code executes immediately after A has been + parsed. Within the embedded action code, $1 refers to the A symbol on + the stack. + + To perform this equivalent action in PLY, you need to write a pair + of rules like this: + + def p_bar(p): + "bar : A seen_A B" + do_stuff + + def p_seen_A(p): + "seen_A :" + print "seen an A =", p[-1] + + The second rule "seen_A" is merely a empty production which should be + reduced as soon as A is parsed in the "bar" rule above. The use + of the negative index p[-1] is used to access whatever symbol appeared + before the seen_A symbol. + + This feature also makes it possible to support inherited attributes. + For example: + + def p_decl(p): + "decl : scope name" + + def p_scope(p): + """scope : GLOBAL + | LOCAL""" + p[0] = p[1] + + def p_name(p): + "name : ID" + if p[-1] == "GLOBAL": + # ... + else if p[-1] == "LOCAL": + #... + + In this case, the name rule is inheriting an attribute from the + scope declaration that precedes it. + + *** POTENTIAL INCOMPATIBILITY *** + If you are currently using negative indices within existing grammar rules, + your code will break. This should be extremely rare if non-existent in + most cases. The argument to various grammar rules is not usually not + processed in the same way as a list of items. + +Version 2.0 +------------------------------ +09/07/06: beazley + Major cleanup and refactoring of the LR table generation code. 
Both SLR + and LALR(1) table generation is now performed by the same code base with + only minor extensions for extra LALR(1) processing. + +09/07/06: beazley + Completely reimplemented the entire LALR(1) parsing engine to use the + DeRemer and Pennello algorithm for calculating lookahead sets. This + significantly improves the performance of generating LALR(1) tables + and has the added feature of actually working correctly! If you + experienced weird behavior with LALR(1) in prior releases, this should + hopefully resolve all of those problems. Many thanks to + Andrew Waters and Markus Schoepflin for submitting bug reports + and helping me test out the revised LALR(1) support. + +Version 1.8 +------------------------------ +08/02/06: beazley + Fixed a problem related to the handling of default actions in LALR(1) + parsing. If you experienced subtle and/or bizarre behavior when trying + to use the LALR(1) engine, this may correct those problems. Patch + contributed by Russ Cox. Note: This patch has been superceded by + revisions for LALR(1) parsing in Ply-2.0. + +08/02/06: beazley + Added support for slicing of productions in yacc. + Patch contributed by Patrick Mezard. + +Version 1.7 +------------------------------ +03/02/06: beazley + Fixed infinite recursion problem ReduceToTerminals() function that + would sometimes come up in LALR(1) table generation. Reported by + Markus Schoepflin. + +03/01/06: beazley + Added "reflags" argument to lex(). For example: + + lex.lex(reflags=re.UNICODE) + + This can be used to specify optional flags to the re.compile() function + used inside the lexer. This may be necessary for special situations such + as processing Unicode (e.g., if you want escapes like \w and \b to consult + the Unicode character property database). The need for this suggested by + Andreas Jung. + +03/01/06: beazley + Fixed a bug with an uninitialized variable on repeated instantiations of parser + objects when the write_tables=0 argument was used. Reported by Michael Brown. + +03/01/06: beazley + Modified lex.py to accept Unicode strings both as the regular expressions for + tokens and as input. Hopefully this is the only change needed for Unicode support. + Patch contributed by Johan Dahl. + +03/01/06: beazley + Modified the class-based interface to work with new-style or old-style classes. + Patch contributed by Michael Brown (although I tweaked it slightly so it would work + with older versions of Python). + +Version 1.6 +------------------------------ +05/27/05: beazley + Incorporated patch contributed by Christopher Stawarz to fix an extremely + devious bug in LALR(1) parser generation. This patch should fix problems + numerous people reported with LALR parsing. + +05/27/05: beazley + Fixed problem with lex.py copy constructor. Reported by Dave Aitel, Aaron Lav, + and Thad Austin. + +05/27/05: beazley + Added outputdir option to yacc() to control output directory. Contributed + by Christopher Stawarz. + +05/27/05: beazley + Added rununit.py test script to run tests using the Python unittest module. + Contributed by Miki Tebeka. + +Version 1.5 +------------------------------ +05/26/04: beazley + Major enhancement. LALR(1) parsing support is now working. + This feature was implemented by Elias Ioup (ezioup@alumni.uchicago.edu) + and optimized by David Beazley. To use LALR(1) parsing do + the following: + + yacc.yacc(method="LALR") + + Computing LALR(1) parsing tables takes about twice as long as + the default SLR method. 
However, LALR(1) allows you to handle + more complex grammars. For example, the ANSI C grammar + (in example/ansic) has 13 shift-reduce conflicts with SLR, but + only has 1 shift-reduce conflict with LALR(1). + +05/20/04: beazley + Added a __len__ method to parser production lists. Can + be used in parser rules like this: + + def p_somerule(p): + """a : B C D + | E F" + if (len(p) == 3): + # Must have been first rule + elif (len(p) == 2): + # Must be second rule + + Suggested by Joshua Gerth and others. + +Version 1.4 +------------------------------ +04/23/04: beazley + Incorporated a variety of patches contributed by Eric Raymond. + These include: + + 0. Cleans up some comments so they don't wrap on an 80-column display. + 1. Directs compiler errors to stderr where they belong. + 2. Implements and documents automatic line counting when \n is ignored. + 3. Changes the way progress messages are dumped when debugging is on. + The new format is both less verbose and conveys more information than + the old, including shift and reduce actions. + +04/23/04: beazley + Added a Python setup.py file to simply installation. Contributed + by Adam Kerrison. + +04/23/04: beazley + Added patches contributed by Adam Kerrison. + + - Some output is now only shown when debugging is enabled. This + means that PLY will be completely silent when not in debugging mode. + + - An optional parameter "write_tables" can be passed to yacc() to + control whether or not parsing tables are written. By default, + it is true, but it can be turned off if you don't want the yacc + table file. Note: disabling this will cause yacc() to regenerate + the parsing table each time. + +04/23/04: beazley + Added patches contributed by David McNab. This patch addes two + features: + + - The parser can be supplied as a class instead of a module. + For an example of this, see the example/classcalc directory. + + - Debugging output can be directed to a filename of the user's + choice. Use + + yacc(debugfile="somefile.out") + + +Version 1.3 +------------------------------ +12/10/02: jmdyck + Various minor adjustments to the code that Dave checked in today. + Updated test/yacc_{inf,unused}.exp to reflect today's changes. + +12/10/02: beazley + Incorporated a variety of minor bug fixes to empty production + handling and infinite recursion checking. Contributed by + Michael Dyck. + +12/10/02: beazley + Removed bogus recover() method call in yacc.restart() + +Version 1.2 +------------------------------ +11/27/02: beazley + Lexer and parser objects are now available as an attribute + of tokens and slices respectively. For example: + + def t_NUMBER(t): + r'\d+' + print t.lexer + + def p_expr_plus(t): + 'expr: expr PLUS expr' + print t.lexer + print t.parser + + This can be used for state management (if needed). + +10/31/02: beazley + Modified yacc.py to work with Python optimize mode. To make + this work, you need to use + + yacc.yacc(optimize=1) + + Furthermore, you need to first run Python in normal mode + to generate the necessary parsetab.py files. After that, + you can use python -O or python -OO. + + Note: optimized mode turns off a lot of error checking. + Only use when you are sure that your grammar is working. + Make sure parsetab.py is up to date! + +10/30/02: beazley + Added cloning of Lexer objects. For example: + + import copy + l = lex.lex() + lc = copy.copy(l) + + l.input("Some text") + lc.input("Some other text") + ... 
+ + This might be useful if the same "lexer" is meant to + be used in different contexts---or if multiple lexers + are running concurrently. + +10/30/02: beazley + Fixed subtle bug with first set computation and empty productions. + Patch submitted by Michael Dyck. + +10/30/02: beazley + Fixed error messages to use "filename:line: message" instead + of "filename:line. message". This makes error reporting more + friendly to emacs. Patch submitted by François Pinard. + +10/30/02: beazley + Improvements to parser.out file. Terminals and nonterminals + are sorted instead of being printed in random order. + Patch submitted by François Pinard. + +10/30/02: beazley + Improvements to parser.out file output. Rules are now printed + in a way that's easier to understand. Contributed by Russ Cox. + +10/30/02: beazley + Added 'nonassoc' associativity support. This can be used + to disable the chaining of operators like a < b < c. + To use, simply specify 'nonassoc' in the precedence table + + precedence = ( + ('nonassoc', 'LESSTHAN', 'GREATERTHAN'), # Nonassociative operators + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('right', 'UMINUS'), # Unary minus operator + ) + + Patch contributed by Russ Cox. + +10/30/02: beazley + Modified the lexer to provide optional support for Python -O and -OO + modes. To make this work, Python *first* needs to be run in + unoptimized mode. This reads the lexing information and creates a + file "lextab.py". Then, run lex like this: + + # module foo.py + ... + ... + lex.lex(optimize=1) + + Once the lextab file has been created, subsequent calls to + lex.lex() will read data from the lextab file instead of using + introspection. In optimized mode (-O, -OO) everything should + work normally despite the loss of doc strings. + + To change the name of the file 'lextab.py' use the following: + + lex.lex(lextab="footab") + + (this creates a file footab.py) + + +Version 1.1 October 25, 2001 +------------------------------ + +10/25/01: beazley + Modified the table generator to produce much more compact data. + This should greatly reduce the size of the parsetab.py[c] file. + Caveat: the tables still need to be constructed so a little more + work is done in parsetab on import. + +10/25/01: beazley + There may be a possible bug in the cycle detector that reports errors + about infinite recursion. I'm having a little trouble tracking it + down, but if you get this problem, you can disable the cycle + detector as follows: + + yacc.yacc(check_recursion = 0) + +10/25/01: beazley + Fixed a bug in lex.py that sometimes caused illegal characters to be + reported incorrectly. Reported by Sverre Jørgensen. + +7/8/01 : beazley + Added a reference to the underlying lexer object when tokens are handled by + functions. The lexer is available as the 'lexer' attribute. This + was added to provide better lexing support for languages such as Fortran + where certain types of tokens can't be conveniently expressed as regular + expressions (and where the tokenizing function may want to perform a + little backtracking). Suggested by Pearu Peterson. + +6/20/01 : beazley + Modified yacc() function so that an optional starting symbol can be specified. + For example: + + yacc.yacc(start="statement") + + Normally yacc always treats the first production rule as the starting symbol. + However, if you are debugging your grammar it may be useful to specify + an alternative starting symbol. Idea suggested by Rich Salz. 
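The 7/8/01 entry above adds the 'lexer' attribute to tokens handled by functions so that a rule can consult lexer state or perform a little lookahead/backtracking. A toy sketch of that idea (the LABEL/ID distinction and the input string are invented for this example):

    import ply.lex as lex

    tokens = ('LABEL', 'ID')
    t_ignore = ' \t'

    def t_ID(t):
        r'[A-Za-z_]\w*'
        # Peek at the raw input through the attached lexer; if the identifier
        # is immediately followed by ':', consume the colon and retype the token.
        data, pos = t.lexer.lexdata, t.lexer.lexpos
        if pos < len(data) and data[pos] == ':':
            t.lexer.lexpos = pos + 1          # small manual lookahead
            t.type = 'LABEL'
        return t

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input("start: go")
    for tok in lexer:
        print(tok.type, tok.value)            # LABEL start, then ID go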
+ +Version 1.0 June 18, 2001 +-------------------------- +Initial public offering + diff --git a/components/script/dom/bindings/codegen/ply/COPYING b/components/script/dom/bindings/codegen/ply/COPYING deleted file mode 100644 index 3b107de4508..00000000000 --- a/components/script/dom/bindings/codegen/ply/COPYING +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (C) 2001-2009, -David M. Beazley (Dabeaz LLC) -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -* Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -* Neither the name of the David Beazley or Dabeaz LLC may be used to - endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/components/script/dom/bindings/codegen/ply/MANIFEST.in b/components/script/dom/bindings/codegen/ply/MANIFEST.in new file mode 100644 index 00000000000..0d37431b0b4 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/MANIFEST.in @@ -0,0 +1,8 @@ +recursive-include example * +recursive-include doc * +recursive-include test * +include ANNOUNCE +include README.md +include CHANGES +include TODO +global-exclude *.pyc diff --git a/components/script/dom/bindings/codegen/ply/PKG-INFO b/components/script/dom/bindings/codegen/ply/PKG-INFO new file mode 100644 index 00000000000..6eedf425953 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/PKG-INFO @@ -0,0 +1,22 @@ +Metadata-Version: 1.1 +Name: ply +Version: 3.10 +Summary: Python Lex & Yacc +Home-page: http://www.dabeaz.com/ply/ +Author: David Beazley +Author-email: dave@dabeaz.com +License: BSD +Description: + PLY is yet another implementation of lex and yacc for Python. Some notable + features include the fact that its implemented entirely in Python and it + uses LALR(1) parsing which is efficient and well suited for larger grammars. + + PLY provides most of the standard lex/yacc features including support for empty + productions, precedence rules, error recovery, and support for ambiguous grammars. + + PLY is extremely easy to use and provides very extensive error checking. + It is compatible with both Python 2 and Python 3. 
+ +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2 diff --git a/components/script/dom/bindings/codegen/ply/README b/components/script/dom/bindings/codegen/ply/README deleted file mode 100644 index d3de9993360..00000000000 --- a/components/script/dom/bindings/codegen/ply/README +++ /dev/null @@ -1,9 +0,0 @@ -David Beazley's PLY (Python Lex-Yacc) -http://www.dabeaz.com/ply/ - -Licensed under BSD. - -This directory contains just the code and license from PLY version 4.0; -the full distribution (see the URL) also contains examples, tests, -documentation, and a longer README. - diff --git a/components/script/dom/bindings/codegen/ply/README.md b/components/script/dom/bindings/codegen/ply/README.md new file mode 100644 index 00000000000..e428f1b14a8 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/README.md @@ -0,0 +1,273 @@ +PLY (Python Lex-Yacc) Version 3.10 + +Copyright (C) 2001-2017 +David M. Beazley (Dabeaz LLC) +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of the David Beazley or Dabeaz LLC may be used to + endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Introduction +============ + +PLY is a 100% Python implementation of the common parsing tools lex +and yacc. Here are a few highlights: + + - PLY is very closely modeled after traditional lex/yacc. + If you know how to use these tools in C, you will find PLY + to be similar. + + - PLY provides *very* extensive error reporting and diagnostic + information to assist in parser construction. The original + implementation was developed for instructional purposes. As + a result, the system tries to identify the most common types + of errors made by novice users. + + - PLY provides full support for empty productions, error recovery, + precedence specifiers, and moderately ambiguous grammars. + + - Parsing is based on LR-parsing which is fast, memory efficient, + better suited to large grammars, and which has a number of nice + properties when dealing with syntax errors and other parsing problems. + Currently, PLY builds its parsing tables using the LALR(1) + algorithm used in yacc. + + - PLY uses Python introspection features to build lexers and parsers. 
+ This greatly simplifies the task of parser construction since it reduces + the number of files and eliminates the need to run a separate lex/yacc + tool before running your program. + + - PLY can be used to build parsers for "real" programming languages. + Although it is not ultra-fast due to its Python implementation, + PLY can be used to parse grammars consisting of several hundred + rules (as might be found for a language like C). The lexer and LR + parser are also reasonably efficient when parsing typically + sized programs. People have used PLY to build parsers for + C, C++, ADA, and other real programming languages. + +How to Use +========== + +PLY consists of two files : lex.py and yacc.py. These are contained +within the 'ply' directory which may also be used as a Python package. +To use PLY, simply copy the 'ply' directory to your project and import +lex and yacc from the associated 'ply' package. For example: + + import ply.lex as lex + import ply.yacc as yacc + +Alternatively, you can copy just the files lex.py and yacc.py +individually and use them as modules. For example: + + import lex + import yacc + +The file setup.py can be used to install ply using distutils. + +The file doc/ply.html contains complete documentation on how to use +the system. + +The example directory contains several different examples including a +PLY specification for ANSI C as given in K&R 2nd Ed. + +A simple example is found at the end of this document + +Requirements +============ +PLY requires the use of Python 2.6 or greater. However, you should +use the latest Python release if possible. It should work on just +about any platform. PLY has been tested with both CPython and Jython. +It also seems to work with IronPython. + +Resources +========= +More information about PLY can be obtained on the PLY webpage at: + + http://www.dabeaz.com/ply + +For a detailed overview of parsing theory, consult the excellent +book "Compilers : Principles, Techniques, and Tools" by Aho, Sethi, and +Ullman. The topics found in "Lex & Yacc" by Levine, Mason, and Brown +may also be useful. + +The GitHub page for PLY can be found at: + + https://github.com/dabeaz/ply + +An old and relatively inactive discussion group for PLY is found at: + + http://groups.google.com/group/ply-hack + +Acknowledgments +=============== +A special thanks is in order for all of the students in CS326 who +suffered through about 25 different versions of these tools :-). + +The CHANGES file acknowledges those who have contributed patches. + +Elias Ioup did the first implementation of LALR(1) parsing in PLY-1.x. +Andrew Waters and Markus Schoepflin were instrumental in reporting bugs +and testing a revised LALR(1) implementation for PLY-2.0. + +Special Note for PLY-3.0 +======================== +PLY-3.0 the first PLY release to support Python 3. However, backwards +compatibility with Python 2.6 is still preserved. PLY provides dual +Python 2/3 compatibility by restricting its implementation to a common +subset of basic language features. You should not convert PLY using +2to3--it is not necessary and may in fact break the implementation. + +Example +======= + +Here is a simple example showing a PLY implementation of a calculator +with variables. + + # ----------------------------------------------------------------------------- + # calc.py + # + # A simple calculator with variables. 
+ # ----------------------------------------------------------------------------- + + tokens = ( + 'NAME','NUMBER', + 'PLUS','MINUS','TIMES','DIVIDE','EQUALS', + 'LPAREN','RPAREN', + ) + + # Tokens + + t_PLUS = r'\+' + t_MINUS = r'-' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_EQUALS = r'=' + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + def t_NUMBER(t): + r'\d+' + t.value = int(t.value) + return t + + # Ignored characters + t_ignore = " \t" + + def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + + # Build the lexer + import ply.lex as lex + lex.lex() + + # Precedence rules for the arithmetic operators + precedence = ( + ('left','PLUS','MINUS'), + ('left','TIMES','DIVIDE'), + ('right','UMINUS'), + ) + + # dictionary of names (for storing variables) + names = { } + + def p_statement_assign(p): + 'statement : NAME EQUALS expression' + names[p[1]] = p[3] + + def p_statement_expr(p): + 'statement : expression' + print(p[1]) + + def p_expression_binop(p): + '''expression : expression PLUS expression + | expression MINUS expression + | expression TIMES expression + | expression DIVIDE expression''' + if p[2] == '+' : p[0] = p[1] + p[3] + elif p[2] == '-': p[0] = p[1] - p[3] + elif p[2] == '*': p[0] = p[1] * p[3] + elif p[2] == '/': p[0] = p[1] / p[3] + + def p_expression_uminus(p): + 'expression : MINUS expression %prec UMINUS' + p[0] = -p[2] + + def p_expression_group(p): + 'expression : LPAREN expression RPAREN' + p[0] = p[2] + + def p_expression_number(p): + 'expression : NUMBER' + p[0] = p[1] + + def p_expression_name(p): + 'expression : NAME' + try: + p[0] = names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + def p_error(p): + print("Syntax error at '%s'" % p.value) + + import ply.yacc as yacc + yacc.yacc() + + while True: + try: + s = raw_input('calc > ') # use input() on Python 3 + except EOFError: + break + yacc.parse(s) + + +Bug Reports and Patches +======================= +My goal with PLY is to simply have a decent lex/yacc implementation +for Python. As a general rule, I don't spend huge amounts of time +working on it unless I receive very specific bug reports and/or +patches to fix problems. I also try to incorporate submitted feature +requests and enhancements into each new version. Please visit the PLY +github page at https://github.com/dabeaz/ply to submit issues and pull +requests. To contact me about bugs and/or new features, please send +email to dave@dabeaz.com. + +-- Dave + + + + + + + + + diff --git a/components/script/dom/bindings/codegen/ply/TODO b/components/script/dom/bindings/codegen/ply/TODO new file mode 100644 index 00000000000..f4800aacf47 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/TODO @@ -0,0 +1,16 @@ +The PLY to-do list: + +1. Finish writing the C Preprocessor module. Started in the + file ply/cpp.py + +2. Create and document libraries of useful tokens. + +3. Expand the examples/yply tool that parses bison/yacc + files. + +4. Think of various diabolical things to do with the + new yacc internals. For example, it is now possible + to specify grammrs using completely different schemes + than the reflection approach used by PLY. 
+ + diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/README b/components/script/dom/bindings/codegen/ply/example/BASIC/README new file mode 100644 index 00000000000..be24a3005e7 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/README @@ -0,0 +1,79 @@ +Inspired by a September 14, 2006 Salon article "Why Johnny Can't Code" by +David Brin (http://www.salon.com/tech/feature/2006/09/14/basic/index.html), +I thought that a fully working BASIC interpreter might be an interesting, +if not questionable, PLY example. Uh, okay, so maybe it's just a bad idea, +but in any case, here it is. + +In this example, you'll find a rough implementation of 1964 Dartmouth BASIC +as described in the manual at: + + http://www.bitsavers.org/pdf/dartmouth/BASIC_Oct64.pdf + +See also: + + http://en.wikipedia.org/wiki/Dartmouth_BASIC + +This dialect is downright primitive---there are no string variables +and no facilities for interactive input. Moreover, subroutines and functions +are brain-dead even more than they usually are for BASIC. Of course, +the GOTO statement is provided. + +Nevertheless, there are a few interesting aspects of this example: + + - It illustrates a fully working interpreter including lexing, parsing, + and interpretation of instructions. + + - The parser shows how to catch and report various kinds of parsing + errors in a more graceful way. + + - The example both parses files (supplied on command line) and + interactive input entered line by line. + + - It shows how you might represent parsed information. In this case, + each BASIC statement is encoded into a Python tuple containing the + statement type and parameters. These tuples are then stored in + a dictionary indexed by program line numbers. + + - Even though it's just BASIC, the parser contains more than 80 + rules and 150 parsing states. Thus, it's a little more meaty than + the calculator example. + +To use the example, run it as follows: + + % python basic.py hello.bas + HELLO WORLD + % + +or use it interactively: + + % python basic.py + [BASIC] 10 PRINT "HELLO WORLD" + [BASIC] 20 END + [BASIC] RUN + HELLO WORLD + [BASIC] + +The following files are defined: + + basic.py - High level script that controls everything + basiclex.py - BASIC tokenizer + basparse.py - BASIC parser + basinterp.py - BASIC interpreter that runs parsed programs. + +In addition, a number of sample BASIC programs (.bas suffix) are +provided. These were taken out of the Dartmouth manual. + +Disclaimer: I haven't spent a ton of time testing this and it's likely that +I've skimped here and there on a few finer details (e.g., strictly enforcing +variable naming rules). However, the interpreter seems to be able to run +the examples in the BASIC manual. + +Have fun! + +-Dave + + + + + + diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py new file mode 100644 index 00000000000..70ac9e7c740 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py @@ -0,0 +1,65 @@ +# An implementation of Dartmouth BASIC (1964) +# + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +import basiclex +import basparse +import basinterp + +# If a filename has been specified, we try to run it. 
+# If a runtime error occurs, we bail out and enter +# interactive mode below +if len(sys.argv) == 2: + data = open(sys.argv[1]).read() + prog = basparse.parse(data) + if not prog: + raise SystemExit + b = basinterp.BasicInterpreter(prog) + try: + b.run() + raise SystemExit + except RuntimeError: + pass + +else: + b = basinterp.BasicInterpreter({}) + +# Interactive mode. This incrementally adds/deletes statements +# from the program stored in the BasicInterpreter object. In +# addition, special commands 'NEW','LIST',and 'RUN' are added. +# Specifying a line number with no code deletes that line from +# the program. + +while 1: + try: + line = raw_input("[BASIC] ") + except EOFError: + raise SystemExit + if not line: + continue + line += "\n" + prog = basparse.parse(line) + if not prog: + continue + + keys = list(prog) + if keys[0] > 0: + b.add_statements(prog) + else: + stat = prog[keys[0]] + if stat[0] == 'RUN': + try: + b.run() + except RuntimeError: + pass + elif stat[0] == 'LIST': + b.list() + elif stat[0] == 'BLANK': + b.del_line(stat[1]) + elif stat[0] == 'NEW': + b.new() diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py new file mode 100644 index 00000000000..4151f4c34fb --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py @@ -0,0 +1,61 @@ +# An implementation of Dartmouth BASIC (1964) + +from ply import * + +keywords = ( + 'LET', 'READ', 'DATA', 'PRINT', 'GOTO', 'IF', 'THEN', 'FOR', 'NEXT', 'TO', 'STEP', + 'END', 'STOP', 'DEF', 'GOSUB', 'DIM', 'REM', 'RETURN', 'RUN', 'LIST', 'NEW', +) + +tokens = keywords + ( + 'EQUALS', 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'POWER', + 'LPAREN', 'RPAREN', 'LT', 'LE', 'GT', 'GE', 'NE', + 'COMMA', 'SEMI', 'INTEGER', 'FLOAT', 'STRING', + 'ID', 'NEWLINE' +) + +t_ignore = ' \t' + + +def t_REM(t): + r'REM .*' + return t + + +def t_ID(t): + r'[A-Z][A-Z0-9]*' + if t.value in keywords: + t.type = t.value + return t + +t_EQUALS = r'=' +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_POWER = r'\^' +t_DIVIDE = r'/' +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LT = r'<' +t_LE = r'<=' +t_GT = r'>' +t_GE = r'>=' +t_NE = r'<>' +t_COMMA = r'\,' +t_SEMI = r';' +t_INTEGER = r'\d+' +t_FLOAT = r'((\d*\.\d+)(E[\+-]?\d+)?|([1-9]\d*E[\+-]?\d+))' +t_STRING = r'\".*?\"' + + +def t_NEWLINE(t): + r'\n' + t.lexer.lineno += 1 + return t + + +def t_error(t): + print("Illegal character %s" % t.value[0]) + t.lexer.skip(1) + +lex.lex(debug=0) diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py new file mode 100644 index 00000000000..9dcc7feda69 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py @@ -0,0 +1,73 @@ +# An implementation of Dartmouth BASIC (1964) +# + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +import logging +logging.basicConfig( + level=logging.INFO, + filename="parselog.txt", + filemode="w" +) +log = logging.getLogger() + +import basiclex +import basparse +import basinterp + +# If a filename has been specified, we try to run it. 
+# If a runtime error occurs, we bail out and enter +# interactive mode below +if len(sys.argv) == 2: + data = open(sys.argv[1]).read() + prog = basparse.parse(data, debug=log) + if not prog: + raise SystemExit + b = basinterp.BasicInterpreter(prog) + try: + b.run() + raise SystemExit + except RuntimeError: + pass + +else: + b = basinterp.BasicInterpreter({}) + +# Interactive mode. This incrementally adds/deletes statements +# from the program stored in the BasicInterpreter object. In +# addition, special commands 'NEW','LIST',and 'RUN' are added. +# Specifying a line number with no code deletes that line from +# the program. + +while 1: + try: + line = raw_input("[BASIC] ") + except EOFError: + raise SystemExit + if not line: + continue + line += "\n" + prog = basparse.parse(line, debug=log) + if not prog: + continue + + keys = list(prog) + if keys[0] > 0: + b.add_statements(prog) + else: + stat = prog[keys[0]] + if stat[0] == 'RUN': + try: + b.run() + except RuntimeError: + pass + elif stat[0] == 'LIST': + b.list() + elif stat[0] == 'BLANK': + b.del_line(stat[1]) + elif stat[0] == 'NEW': + b.new() diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py new file mode 100644 index 00000000000..67762c797bf --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py @@ -0,0 +1,496 @@ +# This file provides the runtime support for running a basic program +# Assumes the program has been parsed using basparse.py + +import sys +import math +import random + + +class BasicInterpreter: + + # Initialize the interpreter. prog is a dictionary + # containing (line,statement) mappings + def __init__(self, prog): + self.prog = prog + + self.functions = { # Built-in function table + 'SIN': lambda z: math.sin(self.eval(z)), + 'COS': lambda z: math.cos(self.eval(z)), + 'TAN': lambda z: math.tan(self.eval(z)), + 'ATN': lambda z: math.atan(self.eval(z)), + 'EXP': lambda z: math.exp(self.eval(z)), + 'ABS': lambda z: abs(self.eval(z)), + 'LOG': lambda z: math.log(self.eval(z)), + 'SQR': lambda z: math.sqrt(self.eval(z)), + 'INT': lambda z: int(self.eval(z)), + 'RND': lambda z: random.random() + } + + # Collect all data statements + def collect_data(self): + self.data = [] + for lineno in self.stat: + if self.prog[lineno][0] == 'DATA': + self.data = self.data + self.prog[lineno][1] + self.dc = 0 # Initialize the data counter + + # Check for end statements + def check_end(self): + has_end = 0 + for lineno in self.stat: + if self.prog[lineno][0] == 'END' and not has_end: + has_end = lineno + if not has_end: + print("NO END INSTRUCTION") + self.error = 1 + return + if has_end != lineno: + print("END IS NOT LAST") + self.error = 1 + + # Check loops + def check_loops(self): + for pc in range(len(self.stat)): + lineno = self.stat[pc] + if self.prog[lineno][0] == 'FOR': + forinst = self.prog[lineno] + loopvar = forinst[1] + for i in range(pc + 1, len(self.stat)): + if self.prog[self.stat[i]][0] == 'NEXT': + nextvar = self.prog[self.stat[i]][1] + if nextvar != loopvar: + continue + self.loopend[pc] = i + break + else: + print("FOR WITHOUT NEXT AT LINE %s" % self.stat[pc]) + self.error = 1 + + # Evaluate an expression + def eval(self, expr): + etype = expr[0] + if etype == 'NUM': + return expr[1] + elif etype == 'GROUP': + return self.eval(expr[1]) + elif etype == 'UNARY': + if expr[1] == '-': + return -self.eval(expr[2]) + elif etype == 'BINOP': + if expr[1] == '+': + return self.eval(expr[2]) 
+ self.eval(expr[3]) + elif expr[1] == '-': + return self.eval(expr[2]) - self.eval(expr[3]) + elif expr[1] == '*': + return self.eval(expr[2]) * self.eval(expr[3]) + elif expr[1] == '/': + return float(self.eval(expr[2])) / self.eval(expr[3]) + elif expr[1] == '^': + return abs(self.eval(expr[2]))**self.eval(expr[3]) + elif etype == 'VAR': + var, dim1, dim2 = expr[1] + if not dim1 and not dim2: + if var in self.vars: + return self.vars[var] + else: + print("UNDEFINED VARIABLE %s AT LINE %s" % + (var, self.stat[self.pc])) + raise RuntimeError + # May be a list lookup or a function evaluation + if dim1 and not dim2: + if var in self.functions: + # A function + return self.functions[var](dim1) + else: + # A list evaluation + if var in self.lists: + dim1val = self.eval(dim1) + if dim1val < 1 or dim1val > len(self.lists[var]): + print("LIST INDEX OUT OF BOUNDS AT LINE %s" % + self.stat[self.pc]) + raise RuntimeError + return self.lists[var][dim1val - 1] + if dim1 and dim2: + if var in self.tables: + dim1val = self.eval(dim1) + dim2val = self.eval(dim2) + if dim1val < 1 or dim1val > len(self.tables[var]) or dim2val < 1 or dim2val > len(self.tables[var][0]): + print("TABLE INDEX OUT OUT BOUNDS AT LINE %s" % + self.stat[self.pc]) + raise RuntimeError + return self.tables[var][dim1val - 1][dim2val - 1] + print("UNDEFINED VARIABLE %s AT LINE %s" % + (var, self.stat[self.pc])) + raise RuntimeError + + # Evaluate a relational expression + def releval(self, expr): + etype = expr[1] + lhs = self.eval(expr[2]) + rhs = self.eval(expr[3]) + if etype == '<': + if lhs < rhs: + return 1 + else: + return 0 + + elif etype == '<=': + if lhs <= rhs: + return 1 + else: + return 0 + + elif etype == '>': + if lhs > rhs: + return 1 + else: + return 0 + + elif etype == '>=': + if lhs >= rhs: + return 1 + else: + return 0 + + elif etype == '=': + if lhs == rhs: + return 1 + else: + return 0 + + elif etype == '<>': + if lhs != rhs: + return 1 + else: + return 0 + + # Assignment + def assign(self, target, value): + var, dim1, dim2 = target + if not dim1 and not dim2: + self.vars[var] = self.eval(value) + elif dim1 and not dim2: + # List assignment + dim1val = self.eval(dim1) + if not var in self.lists: + self.lists[var] = [0] * 10 + + if dim1val > len(self.lists[var]): + print ("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc]) + raise RuntimeError + self.lists[var][dim1val - 1] = self.eval(value) + elif dim1 and dim2: + dim1val = self.eval(dim1) + dim2val = self.eval(dim2) + if not var in self.tables: + temp = [0] * 10 + v = [] + for i in range(10): + v.append(temp[:]) + self.tables[var] = v + # Variable already exists + if dim1val > len(self.tables[var]) or dim2val > len(self.tables[var][0]): + print("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc]) + raise RuntimeError + self.tables[var][dim1val - 1][dim2val - 1] = self.eval(value) + + # Change the current line number + def goto(self, linenum): + if not linenum in self.prog: + print("UNDEFINED LINE NUMBER %d AT LINE %d" % + (linenum, self.stat[self.pc])) + raise RuntimeError + self.pc = self.stat.index(linenum) + + # Run it + def run(self): + self.vars = {} # All variables + self.lists = {} # List variables + self.tables = {} # Tables + self.loops = [] # Currently active loops + self.loopend = {} # Mapping saying where loops end + self.gosub = None # Gosub return point (if any) + self.error = 0 # Indicates program error + + self.stat = list(self.prog) # Ordered list of all line numbers + self.stat.sort() + self.pc = 0 # Current program counter + + # 
Processing prior to running + + self.collect_data() # Collect all of the data statements + self.check_end() + self.check_loops() + + if self.error: + raise RuntimeError + + while 1: + line = self.stat[self.pc] + instr = self.prog[line] + + op = instr[0] + + # END and STOP statements + if op == 'END' or op == 'STOP': + break # We're done + + # GOTO statement + elif op == 'GOTO': + newline = instr[1] + self.goto(newline) + continue + + # PRINT statement + elif op == 'PRINT': + plist = instr[1] + out = "" + for label, val in plist: + if out: + out += ' ' * (15 - (len(out) % 15)) + out += label + if val: + if label: + out += " " + eval = self.eval(val) + out += str(eval) + sys.stdout.write(out) + end = instr[2] + if not (end == ',' or end == ';'): + sys.stdout.write("\n") + if end == ',': + sys.stdout.write(" " * (15 - (len(out) % 15))) + if end == ';': + sys.stdout.write(" " * (3 - (len(out) % 3))) + + # LET statement + elif op == 'LET': + target = instr[1] + value = instr[2] + self.assign(target, value) + + # READ statement + elif op == 'READ': + for target in instr[1]: + if self.dc < len(self.data): + value = ('NUM', self.data[self.dc]) + self.assign(target, value) + self.dc += 1 + else: + # No more data. Program ends + return + elif op == 'IF': + relop = instr[1] + newline = instr[2] + if (self.releval(relop)): + self.goto(newline) + continue + + elif op == 'FOR': + loopvar = instr[1] + initval = instr[2] + finval = instr[3] + stepval = instr[4] + + # Check to see if this is a new loop + if not self.loops or self.loops[-1][0] != self.pc: + # Looks like a new loop. Make the initial assignment + newvalue = initval + self.assign((loopvar, None, None), initval) + if not stepval: + stepval = ('NUM', 1) + stepval = self.eval(stepval) # Evaluate step here + self.loops.append((self.pc, stepval)) + else: + # It's a repeat of the previous loop + # Update the value of the loop variable according to the + # step + stepval = ('NUM', self.loops[-1][1]) + newvalue = ( + 'BINOP', '+', ('VAR', (loopvar, None, None)), stepval) + + if self.loops[-1][1] < 0: + relop = '>=' + else: + relop = '<=' + if not self.releval(('RELOP', relop, newvalue, finval)): + # Loop is done. 
Jump to the NEXT + self.pc = self.loopend[self.pc] + self.loops.pop() + else: + self.assign((loopvar, None, None), newvalue) + + elif op == 'NEXT': + if not self.loops: + print("NEXT WITHOUT FOR AT LINE %s" % line) + return + + nextvar = instr[1] + self.pc = self.loops[-1][0] + loopinst = self.prog[self.stat[self.pc]] + forvar = loopinst[1] + if nextvar != forvar: + print("NEXT DOESN'T MATCH FOR AT LINE %s" % line) + return + continue + elif op == 'GOSUB': + newline = instr[1] + if self.gosub: + print("ALREADY IN A SUBROUTINE AT LINE %s" % line) + return + self.gosub = self.stat[self.pc] + self.goto(newline) + continue + + elif op == 'RETURN': + if not self.gosub: + print("RETURN WITHOUT A GOSUB AT LINE %s" % line) + return + self.goto(self.gosub) + self.gosub = None + + elif op == 'FUNC': + fname = instr[1] + pname = instr[2] + expr = instr[3] + + def eval_func(pvalue, name=pname, self=self, expr=expr): + self.assign((pname, None, None), pvalue) + return self.eval(expr) + self.functions[fname] = eval_func + + elif op == 'DIM': + for vname, x, y in instr[1]: + if y == 0: + # Single dimension variable + self.lists[vname] = [0] * x + else: + # Double dimension variable + temp = [0] * y + v = [] + for i in range(x): + v.append(temp[:]) + self.tables[vname] = v + + self.pc += 1 + + # Utility functions for program listing + def expr_str(self, expr): + etype = expr[0] + if etype == 'NUM': + return str(expr[1]) + elif etype == 'GROUP': + return "(%s)" % self.expr_str(expr[1]) + elif etype == 'UNARY': + if expr[1] == '-': + return "-" + str(expr[2]) + elif etype == 'BINOP': + return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3])) + elif etype == 'VAR': + return self.var_str(expr[1]) + + def relexpr_str(self, expr): + return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3])) + + def var_str(self, var): + varname, dim1, dim2 = var + if not dim1 and not dim2: + return varname + if dim1 and not dim2: + return "%s(%s)" % (varname, self.expr_str(dim1)) + return "%s(%s,%s)" % (varname, self.expr_str(dim1), self.expr_str(dim2)) + + # Create a program listing + def list(self): + stat = list(self.prog) # Ordered list of all line numbers + stat.sort() + for line in stat: + instr = self.prog[line] + op = instr[0] + if op in ['END', 'STOP', 'RETURN']: + print("%s %s" % (line, op)) + continue + elif op == 'REM': + print("%s %s" % (line, instr[1])) + elif op == 'PRINT': + _out = "%s %s " % (line, op) + first = 1 + for p in instr[1]: + if not first: + _out += ", " + if p[0] and p[1]: + _out += '"%s"%s' % (p[0], self.expr_str(p[1])) + elif p[1]: + _out += self.expr_str(p[1]) + else: + _out += '"%s"' % (p[0],) + first = 0 + if instr[2]: + _out += instr[2] + print(_out) + elif op == 'LET': + print("%s LET %s = %s" % + (line, self.var_str(instr[1]), self.expr_str(instr[2]))) + elif op == 'READ': + _out = "%s READ " % line + first = 1 + for r in instr[1]: + if not first: + _out += "," + _out += self.var_str(r) + first = 0 + print(_out) + elif op == 'IF': + print("%s IF %s THEN %d" % + (line, self.relexpr_str(instr[1]), instr[2])) + elif op == 'GOTO' or op == 'GOSUB': + print("%s %s %s" % (line, op, instr[1])) + elif op == 'FOR': + _out = "%s FOR %s = %s TO %s" % ( + line, instr[1], self.expr_str(instr[2]), self.expr_str(instr[3])) + if instr[4]: + _out += " STEP %s" % (self.expr_str(instr[4])) + print(_out) + elif op == 'NEXT': + print("%s NEXT %s" % (line, instr[1])) + elif op == 'FUNC': + print("%s DEF %s(%s) = %s" % + (line, instr[1], instr[2], self.expr_str(instr[3]))) + 
elif op == 'DIM': + _out = "%s DIM " % line + first = 1 + for vname, x, y in instr[1]: + if not first: + _out += "," + first = 0 + if y == 0: + _out += "%s(%d)" % (vname, x) + else: + _out += "%s(%d,%d)" % (vname, x, y) + + print(_out) + elif op == 'DATA': + _out = "%s DATA " % line + first = 1 + for v in instr[1]: + if not first: + _out += "," + first = 0 + _out += v + print(_out) + + # Erase the current program + def new(self): + self.prog = {} + + # Insert statements + def add_statements(self, prog): + for line, stat in prog.items(): + self.prog[line] = stat + + # Delete a statement + def del_line(self, lineno): + try: + del self.prog[lineno] + except KeyError: + pass diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py new file mode 100644 index 00000000000..d610c7d9094 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py @@ -0,0 +1,474 @@ +# An implementation of Dartmouth BASIC (1964) +# + +from ply import * +import basiclex + +tokens = basiclex.tokens + +precedence = ( + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('left', 'POWER'), + ('right', 'UMINUS') +) + +# A BASIC program is a series of statements. We represent the program as a +# dictionary of tuples indexed by line number. + + +def p_program(p): + '''program : program statement + | statement''' + + if len(p) == 2 and p[1]: + p[0] = {} + line, stat = p[1] + p[0][line] = stat + elif len(p) == 3: + p[0] = p[1] + if not p[0]: + p[0] = {} + if p[2]: + line, stat = p[2] + p[0][line] = stat + +# This catch-all rule is used for any catastrophic errors. In this case, +# we simply return nothing + + +def p_program_error(p): + '''program : error''' + p[0] = None + p.parser.error = 1 + +# Format of all BASIC statements. + + +def p_statement(p): + '''statement : INTEGER command NEWLINE''' + if isinstance(p[2], str): + print("%s %s %s" % (p[2], "AT LINE", p[1])) + p[0] = None + p.parser.error = 1 + else: + lineno = int(p[1]) + p[0] = (lineno, p[2]) + +# Interactive statements. + + +def p_statement_interactive(p): + '''statement : RUN NEWLINE + | LIST NEWLINE + | NEW NEWLINE''' + p[0] = (0, (p[1], 0)) + +# Blank line number + + +def p_statement_blank(p): + '''statement : INTEGER NEWLINE''' + p[0] = (0, ('BLANK', int(p[1]))) + +# Error handling for malformed statements + + +def p_statement_bad(p): + '''statement : INTEGER error NEWLINE''' + print("MALFORMED STATEMENT AT LINE %s" % p[1]) + p[0] = None + p.parser.error = 1 + +# Blank line + + +def p_statement_newline(p): + '''statement : NEWLINE''' + p[0] = None + +# LET statement + + +def p_command_let(p): + '''command : LET variable EQUALS expr''' + p[0] = ('LET', p[2], p[4]) + + +def p_command_let_bad(p): + '''command : LET variable EQUALS error''' + p[0] = "BAD EXPRESSION IN LET" + +# READ statement + + +def p_command_read(p): + '''command : READ varlist''' + p[0] = ('READ', p[2]) + + +def p_command_read_bad(p): + '''command : READ error''' + p[0] = "MALFORMED VARIABLE LIST IN READ" + +# DATA statement + + +def p_command_data(p): + '''command : DATA numlist''' + p[0] = ('DATA', p[2]) + + +def p_command_data_bad(p): + '''command : DATA error''' + p[0] = "MALFORMED NUMBER LIST IN DATA" + +# PRINT statement + + +def p_command_print(p): + '''command : PRINT plist optend''' + p[0] = ('PRINT', p[2], p[3]) + + +def p_command_print_bad(p): + '''command : PRINT error''' + p[0] = "MALFORMED PRINT STATEMENT" + +# Optional ending on PRINT. 
Either a comma (,) or semicolon (;) + + +def p_optend(p): + '''optend : COMMA + | SEMI + |''' + if len(p) == 2: + p[0] = p[1] + else: + p[0] = None + +# PRINT statement with no arguments + + +def p_command_print_empty(p): + '''command : PRINT''' + p[0] = ('PRINT', [], None) + +# GOTO statement + + +def p_command_goto(p): + '''command : GOTO INTEGER''' + p[0] = ('GOTO', int(p[2])) + + +def p_command_goto_bad(p): + '''command : GOTO error''' + p[0] = "INVALID LINE NUMBER IN GOTO" + +# IF-THEN statement + + +def p_command_if(p): + '''command : IF relexpr THEN INTEGER''' + p[0] = ('IF', p[2], int(p[4])) + + +def p_command_if_bad(p): + '''command : IF error THEN INTEGER''' + p[0] = "BAD RELATIONAL EXPRESSION" + + +def p_command_if_bad2(p): + '''command : IF relexpr THEN error''' + p[0] = "INVALID LINE NUMBER IN THEN" + +# FOR statement + + +def p_command_for(p): + '''command : FOR ID EQUALS expr TO expr optstep''' + p[0] = ('FOR', p[2], p[4], p[6], p[7]) + + +def p_command_for_bad_initial(p): + '''command : FOR ID EQUALS error TO expr optstep''' + p[0] = "BAD INITIAL VALUE IN FOR STATEMENT" + + +def p_command_for_bad_final(p): + '''command : FOR ID EQUALS expr TO error optstep''' + p[0] = "BAD FINAL VALUE IN FOR STATEMENT" + + +def p_command_for_bad_step(p): + '''command : FOR ID EQUALS expr TO expr STEP error''' + p[0] = "MALFORMED STEP IN FOR STATEMENT" + +# Optional STEP qualifier on FOR statement + + +def p_optstep(p): + '''optstep : STEP expr + | empty''' + if len(p) == 3: + p[0] = p[2] + else: + p[0] = None + +# NEXT statement + + +def p_command_next(p): + '''command : NEXT ID''' + + p[0] = ('NEXT', p[2]) + + +def p_command_next_bad(p): + '''command : NEXT error''' + p[0] = "MALFORMED NEXT" + +# END statement + + +def p_command_end(p): + '''command : END''' + p[0] = ('END',) + +# REM statement + + +def p_command_rem(p): + '''command : REM''' + p[0] = ('REM', p[1]) + +# STOP statement + + +def p_command_stop(p): + '''command : STOP''' + p[0] = ('STOP',) + +# DEF statement + + +def p_command_def(p): + '''command : DEF ID LPAREN ID RPAREN EQUALS expr''' + p[0] = ('FUNC', p[2], p[4], p[7]) + + +def p_command_def_bad_rhs(p): + '''command : DEF ID LPAREN ID RPAREN EQUALS error''' + p[0] = "BAD EXPRESSION IN DEF STATEMENT" + + +def p_command_def_bad_arg(p): + '''command : DEF ID LPAREN error RPAREN EQUALS expr''' + p[0] = "BAD ARGUMENT IN DEF STATEMENT" + +# GOSUB statement + + +def p_command_gosub(p): + '''command : GOSUB INTEGER''' + p[0] = ('GOSUB', int(p[2])) + + +def p_command_gosub_bad(p): + '''command : GOSUB error''' + p[0] = "INVALID LINE NUMBER IN GOSUB" + +# RETURN statement + + +def p_command_return(p): + '''command : RETURN''' + p[0] = ('RETURN',) + +# DIM statement + + +def p_command_dim(p): + '''command : DIM dimlist''' + p[0] = ('DIM', p[2]) + + +def p_command_dim_bad(p): + '''command : DIM error''' + p[0] = "MALFORMED VARIABLE LIST IN DIM" + +# List of variables supplied to DIM statement + + +def p_dimlist(p): + '''dimlist : dimlist COMMA dimitem + | dimitem''' + if len(p) == 4: + p[0] = p[1] + p[0].append(p[3]) + else: + p[0] = [p[1]] + +# DIM items + + +def p_dimitem_single(p): + '''dimitem : ID LPAREN INTEGER RPAREN''' + p[0] = (p[1], eval(p[3]), 0) + + +def p_dimitem_double(p): + '''dimitem : ID LPAREN INTEGER COMMA INTEGER RPAREN''' + p[0] = (p[1], eval(p[3]), eval(p[5])) + +# Arithmetic expressions + + +def p_expr_binary(p): + '''expr : expr PLUS expr + | expr MINUS expr + | expr TIMES expr + | expr DIVIDE expr + | expr POWER expr''' + + p[0] = ('BINOP', p[2], p[1], p[3]) 
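+
+# Note: rather than AST node classes, expressions are encoded as nested tuples,
+# e.g. 2 + 3 * X parses to
+#     ('BINOP', '+', ('NUM', 2), ('BINOP', '*', ('NUM', 3), ('VAR', ('X', None, None))))
+# basinterp.BasicInterpreter.eval() walks these tuples recursively at run time.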
+ + +def p_expr_number(p): + '''expr : INTEGER + | FLOAT''' + p[0] = ('NUM', eval(p[1])) + + +def p_expr_variable(p): + '''expr : variable''' + p[0] = ('VAR', p[1]) + + +def p_expr_group(p): + '''expr : LPAREN expr RPAREN''' + p[0] = ('GROUP', p[2]) + + +def p_expr_unary(p): + '''expr : MINUS expr %prec UMINUS''' + p[0] = ('UNARY', '-', p[2]) + +# Relational expressions + + +def p_relexpr(p): + '''relexpr : expr LT expr + | expr LE expr + | expr GT expr + | expr GE expr + | expr EQUALS expr + | expr NE expr''' + p[0] = ('RELOP', p[2], p[1], p[3]) + +# Variables + + +def p_variable(p): + '''variable : ID + | ID LPAREN expr RPAREN + | ID LPAREN expr COMMA expr RPAREN''' + if len(p) == 2: + p[0] = (p[1], None, None) + elif len(p) == 5: + p[0] = (p[1], p[3], None) + else: + p[0] = (p[1], p[3], p[5]) + +# Builds a list of variable targets as a Python list + + +def p_varlist(p): + '''varlist : varlist COMMA variable + | variable''' + if len(p) > 2: + p[0] = p[1] + p[0].append(p[3]) + else: + p[0] = [p[1]] + + +# Builds a list of numbers as a Python list + +def p_numlist(p): + '''numlist : numlist COMMA number + | number''' + + if len(p) > 2: + p[0] = p[1] + p[0].append(p[3]) + else: + p[0] = [p[1]] + +# A number. May be an integer or a float + + +def p_number(p): + '''number : INTEGER + | FLOAT''' + p[0] = eval(p[1]) + +# A signed number. + + +def p_number_signed(p): + '''number : MINUS INTEGER + | MINUS FLOAT''' + p[0] = eval("-" + p[2]) + +# List of targets for a print statement +# Returns a list of tuples (label,expr) + + +def p_plist(p): + '''plist : plist COMMA pitem + | pitem''' + if len(p) > 3: + p[0] = p[1] + p[0].append(p[3]) + else: + p[0] = [p[1]] + + +def p_item_string(p): + '''pitem : STRING''' + p[0] = (p[1][1:-1], None) + + +def p_item_string_expr(p): + '''pitem : STRING expr''' + p[0] = (p[1][1:-1], p[2]) + + +def p_item_expr(p): + '''pitem : expr''' + p[0] = ("", p[1]) + +# Empty + + +def p_empty(p): + '''empty : ''' + +# Catastrophic error handler + + +def p_error(p): + if not p: + print("SYNTAX ERROR AT EOF") + +bparser = yacc.yacc() + + +def parse(data, debug=0): + bparser.error = 0 + p = bparser.parse(data, debug=debug) + if bparser.error: + return None + return p diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas new file mode 100644 index 00000000000..87bd95b32ec --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas @@ -0,0 +1,14 @@ +5 DIM A(50,15) +10 FOR I = 1 TO 50 +20 FOR J = 1 TO 15 +30 LET A(I,J) = I + J +35 REM PRINT I,J, A(I,J) +40 NEXT J +50 NEXT I +100 FOR I = 1 TO 50 +110 FOR J = 1 TO 15 +120 PRINT A(I,J), +130 NEXT J +140 PRINT +150 NEXT I +999 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas new file mode 100644 index 00000000000..447ee16a927 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas @@ -0,0 +1,5 @@ +10 DEF FDX(X) = 2*X +20 FOR I = 0 TO 100 +30 PRINT FDX(I) +40 NEXT I +50 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas new file mode 100644 index 00000000000..d0b77460894 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas @@ -0,0 +1,22 @@ +10 PRINT "A","B","C","GCD" +20 READ A,B,C +30 LET X = A +40 LET Y = B +50 GOSUB 200 +60 LET X = G +70 LET Y = C +80 GOSUB 200 
+90 PRINT A, B, C, G +100 GOTO 20 +110 DATA 60, 90, 120 +120 DATA 38456, 64872, 98765 +130 DATA 32, 384, 72 +200 LET Q = INT(X/Y) +210 LET R = X - Q*Y +220 IF R = 0 THEN 300 +230 LET X = Y +240 LET Y = R +250 GOTO 200 +300 LET G = Y +310 RETURN +999 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas new file mode 100644 index 00000000000..99737b16f15 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas @@ -0,0 +1,13 @@ +100 LET X = 3 +110 GOSUB 400 +120 PRINT U, V, W +200 LET X = 5 +210 GOSUB 400 +220 LET Z = U + 2*V + 3*W +230 PRINT Z +240 GOTO 999 +400 LET U = X*X +410 LET V = X*X*X +420 LET W = X*X*X*X + X*X*X + X*X + X +430 RETURN +999 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas new file mode 100644 index 00000000000..cc6f0b0b511 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas @@ -0,0 +1,4 @@ +5 REM HELLO WORLD PROGAM +10 PRINT "HELLO WORLD" +99 END + diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas new file mode 100644 index 00000000000..56c08220b3e --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas @@ -0,0 +1,17 @@ +1 REM ::: SOLVE A SYSTEM OF LINEAR EQUATIONS +2 REM ::: A1*X1 + A2*X2 = B1 +3 REM ::: A3*X1 + A4*X2 = B2 +4 REM -------------------------------------- +10 READ A1, A2, A3, A4 +15 LET D = A1 * A4 - A3 * A2 +20 IF D = 0 THEN 65 +30 READ B1, B2 +37 LET X1 = (B1*A4 - B2*A2) / D +42 LET X2 = (A1*B2 - A3*B1) / D +55 PRINT X1, X2 +60 GOTO 30 +65 PRINT "NO UNIQUE SOLUTION" +70 DATA 1, 2, 4 +80 DATA 2, -7, 5 +85 DATA 1, 3, 4, -7 +90 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas new file mode 100644 index 00000000000..b96901530c2 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas @@ -0,0 +1,12 @@ +5 PRINT "X VALUE", "SINE", "RESOLUTION" +10 READ D +20 LET M = -1 +30 FOR X = 0 TO 3 STEP D +40 IF SIN(X) <= M THEN 80 +50 LET X0 = X +60 LET M = SIN(X) +80 NEXT X +85 PRINT X0, M, D +90 GOTO 10 +100 DATA .1, .01, .001 +110 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas new file mode 100644 index 00000000000..a454dc3e211 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas @@ -0,0 +1,13 @@ +5 PRINT "THIS PROGRAM COMPUTES AND PRINTS THE NTH POWERS" +6 PRINT "OF THE NUMBERS LESS THAN OR EQUAL TO N FOR VARIOUS" +7 PRINT "N FROM 1 THROUGH 7" +8 PRINT +10 FOR N = 1 TO 7 +15 PRINT "N = "N +20 FOR I = 1 TO N +30 PRINT I^N, +40 NEXT I +50 PRINT +60 PRINT +70 NEXT N +80 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas new file mode 100644 index 00000000000..4ff7a146702 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas @@ -0,0 +1,4 @@ +10 FOR I = 1 TO 20 +20 PRINT INT(10*RND(0)) +30 NEXT I +40 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas new file mode 100644 
index 00000000000..a39aefb762c --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas @@ -0,0 +1,20 @@ +10 FOR I = 1 TO 3 +20 READ P(I) +30 NEXT I +40 FOR I = 1 TO 3 +50 FOR J = 1 TO 5 +60 READ S(I,J) +70 NEXT J +80 NEXT I +90 FOR J = 1 TO 5 +100 LET S = 0 +110 FOR I = 1 TO 3 +120 LET S = S + P(I) * S(I,J) +130 NEXT I +140 PRINT "TOTAL SALES FOR SALESMAN"J, "$"S +150 NEXT J +200 DATA 1.25, 4.30, 2.50 +210 DATA 40, 20, 37, 29, 42 +220 DATA 10, 16, 3, 21, 8 +230 DATA 35, 47, 29, 16, 33 +300 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas new file mode 100644 index 00000000000..5ced3974e24 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas @@ -0,0 +1,18 @@ +1 REM :: THIS PROGRAM COMPUTES HOW MANY TIMES YOU HAVE TO FOLD +2 REM :: A PIECE OF PAPER SO THAT IT IS TALLER THAN THE +3 REM :: SEARS TOWER. +4 REM :: S = HEIGHT OF TOWER (METERS) +5 REM :: T = THICKNESS OF PAPER (MILLIMETERS) +10 LET S = 442 +20 LET T = 0.1 +30 REM CONVERT T TO METERS +40 LET T = T * .001 +50 LET F = 1 +60 LET H = T +100 IF H > S THEN 200 +120 LET H = 2 * H +125 LET F = F + 1 +130 GOTO 100 +200 PRINT "NUMBER OF FOLDS ="F +220 PRINT "FINAL HEIGHT ="H +999 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas new file mode 100644 index 00000000000..6673a91524f --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas @@ -0,0 +1,5 @@ +10 LET X = 0 +20 LET X = X + 1 +30 PRINT X, SQR(X) +40 IF X < 100 THEN 20 +50 END diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas new file mode 100644 index 00000000000..862d85ef269 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas @@ -0,0 +1,4 @@ +10 FOR X = 1 TO 100 +20 PRINT X, SQR(X) +30 NEXT X +40 END diff --git a/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py b/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py new file mode 100644 index 00000000000..8b493b40dca --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py @@ -0,0 +1,777 @@ +# GardenSnake - a parser generator demonstration program +# +# This implements a modified version of a subset of Python: +# - only 'def', 'return' and 'if' statements +# - 'if' only has 'then' clause (no elif nor else) +# - single-quoted strings only, content in raw format +# - numbers are decimal.Decimal instances (not integers or floats) +# - no print statment; use the built-in 'print' function +# - only < > == + - / * implemented (and unary + -) +# - assignment and tuple assignment work +# - no generators of any sort +# - no ... well, no quite a lot + +# Why? I'm thinking about a new indentation-based configuration +# language for a project and wanted to figure out how to do it. Once +# I got that working I needed a way to test it out. My original AST +# was dumb so I decided to target Python's AST and compile it into +# Python code. Plus, it's pretty cool that it only took a day or so +# from sitting down with Ply to having working code. + +# This uses David Beazley's Ply from http://www.dabeaz.com/ply/ + +# This work is hereby released into the Public Domain. 
To view a copy of +# the public domain dedication, visit +# http://creativecommons.org/licenses/publicdomain/ or send a letter to +# Creative Commons, 543 Howard Street, 5th Floor, San Francisco, +# California, 94105, USA. +# +# Portions of this work are derived from Python's Grammar definition +# and may be covered under the Python copyright and license +# +# Andrew Dalke / Dalke Scientific Software, LLC +# 30 August 2006 / Cape Town, South Africa + +# Changelog: +# 30 August - added link to CC license; removed the "swapcase" encoding + +# Modifications for inclusion in PLY distribution +import sys +sys.path.insert(0, "../..") +from ply import * + +##### Lexer ###### +#import lex +import decimal + +tokens = ( + 'DEF', + 'IF', + 'NAME', + 'NUMBER', # Python decimals + 'STRING', # single quoted strings only; syntax of raw strings + 'LPAR', + 'RPAR', + 'COLON', + 'EQ', + 'ASSIGN', + 'LT', + 'GT', + 'PLUS', + 'MINUS', + 'MULT', + 'DIV', + 'RETURN', + 'WS', + 'NEWLINE', + 'COMMA', + 'SEMICOLON', + 'INDENT', + 'DEDENT', + 'ENDMARKER', +) + +#t_NUMBER = r'\d+' +# taken from decmial.py but without the leading sign + + +def t_NUMBER(t): + r"""(\d+(\.\d*)?|\.\d+)([eE][-+]? \d+)?""" + t.value = decimal.Decimal(t.value) + return t + + +def t_STRING(t): + r"'([^\\']+|\\'|\\\\)*'" # I think this is right ... + t.value = t.value[1:-1].decode("string-escape") # .swapcase() # for fun + return t + +t_COLON = r':' +t_EQ = r'==' +t_ASSIGN = r'=' +t_LT = r'<' +t_GT = r'>' +t_PLUS = r'\+' +t_MINUS = r'-' +t_MULT = r'\*' +t_DIV = r'/' +t_COMMA = r',' +t_SEMICOLON = r';' + +# Ply nicely documented how to do this. + +RESERVED = { + "def": "DEF", + "if": "IF", + "return": "RETURN", +} + + +def t_NAME(t): + r'[a-zA-Z_][a-zA-Z0-9_]*' + t.type = RESERVED.get(t.value, "NAME") + return t + +# Putting this before t_WS let it consume lines with only comments in +# them so the latter code never sees the WS part. Not consuming the +# newline. Needed for "if 1: #comment" + + +def t_comment(t): + r"[ ]*\043[^\n]*" # \043 is '#' + pass + + +# Whitespace +def t_WS(t): + r' [ ]+ ' + if t.lexer.at_line_start and t.lexer.paren_count == 0: + return t + +# Don't generate newline tokens when inside of parenthesis, eg +# a = (1, +# 2, 3) + + +def t_newline(t): + r'\n+' + t.lexer.lineno += len(t.value) + t.type = "NEWLINE" + if t.lexer.paren_count == 0: + return t + + +def t_LPAR(t): + r'\(' + t.lexer.paren_count += 1 + return t + + +def t_RPAR(t): + r'\)' + # check for underflow? should be the job of the parser + t.lexer.paren_count -= 1 + return t + + +def t_error(t): + raise SyntaxError("Unknown symbol %r" % (t.value[0],)) + print "Skipping", repr(t.value[0]) + t.lexer.skip(1) + +# I implemented INDENT / DEDENT generation as a post-processing filter + +# The original lex token stream contains WS and NEWLINE characters. +# WS will only occur before any other tokens on a line. + +# I have three filters. One tags tokens by adding two attributes. +# "must_indent" is True if the token must be indented from the +# previous code. The other is "at_line_start" which is True for WS +# and the first non-WS/non-NEWLINE on a line. It flags the check so +# see if the new line has changed indication level. 
+ +# Python's syntax has three INDENT states +# 0) no colon hence no need to indent +# 1) "if 1: go()" - simple statements have a COLON but no need for an indent +# 2) "if 1:\n go()" - complex statements have a COLON NEWLINE and must indent +NO_INDENT = 0 +MAY_INDENT = 1 +MUST_INDENT = 2 + +# only care about whitespace at the start of a line + + +def track_tokens_filter(lexer, tokens): + lexer.at_line_start = at_line_start = True + indent = NO_INDENT + saw_colon = False + for token in tokens: + token.at_line_start = at_line_start + + if token.type == "COLON": + at_line_start = False + indent = MAY_INDENT + token.must_indent = False + + elif token.type == "NEWLINE": + at_line_start = True + if indent == MAY_INDENT: + indent = MUST_INDENT + token.must_indent = False + + elif token.type == "WS": + assert token.at_line_start == True + at_line_start = True + token.must_indent = False + + else: + # A real token; only indent after COLON NEWLINE + if indent == MUST_INDENT: + token.must_indent = True + else: + token.must_indent = False + at_line_start = False + indent = NO_INDENT + + yield token + lexer.at_line_start = at_line_start + + +def _new_token(type, lineno): + tok = lex.LexToken() + tok.type = type + tok.value = None + tok.lineno = lineno + return tok + +# Synthesize a DEDENT tag + + +def DEDENT(lineno): + return _new_token("DEDENT", lineno) + +# Synthesize an INDENT tag + + +def INDENT(lineno): + return _new_token("INDENT", lineno) + + +# Track the indentation level and emit the right INDENT / DEDENT events. +def indentation_filter(tokens): + # A stack of indentation levels; will never pop item 0 + levels = [0] + token = None + depth = 0 + prev_was_ws = False + for token in tokens: + # if 1: + # print "Process", token, + # if token.at_line_start: + # print "at_line_start", + # if token.must_indent: + # print "must_indent", + # print + + # WS only occurs at the start of the line + # There may be WS followed by NEWLINE so + # only track the depth here. Don't indent/dedent + # until there's something real. + if token.type == "WS": + assert depth == 0 + depth = len(token.value) + prev_was_ws = True + # WS tokens are never passed to the parser + continue + + if token.type == "NEWLINE": + depth = 0 + if prev_was_ws or token.at_line_start: + # ignore blank lines + continue + # pass the other cases on through + yield token + continue + + # then it must be a real token (not WS, not NEWLINE) + # which can affect the indentation level + + prev_was_ws = False + if token.must_indent: + # The current depth must be larger than the previous level + if not (depth > levels[-1]): + raise IndentationError("expected an indented block") + + levels.append(depth) + yield INDENT(token.lineno) + + elif token.at_line_start: + # Must be on the same level or one of the previous levels + if depth == levels[-1]: + # At the same level + pass + elif depth > levels[-1]: + raise IndentationError( + "indentation increase but not in new block") + else: + # Back up; but only if it matches a previous level + try: + i = levels.index(depth) + except ValueError: + raise IndentationError("inconsistent indentation") + for _ in range(i + 1, len(levels)): + yield DEDENT(token.lineno) + levels.pop() + + yield token + + ### Finished processing ### + + # Must dedent any remaining levels + if len(levels) > 1: + assert token is not None + for _ in range(1, len(levels)): + yield DEDENT(token.lineno) + + +# The top-level filter adds an ENDMARKER, if requested. +# Python's grammar uses it. 
+def filter(lexer, add_endmarker=True): + token = None + tokens = iter(lexer.token, None) + tokens = track_tokens_filter(lexer, tokens) + for token in indentation_filter(tokens): + yield token + + if add_endmarker: + lineno = 1 + if token is not None: + lineno = token.lineno + yield _new_token("ENDMARKER", lineno) + +# Combine Ply and my filters into a new lexer + + +class IndentLexer(object): + + def __init__(self, debug=0, optimize=0, lextab='lextab', reflags=0): + self.lexer = lex.lex(debug=debug, optimize=optimize, + lextab=lextab, reflags=reflags) + self.token_stream = None + + def input(self, s, add_endmarker=True): + self.lexer.paren_count = 0 + self.lexer.input(s) + self.token_stream = filter(self.lexer, add_endmarker) + + def token(self): + try: + return self.token_stream.next() + except StopIteration: + return None + +########## Parser (tokens -> AST) ###### + +# also part of Ply +#import yacc + +# I use the Python AST +from compiler import ast + +# Helper function + + +def Assign(left, right): + names = [] + if isinstance(left, ast.Name): + # Single assignment on left + return ast.Assign([ast.AssName(left.name, 'OP_ASSIGN')], right) + elif isinstance(left, ast.Tuple): + # List of things - make sure they are Name nodes + names = [] + for child in left.getChildren(): + if not isinstance(child, ast.Name): + raise SyntaxError("that assignment not supported") + names.append(child.name) + ass_list = [ast.AssName(name, 'OP_ASSIGN') for name in names] + return ast.Assign([ast.AssTuple(ass_list)], right) + else: + raise SyntaxError("Can't do that yet") + + +# The grammar comments come from Python's Grammar/Grammar file + +# NB: compound_stmt in single_input is followed by extra NEWLINE! +# file_input: (NEWLINE | stmt)* ENDMARKER +def p_file_input_end(p): + """file_input_end : file_input ENDMARKER""" + p[0] = ast.Stmt(p[1]) + + +def p_file_input(p): + """file_input : file_input NEWLINE + | file_input stmt + | NEWLINE + | stmt""" + if isinstance(p[len(p) - 1], basestring): + if len(p) == 3: + p[0] = p[1] + else: + p[0] = [] # p == 2 --> only a blank line + else: + if len(p) == 3: + p[0] = p[1] + p[2] + else: + p[0] = p[1] + + +# funcdef: [decorators] 'def' NAME parameters ':' suite +# ignoring decorators +def p_funcdef(p): + "funcdef : DEF NAME parameters COLON suite" + p[0] = ast.Function(None, p[2], tuple(p[3]), (), 0, None, p[5]) + +# parameters: '(' [varargslist] ')' + + +def p_parameters(p): + """parameters : LPAR RPAR + | LPAR varargslist RPAR""" + if len(p) == 3: + p[0] = [] + else: + p[0] = p[2] + + +# varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME) | +# highly simplified +def p_varargslist(p): + """varargslist : varargslist COMMA NAME + | NAME""" + if len(p) == 4: + p[0] = p[1] + p[3] + else: + p[0] = [p[1]] + +# stmt: simple_stmt | compound_stmt + + +def p_stmt_simple(p): + """stmt : simple_stmt""" + # simple_stmt is a list + p[0] = p[1] + + +def p_stmt_compound(p): + """stmt : compound_stmt""" + p[0] = [p[1]] + +# simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE + + +def p_simple_stmt(p): + """simple_stmt : small_stmts NEWLINE + | small_stmts SEMICOLON NEWLINE""" + p[0] = p[1] + + +def p_small_stmts(p): + """small_stmts : small_stmts SEMICOLON small_stmt + | small_stmt""" + if len(p) == 4: + p[0] = p[1] + [p[3]] + else: + p[0] = [p[1]] + +# small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt | +# import_stmt | global_stmt | exec_stmt | assert_stmt + + +def p_small_stmt(p): + """small_stmt : flow_stmt + | expr_stmt""" + p[0] = 
p[1] + +# expr_stmt: testlist (augassign (yield_expr|testlist) | +# ('=' (yield_expr|testlist))*) +# augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | +# '<<=' | '>>=' | '**=' | '//=') + + +def p_expr_stmt(p): + """expr_stmt : testlist ASSIGN testlist + | testlist """ + if len(p) == 2: + # a list of expressions + p[0] = ast.Discard(p[1]) + else: + p[0] = Assign(p[1], p[3]) + + +def p_flow_stmt(p): + "flow_stmt : return_stmt" + p[0] = p[1] + +# return_stmt: 'return' [testlist] + + +def p_return_stmt(p): + "return_stmt : RETURN testlist" + p[0] = ast.Return(p[2]) + + +def p_compound_stmt(p): + """compound_stmt : if_stmt + | funcdef""" + p[0] = p[1] + + +def p_if_stmt(p): + 'if_stmt : IF test COLON suite' + p[0] = ast.If([(p[2], p[4])], None) + + +def p_suite(p): + """suite : simple_stmt + | NEWLINE INDENT stmts DEDENT""" + if len(p) == 2: + p[0] = ast.Stmt(p[1]) + else: + p[0] = ast.Stmt(p[3]) + + +def p_stmts(p): + """stmts : stmts stmt + | stmt""" + if len(p) == 3: + p[0] = p[1] + p[2] + else: + p[0] = p[1] + +# No using Python's approach because Ply supports precedence + +# comparison: expr (comp_op expr)* +# arith_expr: term (('+'|'-') term)* +# term: factor (('*'|'/'|'%'|'//') factor)* +# factor: ('+'|'-'|'~') factor | power +# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' + + +def make_lt_compare((left, right)): + return ast.Compare(left, [('<', right), ]) + + +def make_gt_compare((left, right)): + return ast.Compare(left, [('>', right), ]) + + +def make_eq_compare((left, right)): + return ast.Compare(left, [('==', right), ]) + + +binary_ops = { + "+": ast.Add, + "-": ast.Sub, + "*": ast.Mul, + "/": ast.Div, + "<": make_lt_compare, + ">": make_gt_compare, + "==": make_eq_compare, +} +unary_ops = { + "+": ast.UnaryAdd, + "-": ast.UnarySub, +} +precedence = ( + ("left", "EQ", "GT", "LT"), + ("left", "PLUS", "MINUS"), + ("left", "MULT", "DIV"), +) + + +def p_comparison(p): + """comparison : comparison PLUS comparison + | comparison MINUS comparison + | comparison MULT comparison + | comparison DIV comparison + | comparison LT comparison + | comparison EQ comparison + | comparison GT comparison + | PLUS comparison + | MINUS comparison + | power""" + if len(p) == 4: + p[0] = binary_ops[p[2]]((p[1], p[3])) + elif len(p) == 3: + p[0] = unary_ops[p[1]](p[2]) + else: + p[0] = p[1] + +# power: atom trailer* ['**' factor] +# trailers enables function calls. I only allow one level of calls +# so this is 'trailer' + + +def p_power(p): + """power : atom + | atom trailer""" + if len(p) == 2: + p[0] = p[1] + else: + if p[2][0] == "CALL": + p[0] = ast.CallFunc(p[1], p[2][1], None, None) + else: + raise AssertionError("not implemented") + + +def p_atom_name(p): + """atom : NAME""" + p[0] = ast.Name(p[1]) + + +def p_atom_number(p): + """atom : NUMBER + | STRING""" + p[0] = ast.Const(p[1]) + + +def p_atom_tuple(p): + """atom : LPAR testlist RPAR""" + p[0] = p[2] + +# trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME + + +def p_trailer(p): + "trailer : LPAR arglist RPAR" + p[0] = ("CALL", p[2]) + +# testlist: test (',' test)* [','] +# Contains shift/reduce error + + +def p_testlist(p): + """testlist : testlist_multi COMMA + | testlist_multi """ + if len(p) == 2: + p[0] = p[1] + else: + # May need to promote singleton to tuple + if isinstance(p[1], list): + p[0] = p[1] + else: + p[0] = [p[1]] + # Convert into a tuple? 
+ if isinstance(p[0], list): + p[0] = ast.Tuple(p[0]) + + +def p_testlist_multi(p): + """testlist_multi : testlist_multi COMMA test + | test""" + if len(p) == 2: + # singleton + p[0] = p[1] + else: + if isinstance(p[1], list): + p[0] = p[1] + [p[3]] + else: + # singleton -> tuple + p[0] = [p[1], p[3]] + + +# test: or_test ['if' or_test 'else' test] | lambdef +# as I don't support 'and', 'or', and 'not' this works down to 'comparison' +def p_test(p): + "test : comparison" + p[0] = p[1] + + +# arglist: (argument ',')* (argument [',']| '*' test [',' '**' test] | '**' test) +# XXX INCOMPLETE: this doesn't allow the trailing comma +def p_arglist(p): + """arglist : arglist COMMA argument + | argument""" + if len(p) == 4: + p[0] = p[1] + [p[3]] + else: + p[0] = [p[1]] + +# argument: test [gen_for] | test '=' test # Really [keyword '='] test + + +def p_argument(p): + "argument : test" + p[0] = p[1] + + +def p_error(p): + # print "Error!", repr(p) + raise SyntaxError(p) + + +class GardenSnakeParser(object): + + def __init__(self, lexer=None): + if lexer is None: + lexer = IndentLexer() + self.lexer = lexer + self.parser = yacc.yacc(start="file_input_end") + + def parse(self, code): + self.lexer.input(code) + result = self.parser.parse(lexer=self.lexer) + return ast.Module(None, result) + + +###### Code generation ###### + +from compiler import misc, syntax, pycodegen + + +class GardenSnakeCompiler(object): + + def __init__(self): + self.parser = GardenSnakeParser() + + def compile(self, code, filename=""): + tree = self.parser.parse(code) + # print tree + misc.set_filename(filename, tree) + syntax.check(tree) + gen = pycodegen.ModuleCodeGenerator(tree) + code = gen.getCode() + return code + +####### Test code ####### + +compile = GardenSnakeCompiler().compile + +code = r""" + +print('LET\'S TRY THIS \\OUT') + +#Comment here +def x(a): + print('called with',a) + if a == 1: + return 2 + if a*2 > 10: return 999 / 4 + # Another comment here + + return a+2*3 + +ints = (1, 2, + 3, 4, +5) +print('mutiline-expression', ints) + +t = 4+1/3*2+6*(9-5+1) +print('predence test; should be 34+2/3:', t, t==(34+2/3)) + +print('numbers', 1,2,3,4,5) +if 1: + 8 + a=9 + print(x(a)) + +print(x(1)) +print(x(2)) +print(x(8),'3') +print('this is decimal', 1/5) +print('BIG DECIMAL', 1.234567891234567e12345) + +""" + +# Set up the GardenSnake run-time environment + + +def print_(*args): + print "-->", " ".join(map(str, args)) + +globals()["print"] = print_ + +compiled_code = compile(code) + +exec compiled_code in globals() +print "Done" diff --git a/components/script/dom/bindings/codegen/ply/example/GardenSnake/README b/components/script/dom/bindings/codegen/ply/example/GardenSnake/README new file mode 100644 index 00000000000..4d8be2db050 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/GardenSnake/README @@ -0,0 +1,5 @@ +This example is Andrew Dalke's GardenSnake language. It shows how to process an +indentation-like language like Python. 
Further details can be found here: + +http://dalkescientific.com/writings/diary/archive/2006/08/30/gardensnake_language.html + diff --git a/components/script/dom/bindings/codegen/ply/example/README b/components/script/dom/bindings/codegen/ply/example/README new file mode 100644 index 00000000000..63519b557f1 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/README @@ -0,0 +1,10 @@ +Simple examples: + calc - Simple calculator + classcalc - Simple calculate defined as a class + +Complex examples + ansic - ANSI C grammar from K&R + BASIC - A small BASIC interpreter + GardenSnake - A simple python-like language + yply - Converts Unix yacc files to PLY programs. + diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/README b/components/script/dom/bindings/codegen/ply/example/ansic/README new file mode 100644 index 00000000000..e049d3b4e48 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/ansic/README @@ -0,0 +1,2 @@ +This example is incomplete. Was going to specify an ANSI C parser. +This is part of it. diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/clex.py b/components/script/dom/bindings/codegen/ply/example/ansic/clex.py new file mode 100644 index 00000000000..4bde1d730b0 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/ansic/clex.py @@ -0,0 +1,168 @@ +# ---------------------------------------------------------------------- +# clex.py +# +# A lexer for ANSI C. +# ---------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +import ply.lex as lex + +# Reserved words +reserved = ( + 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', + 'ELSE', 'ENUM', 'EXTERN', 'FLOAT', 'FOR', 'GOTO', 'IF', 'INT', 'LONG', 'REGISTER', + 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', 'SWITCH', 'TYPEDEF', + 'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WHILE', +) + +tokens = reserved + ( + # Literals (identifier, integer constant, float constant, string constant, + # char const) + 'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST', + + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', + + # Increment/decrement (++,--) + 'PLUSPLUS', 'MINUSMINUS', + + # Structure dereference (->) + 'ARROW', + + # Conditional operator (?) + 'CONDOP', + + # Delimeters ( ) [ ] { } , . ; : + 'LPAREN', 'RPAREN', + 'LBRACKET', 'RBRACKET', + 'LBRACE', 'RBRACE', + 'COMMA', 'PERIOD', 'SEMI', 'COLON', + + # Ellipsis (...) + 'ELLIPSIS', +) + +# Completely ignored characters +t_ignore = ' \t\x0c' + +# Newlines + + +def t_NEWLINE(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + +# Operators +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_MOD = r'%' +t_OR = r'\|' +t_AND = r'&' +t_NOT = r'~' +t_XOR = r'\^' +t_LSHIFT = r'<<' +t_RSHIFT = r'>>' +t_LOR = r'\|\|' +t_LAND = r'&&' +t_LNOT = r'!' 
+t_LT = r'<' +t_GT = r'>' +t_LE = r'<=' +t_GE = r'>=' +t_EQ = r'==' +t_NE = r'!=' + +# Assignment operators + +t_EQUALS = r'=' +t_TIMESEQUAL = r'\*=' +t_DIVEQUAL = r'/=' +t_MODEQUAL = r'%=' +t_PLUSEQUAL = r'\+=' +t_MINUSEQUAL = r'-=' +t_LSHIFTEQUAL = r'<<=' +t_RSHIFTEQUAL = r'>>=' +t_ANDEQUAL = r'&=' +t_OREQUAL = r'\|=' +t_XOREQUAL = r'\^=' + +# Increment/decrement +t_PLUSPLUS = r'\+\+' +t_MINUSMINUS = r'--' + +# -> +t_ARROW = r'->' + +# ? +t_CONDOP = r'\?' + +# Delimeters +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LBRACKET = r'\[' +t_RBRACKET = r'\]' +t_LBRACE = r'\{' +t_RBRACE = r'\}' +t_COMMA = r',' +t_PERIOD = r'\.' +t_SEMI = r';' +t_COLON = r':' +t_ELLIPSIS = r'\.\.\.' + +# Identifiers and reserved words + +reserved_map = {} +for r in reserved: + reserved_map[r.lower()] = r + + +def t_ID(t): + r'[A-Za-z_][\w_]*' + t.type = reserved_map.get(t.value, "ID") + return t + +# Integer literal +t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' + +# Floating literal +t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +t_SCONST = r'\"([^\\\n]|(\\.))*?\"' + +# Character constant 'c' or L'c' +t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\'' + +# Comments + + +def t_comment(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + +# Preprocessor directive (ignored) + + +def t_preprocessor(t): + r'\#(.)*?\n' + t.lexer.lineno += 1 + + +def t_error(t): + print("Illegal character %s" % repr(t.value[0])) + t.lexer.skip(1) + +lexer = lex.lex() +if __name__ == "__main__": + lex.runmain(lexer) diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py b/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py new file mode 100644 index 00000000000..5fe9bce0428 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py @@ -0,0 +1,1048 @@ +# ----------------------------------------------------------------------------- +# cparse.py +# +# Simple parser for ANSI C. Based on the grammar in K&R, 2nd Ed. 
+# ----------------------------------------------------------------------------- + +import sys +import clex +import ply.yacc as yacc + +# Get the token map +tokens = clex.tokens + +# translation-unit: + + +def p_translation_unit_1(t): + 'translation_unit : external_declaration' + pass + + +def p_translation_unit_2(t): + 'translation_unit : translation_unit external_declaration' + pass + +# external-declaration: + + +def p_external_declaration_1(t): + 'external_declaration : function_definition' + pass + + +def p_external_declaration_2(t): + 'external_declaration : declaration' + pass + +# function-definition: + + +def p_function_definition_1(t): + 'function_definition : declaration_specifiers declarator declaration_list compound_statement' + pass + + +def p_function_definition_2(t): + 'function_definition : declarator declaration_list compound_statement' + pass + + +def p_function_definition_3(t): + 'function_definition : declarator compound_statement' + pass + + +def p_function_definition_4(t): + 'function_definition : declaration_specifiers declarator compound_statement' + pass + +# declaration: + + +def p_declaration_1(t): + 'declaration : declaration_specifiers init_declarator_list SEMI' + pass + + +def p_declaration_2(t): + 'declaration : declaration_specifiers SEMI' + pass + +# declaration-list: + + +def p_declaration_list_1(t): + 'declaration_list : declaration' + pass + + +def p_declaration_list_2(t): + 'declaration_list : declaration_list declaration ' + pass + +# declaration-specifiers + + +def p_declaration_specifiers_1(t): + 'declaration_specifiers : storage_class_specifier declaration_specifiers' + pass + + +def p_declaration_specifiers_2(t): + 'declaration_specifiers : type_specifier declaration_specifiers' + pass + + +def p_declaration_specifiers_3(t): + 'declaration_specifiers : type_qualifier declaration_specifiers' + pass + + +def p_declaration_specifiers_4(t): + 'declaration_specifiers : storage_class_specifier' + pass + + +def p_declaration_specifiers_5(t): + 'declaration_specifiers : type_specifier' + pass + + +def p_declaration_specifiers_6(t): + 'declaration_specifiers : type_qualifier' + pass + +# storage-class-specifier + + +def p_storage_class_specifier(t): + '''storage_class_specifier : AUTO + | REGISTER + | STATIC + | EXTERN + | TYPEDEF + ''' + pass + +# type-specifier: + + +def p_type_specifier(t): + '''type_specifier : VOID + | CHAR + | SHORT + | INT + | LONG + | FLOAT + | DOUBLE + | SIGNED + | UNSIGNED + | struct_or_union_specifier + | enum_specifier + | TYPEID + ''' + pass + +# type-qualifier: + + +def p_type_qualifier(t): + '''type_qualifier : CONST + | VOLATILE''' + pass + +# struct-or-union-specifier + + +def p_struct_or_union_specifier_1(t): + 'struct_or_union_specifier : struct_or_union ID LBRACE struct_declaration_list RBRACE' + pass + + +def p_struct_or_union_specifier_2(t): + 'struct_or_union_specifier : struct_or_union LBRACE struct_declaration_list RBRACE' + pass + + +def p_struct_or_union_specifier_3(t): + 'struct_or_union_specifier : struct_or_union ID' + pass + +# struct-or-union: + + +def p_struct_or_union(t): + '''struct_or_union : STRUCT + | UNION + ''' + pass + +# struct-declaration-list: + + +def p_struct_declaration_list_1(t): + 'struct_declaration_list : struct_declaration' + pass + + +def p_struct_declaration_list_2(t): + 'struct_declaration_list : struct_declaration_list struct_declaration' + pass + +# init-declarator-list: + + +def p_init_declarator_list_1(t): + 'init_declarator_list : init_declarator' + pass + + +def 
p_init_declarator_list_2(t): + 'init_declarator_list : init_declarator_list COMMA init_declarator' + pass + +# init-declarator + + +def p_init_declarator_1(t): + 'init_declarator : declarator' + pass + + +def p_init_declarator_2(t): + 'init_declarator : declarator EQUALS initializer' + pass + +# struct-declaration: + + +def p_struct_declaration(t): + 'struct_declaration : specifier_qualifier_list struct_declarator_list SEMI' + pass + +# specifier-qualifier-list: + + +def p_specifier_qualifier_list_1(t): + 'specifier_qualifier_list : type_specifier specifier_qualifier_list' + pass + + +def p_specifier_qualifier_list_2(t): + 'specifier_qualifier_list : type_specifier' + pass + + +def p_specifier_qualifier_list_3(t): + 'specifier_qualifier_list : type_qualifier specifier_qualifier_list' + pass + + +def p_specifier_qualifier_list_4(t): + 'specifier_qualifier_list : type_qualifier' + pass + +# struct-declarator-list: + + +def p_struct_declarator_list_1(t): + 'struct_declarator_list : struct_declarator' + pass + + +def p_struct_declarator_list_2(t): + 'struct_declarator_list : struct_declarator_list COMMA struct_declarator' + pass + +# struct-declarator: + + +def p_struct_declarator_1(t): + 'struct_declarator : declarator' + pass + + +def p_struct_declarator_2(t): + 'struct_declarator : declarator COLON constant_expression' + pass + + +def p_struct_declarator_3(t): + 'struct_declarator : COLON constant_expression' + pass + +# enum-specifier: + + +def p_enum_specifier_1(t): + 'enum_specifier : ENUM ID LBRACE enumerator_list RBRACE' + pass + + +def p_enum_specifier_2(t): + 'enum_specifier : ENUM LBRACE enumerator_list RBRACE' + pass + + +def p_enum_specifier_3(t): + 'enum_specifier : ENUM ID' + pass + +# enumerator_list: + + +def p_enumerator_list_1(t): + 'enumerator_list : enumerator' + pass + + +def p_enumerator_list_2(t): + 'enumerator_list : enumerator_list COMMA enumerator' + pass + +# enumerator: + + +def p_enumerator_1(t): + 'enumerator : ID' + pass + + +def p_enumerator_2(t): + 'enumerator : ID EQUALS constant_expression' + pass + +# declarator: + + +def p_declarator_1(t): + 'declarator : pointer direct_declarator' + pass + + +def p_declarator_2(t): + 'declarator : direct_declarator' + pass + +# direct-declarator: + + +def p_direct_declarator_1(t): + 'direct_declarator : ID' + pass + + +def p_direct_declarator_2(t): + 'direct_declarator : LPAREN declarator RPAREN' + pass + + +def p_direct_declarator_3(t): + 'direct_declarator : direct_declarator LBRACKET constant_expression_opt RBRACKET' + pass + + +def p_direct_declarator_4(t): + 'direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN ' + pass + + +def p_direct_declarator_5(t): + 'direct_declarator : direct_declarator LPAREN identifier_list RPAREN ' + pass + + +def p_direct_declarator_6(t): + 'direct_declarator : direct_declarator LPAREN RPAREN ' + pass + +# pointer: + + +def p_pointer_1(t): + 'pointer : TIMES type_qualifier_list' + pass + + +def p_pointer_2(t): + 'pointer : TIMES' + pass + + +def p_pointer_3(t): + 'pointer : TIMES type_qualifier_list pointer' + pass + + +def p_pointer_4(t): + 'pointer : TIMES pointer' + pass + +# type-qualifier-list: + + +def p_type_qualifier_list_1(t): + 'type_qualifier_list : type_qualifier' + pass + + +def p_type_qualifier_list_2(t): + 'type_qualifier_list : type_qualifier_list type_qualifier' + pass + +# parameter-type-list: + + +def p_parameter_type_list_1(t): + 'parameter_type_list : parameter_list' + pass + + +def p_parameter_type_list_2(t): + 'parameter_type_list : parameter_list 
COMMA ELLIPSIS' + pass + +# parameter-list: + + +def p_parameter_list_1(t): + 'parameter_list : parameter_declaration' + pass + + +def p_parameter_list_2(t): + 'parameter_list : parameter_list COMMA parameter_declaration' + pass + +# parameter-declaration: + + +def p_parameter_declaration_1(t): + 'parameter_declaration : declaration_specifiers declarator' + pass + + +def p_parameter_declaration_2(t): + 'parameter_declaration : declaration_specifiers abstract_declarator_opt' + pass + +# identifier-list: + + +def p_identifier_list_1(t): + 'identifier_list : ID' + pass + + +def p_identifier_list_2(t): + 'identifier_list : identifier_list COMMA ID' + pass + +# initializer: + + +def p_initializer_1(t): + 'initializer : assignment_expression' + pass + + +def p_initializer_2(t): + '''initializer : LBRACE initializer_list RBRACE + | LBRACE initializer_list COMMA RBRACE''' + pass + +# initializer-list: + + +def p_initializer_list_1(t): + 'initializer_list : initializer' + pass + + +def p_initializer_list_2(t): + 'initializer_list : initializer_list COMMA initializer' + pass + +# type-name: + + +def p_type_name(t): + 'type_name : specifier_qualifier_list abstract_declarator_opt' + pass + + +def p_abstract_declarator_opt_1(t): + 'abstract_declarator_opt : empty' + pass + + +def p_abstract_declarator_opt_2(t): + 'abstract_declarator_opt : abstract_declarator' + pass + +# abstract-declarator: + + +def p_abstract_declarator_1(t): + 'abstract_declarator : pointer ' + pass + + +def p_abstract_declarator_2(t): + 'abstract_declarator : pointer direct_abstract_declarator' + pass + + +def p_abstract_declarator_3(t): + 'abstract_declarator : direct_abstract_declarator' + pass + +# direct-abstract-declarator: + + +def p_direct_abstract_declarator_1(t): + 'direct_abstract_declarator : LPAREN abstract_declarator RPAREN' + pass + + +def p_direct_abstract_declarator_2(t): + 'direct_abstract_declarator : direct_abstract_declarator LBRACKET constant_expression_opt RBRACKET' + pass + + +def p_direct_abstract_declarator_3(t): + 'direct_abstract_declarator : LBRACKET constant_expression_opt RBRACKET' + pass + + +def p_direct_abstract_declarator_4(t): + 'direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN' + pass + + +def p_direct_abstract_declarator_5(t): + 'direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN' + pass + +# Optional fields in abstract declarators + + +def p_constant_expression_opt_1(t): + 'constant_expression_opt : empty' + pass + + +def p_constant_expression_opt_2(t): + 'constant_expression_opt : constant_expression' + pass + + +def p_parameter_type_list_opt_1(t): + 'parameter_type_list_opt : empty' + pass + + +def p_parameter_type_list_opt_2(t): + 'parameter_type_list_opt : parameter_type_list' + pass + +# statement: + + +def p_statement(t): + ''' + statement : labeled_statement + | expression_statement + | compound_statement + | selection_statement + | iteration_statement + | jump_statement + ''' + pass + +# labeled-statement: + + +def p_labeled_statement_1(t): + 'labeled_statement : ID COLON statement' + pass + + +def p_labeled_statement_2(t): + 'labeled_statement : CASE constant_expression COLON statement' + pass + + +def p_labeled_statement_3(t): + 'labeled_statement : DEFAULT COLON statement' + pass + +# expression-statement: + + +def p_expression_statement(t): + 'expression_statement : expression_opt SEMI' + pass + +# compound-statement: + + +def p_compound_statement_1(t): + 'compound_statement : LBRACE declaration_list statement_list 
RBRACE' + pass + + +def p_compound_statement_2(t): + 'compound_statement : LBRACE statement_list RBRACE' + pass + + +def p_compound_statement_3(t): + 'compound_statement : LBRACE declaration_list RBRACE' + pass + + +def p_compound_statement_4(t): + 'compound_statement : LBRACE RBRACE' + pass + +# statement-list: + + +def p_statement_list_1(t): + 'statement_list : statement' + pass + + +def p_statement_list_2(t): + 'statement_list : statement_list statement' + pass + +# selection-statement + + +def p_selection_statement_1(t): + 'selection_statement : IF LPAREN expression RPAREN statement' + pass + + +def p_selection_statement_2(t): + 'selection_statement : IF LPAREN expression RPAREN statement ELSE statement ' + pass + + +def p_selection_statement_3(t): + 'selection_statement : SWITCH LPAREN expression RPAREN statement ' + pass + +# iteration_statement: + + +def p_iteration_statement_1(t): + 'iteration_statement : WHILE LPAREN expression RPAREN statement' + pass + + +def p_iteration_statement_2(t): + 'iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement ' + pass + + +def p_iteration_statement_3(t): + 'iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI' + pass + +# jump_statement: + + +def p_jump_statement_1(t): + 'jump_statement : GOTO ID SEMI' + pass + + +def p_jump_statement_2(t): + 'jump_statement : CONTINUE SEMI' + pass + + +def p_jump_statement_3(t): + 'jump_statement : BREAK SEMI' + pass + + +def p_jump_statement_4(t): + 'jump_statement : RETURN expression_opt SEMI' + pass + + +def p_expression_opt_1(t): + 'expression_opt : empty' + pass + + +def p_expression_opt_2(t): + 'expression_opt : expression' + pass + +# expression: + + +def p_expression_1(t): + 'expression : assignment_expression' + pass + + +def p_expression_2(t): + 'expression : expression COMMA assignment_expression' + pass + +# assigment_expression: + + +def p_assignment_expression_1(t): + 'assignment_expression : conditional_expression' + pass + + +def p_assignment_expression_2(t): + 'assignment_expression : unary_expression assignment_operator assignment_expression' + pass + +# assignment_operator: + + +def p_assignment_operator(t): + ''' + assignment_operator : EQUALS + | TIMESEQUAL + | DIVEQUAL + | MODEQUAL + | PLUSEQUAL + | MINUSEQUAL + | LSHIFTEQUAL + | RSHIFTEQUAL + | ANDEQUAL + | OREQUAL + | XOREQUAL + ''' + pass + +# conditional-expression + + +def p_conditional_expression_1(t): + 'conditional_expression : logical_or_expression' + pass + + +def p_conditional_expression_2(t): + 'conditional_expression : logical_or_expression CONDOP expression COLON conditional_expression ' + pass + +# constant-expression + + +def p_constant_expression(t): + 'constant_expression : conditional_expression' + pass + +# logical-or-expression + + +def p_logical_or_expression_1(t): + 'logical_or_expression : logical_and_expression' + pass + + +def p_logical_or_expression_2(t): + 'logical_or_expression : logical_or_expression LOR logical_and_expression' + pass + +# logical-and-expression + + +def p_logical_and_expression_1(t): + 'logical_and_expression : inclusive_or_expression' + pass + + +def p_logical_and_expression_2(t): + 'logical_and_expression : logical_and_expression LAND inclusive_or_expression' + pass + +# inclusive-or-expression: + + +def p_inclusive_or_expression_1(t): + 'inclusive_or_expression : exclusive_or_expression' + pass + + +def p_inclusive_or_expression_2(t): + 'inclusive_or_expression : inclusive_or_expression OR exclusive_or_expression' + pass 
+ +# exclusive-or-expression: + + +def p_exclusive_or_expression_1(t): + 'exclusive_or_expression : and_expression' + pass + + +def p_exclusive_or_expression_2(t): + 'exclusive_or_expression : exclusive_or_expression XOR and_expression' + pass + +# AND-expression + + +def p_and_expression_1(t): + 'and_expression : equality_expression' + pass + + +def p_and_expression_2(t): + 'and_expression : and_expression AND equality_expression' + pass + + +# equality-expression: +def p_equality_expression_1(t): + 'equality_expression : relational_expression' + pass + + +def p_equality_expression_2(t): + 'equality_expression : equality_expression EQ relational_expression' + pass + + +def p_equality_expression_3(t): + 'equality_expression : equality_expression NE relational_expression' + pass + + +# relational-expression: +def p_relational_expression_1(t): + 'relational_expression : shift_expression' + pass + + +def p_relational_expression_2(t): + 'relational_expression : relational_expression LT shift_expression' + pass + + +def p_relational_expression_3(t): + 'relational_expression : relational_expression GT shift_expression' + pass + + +def p_relational_expression_4(t): + 'relational_expression : relational_expression LE shift_expression' + pass + + +def p_relational_expression_5(t): + 'relational_expression : relational_expression GE shift_expression' + pass + +# shift-expression + + +def p_shift_expression_1(t): + 'shift_expression : additive_expression' + pass + + +def p_shift_expression_2(t): + 'shift_expression : shift_expression LSHIFT additive_expression' + pass + + +def p_shift_expression_3(t): + 'shift_expression : shift_expression RSHIFT additive_expression' + pass + +# additive-expression + + +def p_additive_expression_1(t): + 'additive_expression : multiplicative_expression' + pass + + +def p_additive_expression_2(t): + 'additive_expression : additive_expression PLUS multiplicative_expression' + pass + + +def p_additive_expression_3(t): + 'additive_expression : additive_expression MINUS multiplicative_expression' + pass + +# multiplicative-expression + + +def p_multiplicative_expression_1(t): + 'multiplicative_expression : cast_expression' + pass + + +def p_multiplicative_expression_2(t): + 'multiplicative_expression : multiplicative_expression TIMES cast_expression' + pass + + +def p_multiplicative_expression_3(t): + 'multiplicative_expression : multiplicative_expression DIVIDE cast_expression' + pass + + +def p_multiplicative_expression_4(t): + 'multiplicative_expression : multiplicative_expression MOD cast_expression' + pass + +# cast-expression: + + +def p_cast_expression_1(t): + 'cast_expression : unary_expression' + pass + + +def p_cast_expression_2(t): + 'cast_expression : LPAREN type_name RPAREN cast_expression' + pass + +# unary-expression: + + +def p_unary_expression_1(t): + 'unary_expression : postfix_expression' + pass + + +def p_unary_expression_2(t): + 'unary_expression : PLUSPLUS unary_expression' + pass + + +def p_unary_expression_3(t): + 'unary_expression : MINUSMINUS unary_expression' + pass + + +def p_unary_expression_4(t): + 'unary_expression : unary_operator cast_expression' + pass + + +def p_unary_expression_5(t): + 'unary_expression : SIZEOF unary_expression' + pass + + +def p_unary_expression_6(t): + 'unary_expression : SIZEOF LPAREN type_name RPAREN' + pass + +# unary-operator + + +def p_unary_operator(t): + '''unary_operator : AND + | TIMES + | PLUS + | MINUS + | NOT + | LNOT ''' + pass + +# postfix-expression: + + +def p_postfix_expression_1(t): + 
'postfix_expression : primary_expression' + pass + + +def p_postfix_expression_2(t): + 'postfix_expression : postfix_expression LBRACKET expression RBRACKET' + pass + + +def p_postfix_expression_3(t): + 'postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN' + pass + + +def p_postfix_expression_4(t): + 'postfix_expression : postfix_expression LPAREN RPAREN' + pass + + +def p_postfix_expression_5(t): + 'postfix_expression : postfix_expression PERIOD ID' + pass + + +def p_postfix_expression_6(t): + 'postfix_expression : postfix_expression ARROW ID' + pass + + +def p_postfix_expression_7(t): + 'postfix_expression : postfix_expression PLUSPLUS' + pass + + +def p_postfix_expression_8(t): + 'postfix_expression : postfix_expression MINUSMINUS' + pass + +# primary-expression: + + +def p_primary_expression(t): + '''primary_expression : ID + | constant + | SCONST + | LPAREN expression RPAREN''' + pass + +# argument-expression-list: + + +def p_argument_expression_list(t): + '''argument_expression_list : assignment_expression + | argument_expression_list COMMA assignment_expression''' + pass + +# constant: + + +def p_constant(t): + '''constant : ICONST + | FCONST + | CCONST''' + pass + + +def p_empty(t): + 'empty : ' + pass + + +def p_error(t): + print("Whoa. We're hosed") + +import profile +# Build the grammar + +yacc.yacc() +#yacc.yacc(method='LALR',write_tables=False,debug=False) + +#profile.run("yacc.yacc(method='LALR')") diff --git a/components/script/dom/bindings/codegen/ply/example/calc/calc.py b/components/script/dom/bindings/codegen/ply/example/calc/calc.py new file mode 100644 index 00000000000..824c3d7d0a2 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/calc/calc.py @@ -0,0 +1,123 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. This is from O'Reilly's +# "Lex and Yacc", p. 63. 
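+# A sample interactive session (illustrative):
+#   calc > x = 3
+#   calc > x * (4 + 2)
+#   18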
+# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +tokens = ( + 'NAME', 'NUMBER', +) + +literals = ['=', '+', '-', '*', '/', '(', ')'] + +# Tokens + +t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + +def t_NUMBER(t): + r'\d+' + t.value = int(t.value) + return t + +t_ignore = " \t" + + +def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + +def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex() + +# Parsing rules + +precedence = ( + ('left', '+', '-'), + ('left', '*', '/'), + ('right', 'UMINUS'), +) + +# dictionary of names +names = {} + + +def p_statement_assign(p): + 'statement : NAME "=" expression' + names[p[1]] = p[3] + + +def p_statement_expr(p): + 'statement : expression' + print(p[1]) + + +def p_expression_binop(p): + '''expression : expression '+' expression + | expression '-' expression + | expression '*' expression + | expression '/' expression''' + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + + +def p_expression_uminus(p): + "expression : '-' expression %prec UMINUS" + p[0] = -p[2] + + +def p_expression_group(p): + "expression : '(' expression ')'" + p[0] = p[2] + + +def p_expression_number(p): + "expression : NUMBER" + p[0] = p[1] + + +def p_expression_name(p): + "expression : NAME" + try: + p[0] = names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + +def p_error(p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + +import ply.yacc as yacc +yacc.yacc() + +while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(s) diff --git a/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py b/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py new file mode 100644 index 00000000000..06831e2ca56 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py @@ -0,0 +1,129 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# This example shows how to run the parser in a debugging mode +# with output routed to a logging object. 
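+# Debug output goes through the standard logging module: yacc.parse() is
+# called below with debug=logging.getLogger(), and the logging.basicConfig()
+# call routes the messages to parselog.txt.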
+# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +tokens = ( + 'NAME', 'NUMBER', +) + +literals = ['=', '+', '-', '*', '/', '(', ')'] + +# Tokens + +t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + +def t_NUMBER(t): + r'\d+' + t.value = int(t.value) + return t + +t_ignore = " \t" + + +def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + +def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex() + +# Parsing rules + +precedence = ( + ('left', '+', '-'), + ('left', '*', '/'), + ('right', 'UMINUS'), +) + +# dictionary of names +names = {} + + +def p_statement_assign(p): + 'statement : NAME "=" expression' + names[p[1]] = p[3] + + +def p_statement_expr(p): + 'statement : expression' + print(p[1]) + + +def p_expression_binop(p): + '''expression : expression '+' expression + | expression '-' expression + | expression '*' expression + | expression '/' expression''' + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + + +def p_expression_uminus(p): + "expression : '-' expression %prec UMINUS" + p[0] = -p[2] + + +def p_expression_group(p): + "expression : '(' expression ')'" + p[0] = p[2] + + +def p_expression_number(p): + "expression : NUMBER" + p[0] = p[1] + + +def p_expression_name(p): + "expression : NAME" + try: + p[0] = names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + +def p_error(p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + +import ply.yacc as yacc +yacc.yacc() + +import logging +logging.basicConfig( + level=logging.INFO, + filename="parselog.txt" +) + +while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(s, debug=logging.getLogger()) diff --git a/components/script/dom/bindings/codegen/ply/example/calceof/calc.py b/components/script/dom/bindings/codegen/ply/example/calceof/calc.py new file mode 100644 index 00000000000..22b39a41a86 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/calceof/calc.py @@ -0,0 +1,132 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. Asks the user for more input and +# demonstrates the use of the t_eof() rule. +# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +tokens = ( + 'NAME', 'NUMBER', +) + +literals = ['=', '+', '-', '*', '/', '(', ')'] + +# Tokens + +t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + +def t_NUMBER(t): + r'\d+' + t.value = int(t.value) + return t + +t_ignore = " \t" + + +def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + +def t_eof(t): + more = raw_input('... 
') + if more: + t.lexer.input(more + '\n') + return t.lexer.token() + else: + return None + + +def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex() + +# Parsing rules + +precedence = ( + ('left', '+', '-'), + ('left', '*', '/'), + ('right', 'UMINUS'), +) + +# dictionary of names +names = {} + + +def p_statement_assign(p): + 'statement : NAME "=" expression' + names[p[1]] = p[3] + + +def p_statement_expr(p): + 'statement : expression' + print(p[1]) + + +def p_expression_binop(p): + '''expression : expression '+' expression + | expression '-' expression + | expression '*' expression + | expression '/' expression''' + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + + +def p_expression_uminus(p): + "expression : '-' expression %prec UMINUS" + p[0] = -p[2] + + +def p_expression_group(p): + "expression : '(' expression ')'" + p[0] = p[2] + + +def p_expression_number(p): + "expression : NUMBER" + p[0] = p[1] + + +def p_expression_name(p): + "expression : NAME" + try: + p[0] = names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + +def p_error(p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + +import ply.yacc as yacc +yacc.yacc() + +while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(s + '\n') diff --git a/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py b/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py new file mode 100755 index 00000000000..ada4afd426c --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python + +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. This is from O'Reilly's +# "Lex and Yacc", p. 63. 
+# +# Class-based example contributed to PLY by David McNab +# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +import ply.lex as lex +import ply.yacc as yacc +import os + + +class Parser: + """ + Base class for a lexer/parser that has the rules defined as methods + """ + tokens = () + precedence = () + + def __init__(self, **kw): + self.debug = kw.get('debug', 0) + self.names = {} + try: + modname = os.path.split(os.path.splitext(__file__)[0])[ + 1] + "_" + self.__class__.__name__ + except: + modname = "parser" + "_" + self.__class__.__name__ + self.debugfile = modname + ".dbg" + self.tabmodule = modname + "_" + "parsetab" + # print self.debugfile, self.tabmodule + + # Build the lexer and parser + lex.lex(module=self, debug=self.debug) + yacc.yacc(module=self, + debug=self.debug, + debugfile=self.debugfile, + tabmodule=self.tabmodule) + + def run(self): + while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(s) + + +class Calc(Parser): + + tokens = ( + 'NAME', 'NUMBER', + 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS', + 'LPAREN', 'RPAREN', + ) + + # Tokens + + t_PLUS = r'\+' + t_MINUS = r'-' + t_EXP = r'\*\*' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_EQUALS = r'=' + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + def t_NUMBER(self, t): + r'\d+' + try: + t.value = int(t.value) + except ValueError: + print("Integer value too large %s" % t.value) + t.value = 0 + # print "parsed number %s" % repr(t.value) + return t + + t_ignore = " \t" + + def t_newline(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + def t_error(self, t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + + # Parsing rules + + precedence = ( + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('left', 'EXP'), + ('right', 'UMINUS'), + ) + + def p_statement_assign(self, p): + 'statement : NAME EQUALS expression' + self.names[p[1]] = p[3] + + def p_statement_expr(self, p): + 'statement : expression' + print(p[1]) + + def p_expression_binop(self, p): + """ + expression : expression PLUS expression + | expression MINUS expression + | expression TIMES expression + | expression DIVIDE expression + | expression EXP expression + """ + # print [repr(p[i]) for i in range(0,4)] + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + elif p[2] == '**': + p[0] = p[1] ** p[3] + + def p_expression_uminus(self, p): + 'expression : MINUS expression %prec UMINUS' + p[0] = -p[2] + + def p_expression_group(self, p): + 'expression : LPAREN expression RPAREN' + p[0] = p[2] + + def p_expression_number(self, p): + 'expression : NUMBER' + p[0] = p[1] + + def p_expression_name(self, p): + 'expression : NAME' + try: + p[0] = self.names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + def p_error(self, p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + +if __name__ == '__main__': + calc = Calc() + calc.run() diff --git a/components/script/dom/bindings/codegen/ply/example/cleanup.sh b/components/script/dom/bindings/codegen/ply/example/cleanup.sh new file mode 100755 index 00000000000..3e115f41c42 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/cleanup.sh @@ -0,0 +1,2 @@ +#!/bin/sh +rm -f */*.pyc 
*/parsetab.py */parser.out */*~ */*.class diff --git a/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py b/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py new file mode 100644 index 00000000000..6031b058130 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py @@ -0,0 +1,132 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# A calculator parser that makes use of closures. The function make_calculator() +# returns a function that accepts an input string and returns a result. All +# lexing rules, parsing rules, and internal state are held inside the function. +# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +# Make a calculator function + + +def make_calculator(): + import ply.lex as lex + import ply.yacc as yacc + + # ------- Internal calculator state + + variables = {} # Dictionary of stored variables + + # ------- Calculator tokenizing rules + + tokens = ( + 'NAME', 'NUMBER', + ) + + literals = ['=', '+', '-', '*', '/', '(', ')'] + + t_ignore = " \t" + + t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + def t_NUMBER(t): + r'\d+' + t.value = int(t.value) + return t + + def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + + # Build the lexer + lexer = lex.lex() + + # ------- Calculator parsing rules + + precedence = ( + ('left', '+', '-'), + ('left', '*', '/'), + ('right', 'UMINUS'), + ) + + def p_statement_assign(p): + 'statement : NAME "=" expression' + variables[p[1]] = p[3] + p[0] = None + + def p_statement_expr(p): + 'statement : expression' + p[0] = p[1] + + def p_expression_binop(p): + '''expression : expression '+' expression + | expression '-' expression + | expression '*' expression + | expression '/' expression''' + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + + def p_expression_uminus(p): + "expression : '-' expression %prec UMINUS" + p[0] = -p[2] + + def p_expression_group(p): + "expression : '(' expression ')'" + p[0] = p[2] + + def p_expression_number(p): + "expression : NUMBER" + p[0] = p[1] + + def p_expression_name(p): + "expression : NAME" + try: + p[0] = variables[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + def p_error(p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + + # Build the parser + parser = yacc.yacc() + + # ------- Input function + + def input(text): + result = parser.parse(text, lexer=lexer) + return result + + return input + +# Make a calculator object and use it +calc = make_calculator() + +while True: + try: + s = raw_input("calc > ") + except EOFError: + break + r = calc(s) + if r: + print(r) diff --git a/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py b/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py new file mode 100644 index 00000000000..32da745677c --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py @@ -0,0 +1,48 @@ +# ----------------------------------------------------------------------------- +# hedit.py +# +# Paring of Fortran H Edit descriptions (Contributed by Pearu Peterson) +# +# These tokens can't be easily tokenized because they are of 
the following +# form: +# +# nHc1...cn +# +# where n is a positive integer and c1 ... cn are characters. +# +# This example shows how to modify the state of the lexer to parse +# such tokens +# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + + +tokens = ( + 'H_EDIT_DESCRIPTOR', +) + +# Tokens +t_ignore = " \t\n" + + +def t_H_EDIT_DESCRIPTOR(t): + r"\d+H.*" # This grabs all of the remaining text + i = t.value.index('H') + n = eval(t.value[:i]) + + # Adjust the tokenizing position + t.lexer.lexpos -= len(t.value) - (i + 1 + n) + + t.value = t.value[i + 1:i + 1 + n] + return t + + +def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex() +lex.runmain() diff --git a/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py b/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py new file mode 100755 index 00000000000..43c9506a8aa --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python + +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. This is from O'Reilly's +# "Lex and Yacc", p. 63. +# +# Class-based example contributed to PLY by David McNab. +# +# Modified to use new-style classes. Test case. +# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +import ply.lex as lex +import ply.yacc as yacc +import os + + +class Parser(object): + """ + Base class for a lexer/parser that has the rules defined as methods + """ + tokens = () + precedence = () + + def __init__(self, **kw): + self.debug = kw.get('debug', 0) + self.names = {} + try: + modname = os.path.split(os.path.splitext(__file__)[0])[ + 1] + "_" + self.__class__.__name__ + except: + modname = "parser" + "_" + self.__class__.__name__ + self.debugfile = modname + ".dbg" + self.tabmodule = modname + "_" + "parsetab" + # print self.debugfile, self.tabmodule + + # Build the lexer and parser + lex.lex(module=self, debug=self.debug) + yacc.yacc(module=self, + debug=self.debug, + debugfile=self.debugfile, + tabmodule=self.tabmodule) + + def run(self): + while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(s) + + +class Calc(Parser): + + tokens = ( + 'NAME', 'NUMBER', + 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS', + 'LPAREN', 'RPAREN', + ) + + # Tokens + + t_PLUS = r'\+' + t_MINUS = r'-' + t_EXP = r'\*\*' + t_TIMES = r'\*' + t_DIVIDE = r'/' + t_EQUALS = r'=' + t_LPAREN = r'\(' + t_RPAREN = r'\)' + t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + def t_NUMBER(self, t): + r'\d+' + try: + t.value = int(t.value) + except ValueError: + print("Integer value too large %s" % t.value) + t.value = 0 + # print "parsed number %s" % repr(t.value) + return t + + t_ignore = " \t" + + def t_newline(self, t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + def t_error(self, t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + + # Parsing rules + + precedence = ( + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('left', 'EXP'), + ('right', 'UMINUS'), + ) + + def p_statement_assign(self, p): + 'statement : NAME EQUALS expression' + self.names[p[1]] = p[3] + + def p_statement_expr(self, p): + 'statement : expression' + 
print(p[1]) + + def p_expression_binop(self, p): + """ + expression : expression PLUS expression + | expression MINUS expression + | expression TIMES expression + | expression DIVIDE expression + | expression EXP expression + """ + # print [repr(p[i]) for i in range(0,4)] + if p[2] == '+': + p[0] = p[1] + p[3] + elif p[2] == '-': + p[0] = p[1] - p[3] + elif p[2] == '*': + p[0] = p[1] * p[3] + elif p[2] == '/': + p[0] = p[1] / p[3] + elif p[2] == '**': + p[0] = p[1] ** p[3] + + def p_expression_uminus(self, p): + 'expression : MINUS expression %prec UMINUS' + p[0] = -p[2] + + def p_expression_group(self, p): + 'expression : LPAREN expression RPAREN' + p[0] = p[2] + + def p_expression_number(self, p): + 'expression : NUMBER' + p[0] = p[1] + + def p_expression_name(self, p): + 'expression : NAME' + try: + p[0] = self.names[p[1]] + except LookupError: + print("Undefined name '%s'" % p[1]) + p[0] = 0 + + def p_error(self, p): + if p: + print("Syntax error at '%s'" % p.value) + else: + print("Syntax error at EOF") + +if __name__ == '__main__': + calc = Calc() + calc.run() diff --git a/components/script/dom/bindings/codegen/ply/example/optcalc/README b/components/script/dom/bindings/codegen/ply/example/optcalc/README new file mode 100644 index 00000000000..53dd5fcd559 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/optcalc/README @@ -0,0 +1,9 @@ +An example showing how to use Python optimized mode. +To run: + + - First run 'python calc.py' + + - Then run 'python -OO calc.py' + +If working correctly, the second version should run the +same way. diff --git a/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py b/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py new file mode 100644 index 00000000000..0c223e59949 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py @@ -0,0 +1,134 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. This is from O'Reilly's +# "Lex and Yacc", p. 63. 
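+# With optimize=1, lex and yacc skip the usual docstring-based validation and
+# reuse the cached lextab.py/parsetab.py from an earlier normal run, which is
+# why the README suggests running 'python calc.py' once before trying
+# 'python -OO calc.py' (where docstrings are stripped).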
+# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +if sys.version_info[0] >= 3: + raw_input = input + +tokens = ( + 'NAME', 'NUMBER', + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS', + 'LPAREN', 'RPAREN', +) + +# Tokens + +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_EQUALS = r'=' +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*' + + +def t_NUMBER(t): + r'\d+' + try: + t.value = int(t.value) + except ValueError: + print("Integer value too large %s" % t.value) + t.value = 0 + return t + +t_ignore = " \t" + + +def t_newline(t): + r'\n+' + t.lexer.lineno += t.value.count("\n") + + +def t_error(t): + print("Illegal character '%s'" % t.value[0]) + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex(optimize=1) + +# Parsing rules + +precedence = ( + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('right', 'UMINUS'), +) + +# dictionary of names +names = {} + + +def p_statement_assign(t): + 'statement : NAME EQUALS expression' + names[t[1]] = t[3] + + +def p_statement_expr(t): + 'statement : expression' + print(t[1]) + + +def p_expression_binop(t): + '''expression : expression PLUS expression + | expression MINUS expression + | expression TIMES expression + | expression DIVIDE expression''' + if t[2] == '+': + t[0] = t[1] + t[3] + elif t[2] == '-': + t[0] = t[1] - t[3] + elif t[2] == '*': + t[0] = t[1] * t[3] + elif t[2] == '/': + t[0] = t[1] / t[3] + elif t[2] == '<': + t[0] = t[1] < t[3] + + +def p_expression_uminus(t): + 'expression : MINUS expression %prec UMINUS' + t[0] = -t[2] + + +def p_expression_group(t): + 'expression : LPAREN expression RPAREN' + t[0] = t[2] + + +def p_expression_number(t): + 'expression : NUMBER' + t[0] = t[1] + + +def p_expression_name(t): + 'expression : NAME' + try: + t[0] = names[t[1]] + except LookupError: + print("Undefined name '%s'" % t[1]) + t[0] = 0 + + +def p_error(t): + if t: + print("Syntax error at '%s'" % t.value) + else: + print("Syntax error at EOF") + +import ply.yacc as yacc +yacc.yacc(optimize=1) + +while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + yacc.parse(s) diff --git a/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py b/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py new file mode 100644 index 00000000000..901c4b9d761 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py @@ -0,0 +1,133 @@ +# ----------------------------------------------------------------------------- +# calc.py +# +# A simple calculator with variables. This is from O'Reilly's +# "Lex and Yacc", p. 63. +# +# This example uses unicode strings for tokens, docstrings, and input. 
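+# Note that the ur'...' literals and print statements below are Python 2
+# syntax; the ur prefix was removed in Python 3, so this example will not run
+# there unmodified.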
+# ----------------------------------------------------------------------------- + +import sys +sys.path.insert(0, "../..") + +tokens = ( + 'NAME', 'NUMBER', + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS', + 'LPAREN', 'RPAREN', +) + +# Tokens + +t_PLUS = ur'\+' +t_MINUS = ur'-' +t_TIMES = ur'\*' +t_DIVIDE = ur'/' +t_EQUALS = ur'=' +t_LPAREN = ur'\(' +t_RPAREN = ur'\)' +t_NAME = ur'[a-zA-Z_][a-zA-Z0-9_]*' + + +def t_NUMBER(t): + ur'\d+' + try: + t.value = int(t.value) + except ValueError: + print "Integer value too large", t.value + t.value = 0 + return t + +t_ignore = u" \t" + + +def t_newline(t): + ur'\n+' + t.lexer.lineno += t.value.count("\n") + + +def t_error(t): + print "Illegal character '%s'" % t.value[0] + t.lexer.skip(1) + +# Build the lexer +import ply.lex as lex +lex.lex() + +# Parsing rules + +precedence = ( + ('left', 'PLUS', 'MINUS'), + ('left', 'TIMES', 'DIVIDE'), + ('right', 'UMINUS'), +) + +# dictionary of names +names = {} + + +def p_statement_assign(p): + 'statement : NAME EQUALS expression' + names[p[1]] = p[3] + + +def p_statement_expr(p): + 'statement : expression' + print p[1] + + +def p_expression_binop(p): + '''expression : expression PLUS expression + | expression MINUS expression + | expression TIMES expression + | expression DIVIDE expression''' + if p[2] == u'+': + p[0] = p[1] + p[3] + elif p[2] == u'-': + p[0] = p[1] - p[3] + elif p[2] == u'*': + p[0] = p[1] * p[3] + elif p[2] == u'/': + p[0] = p[1] / p[3] + + +def p_expression_uminus(p): + 'expression : MINUS expression %prec UMINUS' + p[0] = -p[2] + + +def p_expression_group(p): + 'expression : LPAREN expression RPAREN' + p[0] = p[2] + + +def p_expression_number(p): + 'expression : NUMBER' + p[0] = p[1] + + +def p_expression_name(p): + 'expression : NAME' + try: + p[0] = names[p[1]] + except LookupError: + print "Undefined name '%s'" % p[1] + p[0] = 0 + + +def p_error(p): + if p: + print "Syntax error at '%s'" % p.value + else: + print "Syntax error at EOF" + +import ply.yacc as yacc +yacc.yacc() + +while 1: + try: + s = raw_input('calc > ') + except EOFError: + break + if not s: + continue + yacc.parse(unicode(s)) diff --git a/components/script/dom/bindings/codegen/ply/example/yply/README b/components/script/dom/bindings/codegen/ply/example/yply/README new file mode 100644 index 00000000000..bfadf36436f --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/yply/README @@ -0,0 +1,41 @@ +yply.py + +This example implements a program yply.py that converts a UNIX-yacc +specification file into a PLY-compatible program. To use, simply +run it like this: + + % python yply.py [-nocode] inputfile.y >myparser.py + +The output of this program is Python code. In the output, +any C code in the original file is included, but is commented out. +If you use the -nocode option, then all of the C code in the +original file is just discarded. + +To use the resulting grammer with PLY, you'll need to edit the +myparser.py file. Within this file, some stub code is included that +can be used to test the construction of the parsing tables. However, +you'll need to do more editing to make a workable parser. + +Disclaimer: This just an example I threw together in an afternoon. +It might have some bugs. However, it worked when I tried it on +a yacc-specified C++ parser containing 442 rules and 855 parsing +states. + +Comments: + +1. This example does not parse specification files meant for lex/flex. + You'll need to specify the tokenizer on your own. + +2. 
This example shows a number of interesting PLY features including + + - Parsing of literal text delimited by nested parentheses + - Some interaction between the parser and the lexer. + - Use of literals in the grammar specification + - One pass compilation. The program just emits the result, + there is no intermediate parse tree. + +3. This program could probably be cleaned up and enhanced a lot. + It would be great if someone wanted to work on this (hint). + +-Dave + diff --git a/components/script/dom/bindings/codegen/ply/example/yply/ylex.py b/components/script/dom/bindings/codegen/ply/example/yply/ylex.py new file mode 100644 index 00000000000..16410e250ee --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/yply/ylex.py @@ -0,0 +1,119 @@ +# lexer for yacc-grammars +# +# Author: David Beazley (dave@dabeaz.com) +# Date : October 2, 2006 + +import sys +sys.path.append("../..") + +from ply import * + +tokens = ( + 'LITERAL', 'SECTION', 'TOKEN', 'LEFT', 'RIGHT', 'PREC', 'START', 'TYPE', 'NONASSOC', 'UNION', 'CODE', + 'ID', 'QLITERAL', 'NUMBER', +) + +states = (('code', 'exclusive'),) + +literals = [';', ',', '<', '>', '|', ':'] +t_ignore = ' \t' + +t_TOKEN = r'%token' +t_LEFT = r'%left' +t_RIGHT = r'%right' +t_NONASSOC = r'%nonassoc' +t_PREC = r'%prec' +t_START = r'%start' +t_TYPE = r'%type' +t_UNION = r'%union' +t_ID = r'[a-zA-Z_][a-zA-Z_0-9]*' +t_QLITERAL = r'''(?P['"]).*?(?P=quote)''' +t_NUMBER = r'\d+' + + +def t_SECTION(t): + r'%%' + if getattr(t.lexer, "lastsection", 0): + t.value = t.lexer.lexdata[t.lexpos + 2:] + t.lexer.lexpos = len(t.lexer.lexdata) + else: + t.lexer.lastsection = 0 + return t + +# Comments + + +def t_ccomment(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + +t_ignore_cppcomment = r'//.*' + + +def t_LITERAL(t): + r'%\{(.|\n)*?%\}' + t.lexer.lineno += t.value.count("\n") + return t + + +def t_NEWLINE(t): + r'\n' + t.lexer.lineno += 1 + + +def t_code(t): + r'\{' + t.lexer.codestart = t.lexpos + t.lexer.level = 1 + t.lexer.begin('code') + + +def t_code_ignore_string(t): + r'\"([^\\\n]|(\\.))*?\"' + + +def t_code_ignore_char(t): + r'\'([^\\\n]|(\\.))*?\'' + + +def t_code_ignore_comment(t): + r'/\*(.|\n)*?\*/' + + +def t_code_ignore_cppcom(t): + r'//.*' + + +def t_code_lbrace(t): + r'\{' + t.lexer.level += 1 + + +def t_code_rbrace(t): + r'\}' + t.lexer.level -= 1 + if t.lexer.level == 0: + t.type = 'CODE' + t.value = t.lexer.lexdata[t.lexer.codestart:t.lexpos + 1] + t.lexer.begin('INITIAL') + t.lexer.lineno += t.value.count('\n') + return t + +t_code_ignore_nonspace = r'[^\s\}\'\"\{]+' +t_code_ignore_whitespace = r'\s+' +t_code_ignore = "" + + +def t_code_error(t): + raise RuntimeError + + +def t_error(t): + print("%d: Illegal character '%s'" % (t.lexer.lineno, t.value[0])) + print(t.value) + t.lexer.skip(1) + +lex.lex() + +if __name__ == '__main__': + lex.runmain() diff --git a/components/script/dom/bindings/codegen/ply/example/yply/yparse.py b/components/script/dom/bindings/codegen/ply/example/yply/yparse.py new file mode 100644 index 00000000000..1f2e8d0922c --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/yply/yparse.py @@ -0,0 +1,244 @@ +# parser for Unix yacc-based grammars +# +# Author: David Beazley (dave@dabeaz.com) +# Date : October 2, 2006 + +import ylex +tokens = ylex.tokens + +from ply import * + +tokenlist = [] +preclist = [] + +emit_code = 1 + + +def p_yacc(p): + '''yacc : defsection rulesection''' + + +def p_defsection(p): + '''defsection : definitions SECTION + | SECTION''' + p.lexer.lastsection = 
1 + print("tokens = ", repr(tokenlist)) + print() + print("precedence = ", repr(preclist)) + print() + print("# -------------- RULES ----------------") + print() + + +def p_rulesection(p): + '''rulesection : rules SECTION''' + + print("# -------------- RULES END ----------------") + print_code(p[2], 0) + + +def p_definitions(p): + '''definitions : definitions definition + | definition''' + + +def p_definition_literal(p): + '''definition : LITERAL''' + print_code(p[1], 0) + + +def p_definition_start(p): + '''definition : START ID''' + print("start = '%s'" % p[2]) + + +def p_definition_token(p): + '''definition : toktype opttype idlist optsemi ''' + for i in p[3]: + if i[0] not in "'\"": + tokenlist.append(i) + if p[1] == '%left': + preclist.append(('left',) + tuple(p[3])) + elif p[1] == '%right': + preclist.append(('right',) + tuple(p[3])) + elif p[1] == '%nonassoc': + preclist.append(('nonassoc',) + tuple(p[3])) + + +def p_toktype(p): + '''toktype : TOKEN + | LEFT + | RIGHT + | NONASSOC''' + p[0] = p[1] + + +def p_opttype(p): + '''opttype : '<' ID '>' + | empty''' + + +def p_idlist(p): + '''idlist : idlist optcomma tokenid + | tokenid''' + if len(p) == 2: + p[0] = [p[1]] + else: + p[0] = p[1] + p[1].append(p[3]) + + +def p_tokenid(p): + '''tokenid : ID + | ID NUMBER + | QLITERAL + | QLITERAL NUMBER''' + p[0] = p[1] + + +def p_optsemi(p): + '''optsemi : ';' + | empty''' + + +def p_optcomma(p): + '''optcomma : ',' + | empty''' + + +def p_definition_type(p): + '''definition : TYPE '<' ID '>' namelist optsemi''' + # type declarations are ignored + + +def p_namelist(p): + '''namelist : namelist optcomma ID + | ID''' + + +def p_definition_union(p): + '''definition : UNION CODE optsemi''' + # Union declarations are ignored + + +def p_rules(p): + '''rules : rules rule + | rule''' + if len(p) == 2: + rule = p[1] + else: + rule = p[2] + + # Print out a Python equivalent of this rule + + embedded = [] # Embedded actions (a mess) + embed_count = 0 + + rulename = rule[0] + rulecount = 1 + for r in rule[1]: + # r contains one of the rule possibilities + print("def p_%s_%d(p):" % (rulename, rulecount)) + prod = [] + prodcode = "" + for i in range(len(r)): + item = r[i] + if item[0] == '{': # A code block + if i == len(r) - 1: + prodcode = item + break + else: + # an embedded action + embed_name = "_embed%d_%s" % (embed_count, rulename) + prod.append(embed_name) + embedded.append((embed_name, item)) + embed_count += 1 + else: + prod.append(item) + print(" '''%s : %s'''" % (rulename, " ".join(prod))) + # Emit code + print_code(prodcode, 4) + print() + rulecount += 1 + + for e, code in embedded: + print("def p_%s(p):" % e) + print(" '''%s : '''" % e) + print_code(code, 4) + print() + + +def p_rule(p): + '''rule : ID ':' rulelist ';' ''' + p[0] = (p[1], [p[3]]) + + +def p_rule2(p): + '''rule : ID ':' rulelist morerules ';' ''' + p[4].insert(0, p[3]) + p[0] = (p[1], p[4]) + + +def p_rule_empty(p): + '''rule : ID ':' ';' ''' + p[0] = (p[1], [[]]) + + +def p_rule_empty2(p): + '''rule : ID ':' morerules ';' ''' + + p[3].insert(0, []) + p[0] = (p[1], p[3]) + + +def p_morerules(p): + '''morerules : morerules '|' rulelist + | '|' rulelist + | '|' ''' + + if len(p) == 2: + p[0] = [[]] + elif len(p) == 3: + p[0] = [p[2]] + else: + p[0] = p[1] + p[0].append(p[3]) + +# print("morerules", len(p), p[0]) + + +def p_rulelist(p): + '''rulelist : rulelist ruleitem + | ruleitem''' + + if len(p) == 2: + p[0] = [p[1]] + else: + p[0] = p[1] + p[1].append(p[2]) + + +def p_ruleitem(p): + '''ruleitem : ID + | QLITERAL + | CODE + | 
PREC''' + p[0] = p[1] + + +def p_empty(p): + '''empty : ''' + + +def p_error(p): + pass + +yacc.yacc(debug=0) + + +def print_code(code, indent): + if not emit_code: + return + codelines = code.splitlines() + for c in codelines: + print("%s# %s" % (" " * indent, c)) diff --git a/components/script/dom/bindings/codegen/ply/example/yply/yply.py b/components/script/dom/bindings/codegen/ply/example/yply/yply.py new file mode 100755 index 00000000000..e24616c831c --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/example/yply/yply.py @@ -0,0 +1,51 @@ +#!/usr/local/bin/python +# yply.py +# +# Author: David Beazley (dave@dabeaz.com) +# Date : October 2, 2006 +# +# Converts a UNIX-yacc specification file into a PLY-compatible +# specification. To use, simply do this: +# +# % python yply.py [-nocode] inputfile.y >myparser.py +# +# The output of this program is Python code. In the output, +# any C code in the original file is included, but is commented. +# If you use the -nocode option, then all of the C code in the +# original file is discarded. +# +# Disclaimer: This just an example I threw together in an afternoon. +# It might have some bugs. However, it worked when I tried it on +# a yacc-specified C++ parser containing 442 rules and 855 parsing +# states. +# + +import sys +sys.path.insert(0, "../..") + +import ylex +import yparse + +from ply import * + +if len(sys.argv) == 1: + print("usage : yply.py [-nocode] inputfile") + raise SystemExit + +if len(sys.argv) == 3: + if sys.argv[1] == '-nocode': + yparse.emit_code = 0 + else: + print("Unknown option '%s'" % sys.argv[1]) + raise SystemExit + filename = sys.argv[2] +else: + filename = sys.argv[1] + +yacc.parse(open(filename).read()) + +print(""" +if __name__ == '__main__': + from ply import * + yacc.yacc() +""") diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO b/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO new file mode 100644 index 00000000000..6eedf425953 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO @@ -0,0 +1,22 @@ +Metadata-Version: 1.1 +Name: ply +Version: 3.10 +Summary: Python Lex & Yacc +Home-page: http://www.dabeaz.com/ply/ +Author: David Beazley +Author-email: dave@dabeaz.com +License: BSD +Description: + PLY is yet another implementation of lex and yacc for Python. Some notable + features include the fact that its implemented entirely in Python and it + uses LALR(1) parsing which is efficient and well suited for larger grammars. + + PLY provides most of the standard lex/yacc features including support for empty + productions, precedence rules, error recovery, and support for ambiguous grammars. + + PLY is extremely easy to use and provides very extensive error checking. + It is compatible with both Python 2 and Python 3. 
+ +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 2 diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt new file mode 100644 index 00000000000..2dff7dd29b8 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt @@ -0,0 +1,172 @@ +ANNOUNCE +CHANGES +MANIFEST.in +README.md +TODO +setup.cfg +setup.py +doc/internal.html +doc/makedoc.py +doc/ply.html +example/README +example/cleanup.sh +example/BASIC/README +example/BASIC/basic.py +example/BASIC/basiclex.py +example/BASIC/basiclog.py +example/BASIC/basinterp.py +example/BASIC/basparse.py +example/BASIC/dim.bas +example/BASIC/func.bas +example/BASIC/gcd.bas +example/BASIC/gosub.bas +example/BASIC/hello.bas +example/BASIC/linear.bas +example/BASIC/maxsin.bas +example/BASIC/powers.bas +example/BASIC/rand.bas +example/BASIC/sales.bas +example/BASIC/sears.bas +example/BASIC/sqrt1.bas +example/BASIC/sqrt2.bas +example/GardenSnake/GardenSnake.py +example/GardenSnake/README +example/ansic/README +example/ansic/clex.py +example/ansic/cparse.py +example/calc/calc.py +example/calcdebug/calc.py +example/calceof/calc.py +example/classcalc/calc.py +example/closurecalc/calc.py +example/hedit/hedit.py +example/newclasscalc/calc.py +example/optcalc/README +example/optcalc/calc.py +example/unicalc/calc.py +example/yply/README +example/yply/ylex.py +example/yply/yparse.py +example/yply/yply.py +ply/__init__.py +ply/cpp.py +ply/ctokens.py +ply/lex.py +ply/yacc.py +ply/ygen.py +ply.egg-info/PKG-INFO +ply.egg-info/SOURCES.txt +ply.egg-info/dependency_links.txt +ply.egg-info/top_level.txt +test/README +test/calclex.py +test/cleanup.sh +test/lex_closure.py +test/lex_doc1.py +test/lex_dup1.py +test/lex_dup2.py +test/lex_dup3.py +test/lex_empty.py +test/lex_error1.py +test/lex_error2.py +test/lex_error3.py +test/lex_error4.py +test/lex_hedit.py +test/lex_ignore.py +test/lex_ignore2.py +test/lex_literal1.py +test/lex_literal2.py +test/lex_literal3.py +test/lex_many_tokens.py +test/lex_module.py +test/lex_module_import.py +test/lex_object.py +test/lex_opt_alias.py +test/lex_optimize.py +test/lex_optimize2.py +test/lex_optimize3.py +test/lex_re1.py +test/lex_re2.py +test/lex_re3.py +test/lex_rule1.py +test/lex_rule2.py +test/lex_rule3.py +test/lex_state1.py +test/lex_state2.py +test/lex_state3.py +test/lex_state4.py +test/lex_state5.py +test/lex_state_noerror.py +test/lex_state_norule.py +test/lex_state_try.py +test/lex_token1.py +test/lex_token2.py +test/lex_token3.py +test/lex_token4.py +test/lex_token5.py +test/lex_token_dup.py +test/testlex.py +test/testyacc.py +test/yacc_badargs.py +test/yacc_badid.py +test/yacc_badprec.py +test/yacc_badprec2.py +test/yacc_badprec3.py +test/yacc_badrule.py +test/yacc_badtok.py +test/yacc_dup.py +test/yacc_error1.py +test/yacc_error2.py +test/yacc_error3.py +test/yacc_error4.py +test/yacc_error5.py +test/yacc_error6.py +test/yacc_error7.py +test/yacc_inf.py +test/yacc_literal.py +test/yacc_misplaced.py +test/yacc_missing1.py +test/yacc_nested.py +test/yacc_nodoc.py +test/yacc_noerror.py +test/yacc_nop.py +test/yacc_notfunc.py +test/yacc_notok.py +test/yacc_prec1.py +test/yacc_rr.py +test/yacc_rr_unused.py +test/yacc_simple.py +test/yacc_sr.py +test/yacc_term1.py +test/yacc_unicode_literals.py +test/yacc_unused.py +test/yacc_unused_rule.py +test/yacc_uprec.py +test/yacc_uprec2.py +test/pkg_test1/__init__.py 
+test/pkg_test1/parsing/__init__.py +test/pkg_test1/parsing/calclex.py +test/pkg_test1/parsing/calcparse.py +test/pkg_test2/__init__.py +test/pkg_test2/parsing/__init__.py +test/pkg_test2/parsing/calclex.py +test/pkg_test2/parsing/calcparse.py +test/pkg_test3/__init__.py +test/pkg_test3/generated/__init__.py +test/pkg_test3/parsing/__init__.py +test/pkg_test3/parsing/calclex.py +test/pkg_test3/parsing/calcparse.py +test/pkg_test4/__init__.py +test/pkg_test4/parsing/__init__.py +test/pkg_test4/parsing/calclex.py +test/pkg_test4/parsing/calcparse.py +test/pkg_test5/__init__.py +test/pkg_test5/parsing/__init__.py +test/pkg_test5/parsing/calclex.py +test/pkg_test5/parsing/calcparse.py +test/pkg_test6/__init__.py +test/pkg_test6/parsing/__init__.py +test/pkg_test6/parsing/calclex.py +test/pkg_test6/parsing/calcparse.py +test/pkg_test6/parsing/expression.py +test/pkg_test6/parsing/statement.py \ No newline at end of file diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt new file mode 100644 index 00000000000..90412f06833 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt @@ -0,0 +1 @@ +ply diff --git a/components/script/dom/bindings/codegen/ply/ply/__init__.py b/components/script/dom/bindings/codegen/ply/ply/__init__.py index 87838622863..6e53cddcf67 100644 --- a/components/script/dom/bindings/codegen/ply/ply/__init__.py +++ b/components/script/dom/bindings/codegen/ply/ply/__init__.py @@ -1,6 +1,5 @@ # PLY package # Author: David Beazley (dave@dabeaz.com) -# https://dabeaz.com/ply/index.html -__version__ = '4.0' +__version__ = '3.9' __all__ = ['lex','yacc'] diff --git a/components/script/dom/bindings/codegen/ply/ply/cpp.py b/components/script/dom/bindings/codegen/ply/ply/cpp.py new file mode 100644 index 00000000000..b6bfc69614b --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply/cpp.py @@ -0,0 +1,918 @@ +# ----------------------------------------------------------------------------- +# cpp.py +# +# Author: David Beazley (http://www.dabeaz.com) +# Copyright (C) 2007 +# All rights reserved +# +# This module implements an ANSI-C style lexical preprocessor for PLY. +# ----------------------------------------------------------------------------- +from __future__ import generators + +import sys + +# Some Python 3 compatibility shims +if sys.version_info.major < 3: + STRING_TYPES = (str, unicode) +else: + STRING_TYPES = str + xrange = range + +# ----------------------------------------------------------------------------- +# Default preprocessor lexer definitions. These tokens are enough to get +# a basic preprocessor working. 
Other modules may import these if they want +# ----------------------------------------------------------------------------- + +tokens = ( + 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND' +) + +literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\"" + +# Whitespace +def t_CPP_WS(t): + r'\s+' + t.lexer.lineno += t.value.count("\n") + return t + +t_CPP_POUND = r'\#' +t_CPP_DPOUND = r'\#\#' + +# Identifier +t_CPP_ID = r'[A-Za-z_][\w_]*' + +# Integer literal +def CPP_INTEGER(t): + r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)' + return t + +t_CPP_INTEGER = CPP_INTEGER + +# Floating literal +t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +def t_CPP_STRING(t): + r'\"([^\\\n]|(\\(.|\n)))*?\"' + t.lexer.lineno += t.value.count("\n") + return t + +# Character constant 'c' or L'c' +def t_CPP_CHAR(t): + r'(L)?\'([^\\\n]|(\\(.|\n)))*?\'' + t.lexer.lineno += t.value.count("\n") + return t + +# Comment +def t_CPP_COMMENT1(t): + r'(/\*(.|\n)*?\*/)' + ncr = t.value.count("\n") + t.lexer.lineno += ncr + # replace with one space or a number of '\n' + t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' ' + return t + +# Line comment +def t_CPP_COMMENT2(t): + r'(//.*?(\n|$))' + # replace with '/n' + t.type = 'CPP_WS'; t.value = '\n' + return t + +def t_error(t): + t.type = t.value[0] + t.value = t.value[0] + t.lexer.skip(1) + return t + +import re +import copy +import time +import os.path + +# ----------------------------------------------------------------------------- +# trigraph() +# +# Given an input string, this function replaces all trigraph sequences. +# The following mapping is used: +# +# ??= # +# ??/ \ +# ??' ^ +# ??( [ +# ??) ] +# ??! | +# ??< { +# ??> } +# ??- ~ +# ----------------------------------------------------------------------------- + +_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''') +_trigraph_rep = { + '=':'#', + '/':'\\', + "'":'^', + '(':'[', + ')':']', + '!':'|', + '<':'{', + '>':'}', + '-':'~' +} + +def trigraph(input): + return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input) + +# ------------------------------------------------------------------ +# Macro object +# +# This object holds information about preprocessor macros +# +# .name - Macro name (string) +# .value - Macro value (a list of tokens) +# .arglist - List of argument names +# .variadic - Boolean indicating whether or not variadic macro +# .vararg - Name of the variadic parameter +# +# When a macro is created, the macro replacement token sequence is +# pre-scanned and used to create patch lists that are later used +# during macro expansion +# ------------------------------------------------------------------ + +class Macro(object): + def __init__(self,name,value,arglist=None,variadic=False): + self.name = name + self.value = value + self.arglist = arglist + self.variadic = variadic + if variadic: + self.vararg = arglist[-1] + self.source = None + +# ------------------------------------------------------------------ +# Preprocessor object +# +# Object representing a preprocessor. 
Contains macro definitions, +# include directories, and other information +# ------------------------------------------------------------------ + +class Preprocessor(object): + def __init__(self,lexer=None): + if lexer is None: + lexer = lex.lexer + self.lexer = lexer + self.macros = { } + self.path = [] + self.temp_path = [] + + # Probe the lexer for selected tokens + self.lexprobe() + + tm = time.localtime() + self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm)) + self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm)) + self.parser = None + + # ----------------------------------------------------------------------------- + # tokenize() + # + # Utility function. Given a string of text, tokenize into a list of tokens + # ----------------------------------------------------------------------------- + + def tokenize(self,text): + tokens = [] + self.lexer.input(text) + while True: + tok = self.lexer.token() + if not tok: break + tokens.append(tok) + return tokens + + # --------------------------------------------------------------------- + # error() + # + # Report a preprocessor error/warning of some kind + # ---------------------------------------------------------------------- + + def error(self,file,line,msg): + print("%s:%d %s" % (file,line,msg)) + + # ---------------------------------------------------------------------- + # lexprobe() + # + # This method probes the preprocessor lexer object to discover + # the token types of symbols that are important to the preprocessor. + # If this works right, the preprocessor will simply "work" + # with any suitable lexer regardless of how tokens have been named. + # ---------------------------------------------------------------------- + + def lexprobe(self): + + # Determine the token type for identifiers + self.lexer.input("identifier") + tok = self.lexer.token() + if not tok or tok.value != "identifier": + print("Couldn't determine identifier type") + else: + self.t_ID = tok.type + + # Determine the token type for integers + self.lexer.input("12345") + tok = self.lexer.token() + if not tok or int(tok.value) != 12345: + print("Couldn't determine integer type") + else: + self.t_INTEGER = tok.type + self.t_INTEGER_TYPE = type(tok.value) + + # Determine the token type for strings enclosed in double quotes + self.lexer.input("\"filename\"") + tok = self.lexer.token() + if not tok or tok.value != "\"filename\"": + print("Couldn't determine string type") + else: + self.t_STRING = tok.type + + # Determine the token type for whitespace--if any + self.lexer.input(" ") + tok = self.lexer.token() + if not tok or tok.value != " ": + self.t_SPACE = None + else: + self.t_SPACE = tok.type + + # Determine the token type for newlines + self.lexer.input("\n") + tok = self.lexer.token() + if not tok or tok.value != "\n": + self.t_NEWLINE = None + print("Couldn't determine token for newlines") + else: + self.t_NEWLINE = tok.type + + self.t_WS = (self.t_SPACE, self.t_NEWLINE) + + # Check for other characters used by the preprocessor + chars = [ '<','>','#','##','\\','(',')',',','.'] + for c in chars: + self.lexer.input(c) + tok = self.lexer.token() + if not tok or tok.value != c: + print("Unable to lex '%s' required for preprocessor" % c) + + # ---------------------------------------------------------------------- + # add_path() + # + # Adds a search path to the preprocessor. 
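+    # Illustrative sketch (the lexer setup and the path used here are
+    # hypothetical, following the __main__ block at the end of this module):
+    #
+    #     import ply.lex as lex
+    #     p = Preprocessor(lex.lex())
+    #     p.add_path("/usr/include")   # consulted when handling #include <...>
+    #     p.parse('#include <stdio.h>\n', "input.c")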
+ # ---------------------------------------------------------------------- + + def add_path(self,path): + self.path.append(path) + + # ---------------------------------------------------------------------- + # group_lines() + # + # Given an input string, this function splits it into lines. Trailing whitespace + # is removed. Any line ending with \ is grouped with the next line. This + # function forms the lowest level of the preprocessor---grouping into text into + # a line-by-line format. + # ---------------------------------------------------------------------- + + def group_lines(self,input): + lex = self.lexer.clone() + lines = [x.rstrip() for x in input.splitlines()] + for i in xrange(len(lines)): + j = i+1 + while lines[i].endswith('\\') and (j < len(lines)): + lines[i] = lines[i][:-1]+lines[j] + lines[j] = "" + j += 1 + + input = "\n".join(lines) + lex.input(input) + lex.lineno = 1 + + current_line = [] + while True: + tok = lex.token() + if not tok: + break + current_line.append(tok) + if tok.type in self.t_WS and '\n' in tok.value: + yield current_line + current_line = [] + + if current_line: + yield current_line + + # ---------------------------------------------------------------------- + # tokenstrip() + # + # Remove leading/trailing whitespace tokens from a token list + # ---------------------------------------------------------------------- + + def tokenstrip(self,tokens): + i = 0 + while i < len(tokens) and tokens[i].type in self.t_WS: + i += 1 + del tokens[:i] + i = len(tokens)-1 + while i >= 0 and tokens[i].type in self.t_WS: + i -= 1 + del tokens[i+1:] + return tokens + + + # ---------------------------------------------------------------------- + # collect_args() + # + # Collects comma separated arguments from a list of tokens. The arguments + # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions) + # where tokencount is the number of tokens consumed, args is a list of arguments, + # and positions is a list of integers containing the starting index of each + # argument. Each argument is represented by a list of tokens. + # + # When collecting arguments, leading and trailing whitespace is removed + # from each argument. + # + # This function properly handles nested parenthesis and commas---these do not + # define new arguments. + # ---------------------------------------------------------------------- + + def collect_args(self,tokenlist): + args = [] + positions = [] + current_arg = [] + nesting = 1 + tokenlen = len(tokenlist) + + # Search for the opening '('. 
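+        # Leading whitespace tokens are skipped while looking for the '('.
+        # Illustrative example: for the argument text "(a, (b, c), d)" the
+        # nested parentheses keep "(b, c)" together as a single argument, so
+        # three argument token lists are returned rather than four.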
+ i = 0 + while (i < tokenlen) and (tokenlist[i].type in self.t_WS): + i += 1 + + if (i < tokenlen) and (tokenlist[i].value == '('): + positions.append(i+1) + else: + self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments") + return 0, [], [] + + i += 1 + + while i < tokenlen: + t = tokenlist[i] + if t.value == '(': + current_arg.append(t) + nesting += 1 + elif t.value == ')': + nesting -= 1 + if nesting == 0: + if current_arg: + args.append(self.tokenstrip(current_arg)) + positions.append(i) + return i+1,args,positions + current_arg.append(t) + elif t.value == ',' and nesting == 1: + args.append(self.tokenstrip(current_arg)) + positions.append(i+1) + current_arg = [] + else: + current_arg.append(t) + i += 1 + + # Missing end argument + self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments") + return 0, [],[] + + # ---------------------------------------------------------------------- + # macro_prescan() + # + # Examine the macro value (token sequence) and identify patch points + # This is used to speed up macro expansion later on---we'll know + # right away where to apply patches to the value to form the expansion + # ---------------------------------------------------------------------- + + def macro_prescan(self,macro): + macro.patch = [] # Standard macro arguments + macro.str_patch = [] # String conversion expansion + macro.var_comma_patch = [] # Variadic macro comma patch + i = 0 + while i < len(macro.value): + if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist: + argnum = macro.arglist.index(macro.value[i].value) + # Conversion of argument to a string + if i > 0 and macro.value[i-1].value == '#': + macro.value[i] = copy.copy(macro.value[i]) + macro.value[i].type = self.t_STRING + del macro.value[i-1] + macro.str_patch.append((argnum,i-1)) + continue + # Concatenation + elif (i > 0 and macro.value[i-1].value == '##'): + macro.patch.append(('c',argnum,i-1)) + del macro.value[i-1] + continue + elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'): + macro.patch.append(('c',argnum,i)) + i += 1 + continue + # Standard expansion + else: + macro.patch.append(('e',argnum,i)) + elif macro.value[i].value == '##': + if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \ + ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \ + (macro.value[i+1].value == macro.vararg): + macro.var_comma_patch.append(i-1) + i += 1 + macro.patch.sort(key=lambda x: x[2],reverse=True) + + # ---------------------------------------------------------------------- + # macro_expand_args() + # + # Given a Macro and list of arguments (each a token list), this method + # returns an expanded version of a macro. The return value is a token sequence + # representing the replacement macro tokens + # ---------------------------------------------------------------------- + + def macro_expand_args(self,macro,args): + # Make a copy of the macro token sequence + rep = [copy.copy(_x) for _x in macro.value] + + # Make string expansion patches. These do not alter the length of the replacement sequence + + str_expansion = {} + for argnum, i in macro.str_patch: + if argnum not in str_expansion: + str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\") + rep[i] = copy.copy(rep[i]) + rep[i].value = str_expansion[argnum] + + # Make the variadic macro comma patch. 
If the variadic macro argument is empty, we get rid + comma_patch = False + if macro.variadic and not args[-1]: + for i in macro.var_comma_patch: + rep[i] = None + comma_patch = True + + # Make all other patches. The order of these matters. It is assumed that the patch list + # has been sorted in reverse order of patch location since replacements will cause the + # size of the replacement sequence to expand from the patch point. + + expanded = { } + for ptype, argnum, i in macro.patch: + # Concatenation. Argument is left unexpanded + if ptype == 'c': + rep[i:i+1] = args[argnum] + # Normal expansion. Argument is macro expanded first + elif ptype == 'e': + if argnum not in expanded: + expanded[argnum] = self.expand_macros(args[argnum]) + rep[i:i+1] = expanded[argnum] + + # Get rid of removed comma if necessary + if comma_patch: + rep = [_i for _i in rep if _i] + + return rep + + + # ---------------------------------------------------------------------- + # expand_macros() + # + # Given a list of tokens, this function performs macro expansion. + # The expanded argument is a dictionary that contains macros already + # expanded. This is used to prevent infinite recursion. + # ---------------------------------------------------------------------- + + def expand_macros(self,tokens,expanded=None): + if expanded is None: + expanded = {} + i = 0 + while i < len(tokens): + t = tokens[i] + if t.type == self.t_ID: + if t.value in self.macros and t.value not in expanded: + # Yes, we found a macro match + expanded[t.value] = True + + m = self.macros[t.value] + if not m.arglist: + # A simple macro + ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded) + for e in ex: + e.lineno = t.lineno + tokens[i:i+1] = ex + i += len(ex) + else: + # A macro with arguments + j = i + 1 + while j < len(tokens) and tokens[j].type in self.t_WS: + j += 1 + if tokens[j].value == '(': + tokcount,args,positions = self.collect_args(tokens[j:]) + if not m.variadic and len(args) != len(m.arglist): + self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist))) + i = j + tokcount + elif m.variadic and len(args) < len(m.arglist)-1: + if len(m.arglist) > 2: + self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1)) + else: + self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1)) + i = j + tokcount + else: + if m.variadic: + if len(args) == len(m.arglist)-1: + args.append([]) + else: + args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1] + del args[len(m.arglist):] + + # Get macro replacement text + rep = self.macro_expand_args(m,args) + rep = self.expand_macros(rep,expanded) + for r in rep: + r.lineno = t.lineno + tokens[i:j+tokcount] = rep + i += len(rep) + del expanded[t.value] + continue + elif t.value == '__LINE__': + t.type = self.t_INTEGER + t.value = self.t_INTEGER_TYPE(t.lineno) + + i += 1 + return tokens + + # ---------------------------------------------------------------------- + # evalexpr() + # + # Evaluate an expression token sequence for the purposes of evaluating + # integral expressions. 
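+    # Roughly: defined(NAME) is rewritten to 1L or 0L, any remaining
+    # identifiers are treated as 0, the C operators &&, || and ! are mapped to
+    # Python's and/or/not, and the resulting string is passed to eval().
+    # Illustrative example: "defined(FOO) && BAR > 1" with neither name
+    # defined reduces to roughly "0L and 0L > 1", which evaluates as false.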
+ # ---------------------------------------------------------------------- + + def evalexpr(self,tokens): + # tokens = tokenize(line) + # Search for defined macros + i = 0 + while i < len(tokens): + if tokens[i].type == self.t_ID and tokens[i].value == 'defined': + j = i + 1 + needparen = False + result = "0L" + while j < len(tokens): + if tokens[j].type in self.t_WS: + j += 1 + continue + elif tokens[j].type == self.t_ID: + if tokens[j].value in self.macros: + result = "1L" + else: + result = "0L" + if not needparen: break + elif tokens[j].value == '(': + needparen = True + elif tokens[j].value == ')': + break + else: + self.error(self.source,tokens[i].lineno,"Malformed defined()") + j += 1 + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE(result) + del tokens[i+1:j+1] + i += 1 + tokens = self.expand_macros(tokens) + for i,t in enumerate(tokens): + if t.type == self.t_ID: + tokens[i] = copy.copy(t) + tokens[i].type = self.t_INTEGER + tokens[i].value = self.t_INTEGER_TYPE("0L") + elif t.type == self.t_INTEGER: + tokens[i] = copy.copy(t) + # Strip off any trailing suffixes + tokens[i].value = str(tokens[i].value) + while tokens[i].value[-1] not in "0123456789abcdefABCDEF": + tokens[i].value = tokens[i].value[:-1] + + expr = "".join([str(x.value) for x in tokens]) + expr = expr.replace("&&"," and ") + expr = expr.replace("||"," or ") + expr = expr.replace("!"," not ") + try: + result = eval(expr) + except Exception: + self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression") + result = 0 + return result + + # ---------------------------------------------------------------------- + # parsegen() + # + # Parse an input string/ + # ---------------------------------------------------------------------- + def parsegen(self,input,source=None): + + # Replace trigraph sequences + t = trigraph(input) + lines = self.group_lines(t) + + if not source: + source = "" + + self.define("__FILE__ \"%s\"" % source) + + self.source = source + chunk = [] + enable = True + iftrigger = False + ifstack = [] + + for x in lines: + for i,tok in enumerate(x): + if tok.type not in self.t_WS: break + if tok.value == '#': + # Preprocessor directive + + # insert necessary whitespace instead of eaten tokens + for tok in x: + if tok.type in self.t_WS and '\n' in tok.value: + chunk.append(tok) + + dirtokens = self.tokenstrip(x[i+1:]) + if dirtokens: + name = dirtokens[0].value + args = self.tokenstrip(dirtokens[1:]) + else: + name = "" + args = [] + + if name == 'define': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.define(args) + elif name == 'include': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + oldfile = self.macros['__FILE__'] + for tok in self.include(args): + yield tok + self.macros['__FILE__'] = oldfile + self.source = source + elif name == 'undef': + if enable: + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + self.undef(args) + elif name == 'ifdef': + ifstack.append((enable,iftrigger)) + if enable: + if not args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'ifndef': + ifstack.append((enable,iftrigger)) + if enable: + if args[0].value in self.macros: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'if': + ifstack.append((enable,iftrigger)) + if enable: + result = self.evalexpr(args) + if not result: + enable = False + iftrigger = False + else: + iftrigger = True + elif name == 'elif': + if ifstack: + 
if ifstack[-1][0]: # We only pay attention if outer "if" allows this + if enable: # If already true, we flip enable False + enable = False + elif not iftrigger: # If False, but not triggered yet, we'll check expression + result = self.evalexpr(args) + if result: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #elif") + + elif name == 'else': + if ifstack: + if ifstack[-1][0]: + if enable: + enable = False + elif not iftrigger: + enable = True + iftrigger = True + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #else") + + elif name == 'endif': + if ifstack: + enable,iftrigger = ifstack.pop() + else: + self.error(self.source,dirtokens[0].lineno,"Misplaced #endif") + else: + # Unknown preprocessor directive + pass + + else: + # Normal text + if enable: + chunk.extend(x) + + for tok in self.expand_macros(chunk): + yield tok + chunk = [] + + # ---------------------------------------------------------------------- + # include() + # + # Implementation of file-inclusion + # ---------------------------------------------------------------------- + + def include(self,tokens): + # Try to extract the filename and then process an include file + if not tokens: + return + if tokens: + if tokens[0].value != '<' and tokens[0].type != self.t_STRING: + tokens = self.expand_macros(tokens) + + if tokens[0].value == '<': + # Include <...> + i = 1 + while i < len(tokens): + if tokens[i].value == '>': + break + i += 1 + else: + print("Malformed #include <...>") + return + filename = "".join([x.value for x in tokens[1:i]]) + path = self.path + [""] + self.temp_path + elif tokens[0].type == self.t_STRING: + filename = tokens[0].value[1:-1] + path = self.temp_path + [""] + self.path + else: + print("Malformed #include statement") + return + for p in path: + iname = os.path.join(p,filename) + try: + data = open(iname,"r").read() + dname = os.path.dirname(iname) + if dname: + self.temp_path.insert(0,dname) + for tok in self.parsegen(data,filename): + yield tok + if dname: + del self.temp_path[0] + break + except IOError: + pass + else: + print("Couldn't find '%s'" % filename) + + # ---------------------------------------------------------------------- + # define() + # + # Define a new macro + # ---------------------------------------------------------------------- + + def define(self,tokens): + if isinstance(tokens,STRING_TYPES): + tokens = self.tokenize(tokens) + + linetok = tokens + try: + name = linetok[0] + if len(linetok) > 1: + mtype = linetok[1] + else: + mtype = None + if not mtype: + m = Macro(name.value,[]) + self.macros[name.value] = m + elif mtype.type in self.t_WS: + # A normal macro + m = Macro(name.value,self.tokenstrip(linetok[2:])) + self.macros[name.value] = m + elif mtype.value == '(': + # A macro with arguments + tokcount, args, positions = self.collect_args(linetok[1:]) + variadic = False + for a in args: + if variadic: + print("No more arguments may follow a variadic argument") + break + astr = "".join([str(_i.value) for _i in a]) + if astr == "...": + variadic = True + a[0].type = self.t_ID + a[0].value = '__VA_ARGS__' + variadic = True + del a[1:] + continue + elif astr[-3:] == "..." and a[0].type == self.t_ID: + variadic = True + del a[1:] + # If, for some reason, "." 
is part of the identifier, strip off the name for the purposes + # of macro expansion + if a[0].value[-3:] == '...': + a[0].value = a[0].value[:-3] + continue + if len(a) > 1 or a[0].type != self.t_ID: + print("Invalid macro argument") + break + else: + mvalue = self.tokenstrip(linetok[1+tokcount:]) + i = 0 + while i < len(mvalue): + if i+1 < len(mvalue): + if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##': + del mvalue[i] + continue + elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS: + del mvalue[i+1] + i += 1 + m = Macro(name.value,mvalue,[x[0].value for x in args],variadic) + self.macro_prescan(m) + self.macros[name.value] = m + else: + print("Bad macro definition") + except LookupError: + print("Bad macro definition") + + # ---------------------------------------------------------------------- + # undef() + # + # Undefine a macro + # ---------------------------------------------------------------------- + + def undef(self,tokens): + id = tokens[0].value + try: + del self.macros[id] + except LookupError: + pass + + # ---------------------------------------------------------------------- + # parse() + # + # Parse input text. + # ---------------------------------------------------------------------- + def parse(self,input,source=None,ignore={}): + self.ignore = ignore + self.parser = self.parsegen(input,source) + + # ---------------------------------------------------------------------- + # token() + # + # Method to return individual tokens + # ---------------------------------------------------------------------- + def token(self): + try: + while True: + tok = next(self.parser) + if tok.type not in self.ignore: return tok + except StopIteration: + self.parser = None + return None + +if __name__ == '__main__': + import ply.lex as lex + lexer = lex.lex() + + # Run a preprocessor + import sys + f = open(sys.argv[1]) + input = f.read() + + p = Preprocessor(lexer) + p.parse(input,sys.argv[1]) + while True: + tok = p.token() + if not tok: break + print(p.source, tok) + + + + + + + + + + + diff --git a/components/script/dom/bindings/codegen/ply/ply/ctokens.py b/components/script/dom/bindings/codegen/ply/ply/ctokens.py new file mode 100644 index 00000000000..f6f6952d605 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply/ctokens.py @@ -0,0 +1,133 @@ +# ---------------------------------------------------------------------- +# ctokens.py +# +# Token specifications for symbols in ANSI C and C++. This file is +# meant to be used as a library in other tokenizers. +# ---------------------------------------------------------------------- + +# Reserved words + +tokens = [ + # Literals (identifier, integer constant, float constant, string constant, char const) + 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER', + + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO', + 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT', + 'LOR', 'LAND', 'LNOT', + 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE', + + # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=) + 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL', + 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL', + + # Increment/decrement (++,--) + 'INCREMENT', 'DECREMENT', + + # Structure dereference (->) + 'ARROW', + + # Ternary operator (?) + 'TERNARY', + + # Delimeters ( ) [ ] { } , . ; : + 'LPAREN', 'RPAREN', + 'LBRACKET', 'RBRACKET', + 'LBRACE', 'RBRACE', + 'COMMA', 'PERIOD', 'SEMI', 'COLON', + + # Ellipsis (...) 
+ 'ELLIPSIS', +] + +# Operators +t_PLUS = r'\+' +t_MINUS = r'-' +t_TIMES = r'\*' +t_DIVIDE = r'/' +t_MODULO = r'%' +t_OR = r'\|' +t_AND = r'&' +t_NOT = r'~' +t_XOR = r'\^' +t_LSHIFT = r'<<' +t_RSHIFT = r'>>' +t_LOR = r'\|\|' +t_LAND = r'&&' +t_LNOT = r'!' +t_LT = r'<' +t_GT = r'>' +t_LE = r'<=' +t_GE = r'>=' +t_EQ = r'==' +t_NE = r'!=' + +# Assignment operators + +t_EQUALS = r'=' +t_TIMESEQUAL = r'\*=' +t_DIVEQUAL = r'/=' +t_MODEQUAL = r'%=' +t_PLUSEQUAL = r'\+=' +t_MINUSEQUAL = r'-=' +t_LSHIFTEQUAL = r'<<=' +t_RSHIFTEQUAL = r'>>=' +t_ANDEQUAL = r'&=' +t_OREQUAL = r'\|=' +t_XOREQUAL = r'\^=' + +# Increment/decrement +t_INCREMENT = r'\+\+' +t_DECREMENT = r'--' + +# -> +t_ARROW = r'->' + +# ? +t_TERNARY = r'\?' + +# Delimeters +t_LPAREN = r'\(' +t_RPAREN = r'\)' +t_LBRACKET = r'\[' +t_RBRACKET = r'\]' +t_LBRACE = r'\{' +t_RBRACE = r'\}' +t_COMMA = r',' +t_PERIOD = r'\.' +t_SEMI = r';' +t_COLON = r':' +t_ELLIPSIS = r'\.\.\.' + +# Identifiers +t_ID = r'[A-Za-z_][A-Za-z0-9_]*' + +# Integer literal +t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?' + +# Floating literal +t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?' + +# String literal +t_STRING = r'\"([^\\\n]|(\\.))*?\"' + +# Character constant 'c' or L'c' +t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\'' + +# Comment (C-Style) +def t_COMMENT(t): + r'/\*(.|\n)*?\*/' + t.lexer.lineno += t.value.count('\n') + return t + +# Comment (C++-Style) +def t_CPPCOMMENT(t): + r'//.*\n' + t.lexer.lineno += 1 + return t + + + + + + diff --git a/components/script/dom/bindings/codegen/ply/ply/lex.py b/components/script/dom/bindings/codegen/ply/ply/lex.py index 57b61f1779e..3e240d1aa20 100644 --- a/components/script/dom/bindings/codegen/ply/ply/lex.py +++ b/components/script/dom/bindings/codegen/ply/ply/lex.py @@ -1,12 +1,10 @@ # ----------------------------------------------------------------------------- # ply: lex.py # -# Copyright (C) 2001-2020 +# Copyright (C) 2001-2017 # David M. Beazley (Dabeaz LLC) # All rights reserved. # -# Latest version: https://github.com/dabeaz/ply -# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -16,9 +14,9 @@ # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. -# * Neither the name of David Beazley or Dabeaz LLC may be used to +# * Neither the name of the David Beazley or Dabeaz LLC may be used to # endorse or promote products derived from this software without -# specific prior written permission. +# specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT @@ -33,6 +31,9 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ----------------------------------------------------------------------------- +__version__ = '3.10' +__tabversion__ = '3.10' + import re import sys import types @@ -40,8 +41,13 @@ import copy import os import inspect -# This tuple contains acceptable string types -StringTypes = (str, bytes) +# This tuple contains known string types +try: + # Python 2.6 + StringTypes = (types.StringType, types.UnicodeType) +except AttributeError: + # Python 3.0 + StringTypes = (str, bytes) # This regular expression is used to match valid token names _is_identifier = re.compile(r'^[a-zA-Z0-9_]+$') @@ -53,10 +59,15 @@ class LexError(Exception): self.args = (message,) self.text = s + # Token class. This class is used to represent the tokens produced. class LexToken(object): + def __str__(self): + return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos) + def __repr__(self): - return f'LexToken({self.type},{self.value!r},{self.lineno},{self.lexpos})' + return str(self) + # This object is a stand-in for a logging object created by the # logging module. @@ -77,6 +88,16 @@ class PlyLogger(object): info = critical debug = critical + +# Null logger is used when no output is generated. Does nothing. +class NullLogger(object): + def __getattribute__(self, name): + return self + + def __call__(self, *args, **kwargs): + return self + + # ----------------------------------------------------------------------------- # === Lexing Engine === # @@ -94,9 +115,9 @@ class PlyLogger(object): class Lexer: def __init__(self): self.lexre = None # Master regular expression. This is a list of - # tuples (re, findex) where re is a compiled - # regular expression and findex is a list - # mapping regex group numbers to rules + # tuples (re, findex) where re is a compiled + # regular expression and findex is a list + # mapping regex group numbers to rules self.lexretext = None # Current regular expression strings self.lexstatere = {} # Dictionary mapping lexer states to master regexs self.lexstateretext = {} # Dictionary mapping lexer states to regex strings @@ -118,6 +139,7 @@ class Lexer: self.lexliterals = '' # Literal characters that can be passed through self.lexmodule = None # Module self.lineno = 1 # Current line number + self.lexoptimize = False # Optimized mode def clone(self, object=None): c = copy.copy(self) @@ -146,10 +168,91 @@ class Lexer: c.lexmodule = object return c + # ------------------------------------------------------------ + # writetab() - Write lexer information to a table file + # ------------------------------------------------------------ + def writetab(self, lextab, outputdir=''): + if isinstance(lextab, types.ModuleType): + raise IOError("Won't overwrite existing lextab module") + basetabmodule = lextab.split('.')[-1] + filename = os.path.join(outputdir, basetabmodule) + '.py' + with open(filename, 'w') as tf: + tf.write('# %s.py. This file automatically created by PLY (version %s). 
Don\'t edit!\n' % (basetabmodule, __version__)) + tf.write('_tabversion = %s\n' % repr(__tabversion__)) + tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens))) + tf.write('_lexreflags = %s\n' % repr(self.lexreflags)) + tf.write('_lexliterals = %s\n' % repr(self.lexliterals)) + tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo)) + + # Rewrite the lexstatere table, replacing function objects with function names + tabre = {} + for statename, lre in self.lexstatere.items(): + titem = [] + for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]): + titem.append((retext, _funcs_to_names(func, renames))) + tabre[statename] = titem + + tf.write('_lexstatere = %s\n' % repr(tabre)) + tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore)) + + taberr = {} + for statename, ef in self.lexstateerrorf.items(): + taberr[statename] = ef.__name__ if ef else None + tf.write('_lexstateerrorf = %s\n' % repr(taberr)) + + tabeof = {} + for statename, ef in self.lexstateeoff.items(): + tabeof[statename] = ef.__name__ if ef else None + tf.write('_lexstateeoff = %s\n' % repr(tabeof)) + + # ------------------------------------------------------------ + # readtab() - Read lexer information from a tab file + # ------------------------------------------------------------ + def readtab(self, tabfile, fdict): + if isinstance(tabfile, types.ModuleType): + lextab = tabfile + else: + exec('import %s' % tabfile) + lextab = sys.modules[tabfile] + + if getattr(lextab, '_tabversion', '0.0') != __tabversion__: + raise ImportError('Inconsistent PLY version') + + self.lextokens = lextab._lextokens + self.lexreflags = lextab._lexreflags + self.lexliterals = lextab._lexliterals + self.lextokens_all = self.lextokens | set(self.lexliterals) + self.lexstateinfo = lextab._lexstateinfo + self.lexstateignore = lextab._lexstateignore + self.lexstatere = {} + self.lexstateretext = {} + for statename, lre in lextab._lexstatere.items(): + titem = [] + txtitem = [] + for pat, func_name in lre: + titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict))) + + self.lexstatere[statename] = titem + self.lexstateretext[statename] = txtitem + + self.lexstateerrorf = {} + for statename, ef in lextab._lexstateerrorf.items(): + self.lexstateerrorf[statename] = fdict[ef] + + self.lexstateeoff = {} + for statename, ef in lextab._lexstateeoff.items(): + self.lexstateeoff[statename] = fdict[ef] + + self.begin('INITIAL') + # ------------------------------------------------------------ # input() - Push a new string into the lexer # ------------------------------------------------------------ def input(self, s): + # Pull off the first character to see if s looks like a string + c = s[:1] + if not isinstance(c, StringTypes): + raise ValueError('Expected a string') self.lexdata = s self.lexpos = 0 self.lexlen = len(s) @@ -159,7 +262,7 @@ class Lexer: # ------------------------------------------------------------ def begin(self, state): if state not in self.lexstatere: - raise ValueError(f'Undefined state {state!r}') + raise ValueError('Undefined state') self.lexre = self.lexstatere[state] self.lexretext = self.lexstateretext[state] self.lexignore = self.lexstateignore.get(state, '') @@ -193,7 +296,7 @@ class Lexer: self.lexpos += n # ------------------------------------------------------------ - # token() - Return the next token from the Lexer + # opttoken() - Return the next token from the Lexer # # Note: This function has been carefully implemented to be 
as fast # as possible. Don't make changes unless you really know what @@ -243,15 +346,22 @@ class Lexer: tok.lexer = self # Set additional attributes useful in token rules self.lexmatch = m self.lexpos = lexpos + newtok = func(tok) - del tok.lexer - del self.lexmatch # Every function must return a token, if nothing, we just move to next token if not newtok: lexpos = self.lexpos # This is here in case user has updated lexpos. lexignore = self.lexignore # This is here in case there was a state change break + + # Verify type of the token. If not in the token map, raise an error + if not self.lexoptimize: + if newtok.type not in self.lextokens_all: + raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % ( + func.__code__.co_filename, func.__code__.co_firstlineno, + func.__name__, newtok.type), lexdata[lexpos:]) + return newtok else: # No match, see if in literals @@ -276,16 +386,14 @@ class Lexer: newtok = self.lexerrorf(tok) if lexpos == self.lexpos: # Error method didn't change text position at all. This is an error. - raise LexError(f"Scanning error. Illegal character {lexdata[lexpos]!r}", - lexdata[lexpos:]) + raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:]) lexpos = self.lexpos if not newtok: continue return newtok self.lexpos = lexpos - raise LexError(f"Illegal character {lexdata[lexpos]!r} at index {lexpos}", - lexdata[lexpos:]) + raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:]) if self.lexeoff: tok = LexToken() @@ -307,12 +415,14 @@ class Lexer: def __iter__(self): return self - def __next__(self): + def next(self): t = self.token() if t is None: raise StopIteration return t + __next__ = next + # ----------------------------------------------------------------------------- # ==== Lex Builder === # @@ -338,7 +448,40 @@ def _get_regex(func): # ----------------------------------------------------------------------------- def get_caller_module_dict(levels): f = sys._getframe(levels) - return { **f.f_globals, **f.f_locals } + ldict = f.f_globals.copy() + if f.f_globals != f.f_locals: + ldict.update(f.f_locals) + return ldict + +# ----------------------------------------------------------------------------- +# _funcs_to_names() +# +# Given a list of regular expression functions, this converts it to a list +# suitable for output to a table file +# ----------------------------------------------------------------------------- +def _funcs_to_names(funclist, namelist): + result = [] + for f, name in zip(funclist, namelist): + if f and f[0]: + result.append((name, f[1])) + else: + result.append(f) + return result + +# ----------------------------------------------------------------------------- +# _names_to_funcs() +# +# Given a list of regular expression function names, this converts it back to +# functions. 
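+# It is the inverse of _funcs_to_names() above: writetab() stores each rule
+# function as a (function name, token name) pair, and readtab() uses this
+# helper to map those names back to the actual functions found in the
+# caller-supplied dictionary when a cached lextab module is loaded.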
+# ----------------------------------------------------------------------------- +def _names_to_funcs(namelist, fdict): + result = [] + for n in namelist: + if n and n[0]: + result.append((fdict[n[0]], n[1])) + else: + result.append(n) + return result # ----------------------------------------------------------------------------- # _form_master_re() @@ -349,7 +492,7 @@ def get_caller_module_dict(levels): # ----------------------------------------------------------------------------- def _form_master_re(relist, reflags, ldict, toknames): if not relist: - return [], [], [] + return [] regex = '|'.join(relist) try: lexre = re.compile(regex, reflags) @@ -372,7 +515,9 @@ def _form_master_re(relist, reflags, ldict, toknames): return [(lexre, lexindexfunc)], [regex], [lexindexnames] except Exception: - m = (len(relist) // 2) + 1 + m = int(len(relist)/2) + if m == 0: + m = 1 llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames) rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames) return (llist+rlist), (lre+rre), (lnames+rnames) @@ -386,11 +531,12 @@ def _form_master_re(relist, reflags, ldict, toknames): # calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM') # ----------------------------------------------------------------------------- def _statetoken(s, names): + nonstate = 1 parts = s.split('_') for i, part in enumerate(parts[1:], 1): if part not in names and part != 'ANY': break - + if i > 1: states = tuple(parts[1:i]) else: @@ -459,10 +605,10 @@ class LexerReflect(object): terminals = {} for n in self.tokens: if not _is_identifier.match(n): - self.log.error(f"Bad token name {n!r}") + self.log.error("Bad token name '%s'", n) self.error = True if n in terminals: - self.log.warning(f"Token {n!r} multiply defined") + self.log.warning("Token '%s' multiply defined", n) terminals[n] = 1 # Get the literals specifier @@ -476,7 +622,7 @@ class LexerReflect(object): try: for c in self.literals: if not isinstance(c, StringTypes) or len(c) > 1: - self.log.error(f'Invalid literal {c!r}. Must be a single character') + self.log.error('Invalid literal %s. Must be a single character', repr(c)) self.error = True except TypeError: @@ -493,20 +639,20 @@ class LexerReflect(object): else: for s in self.states: if not isinstance(s, tuple) or len(s) != 2: - self.log.error("Invalid state specifier %r. Must be a tuple (statename,'exclusive|inclusive')", s) + self.log.error("Invalid state specifier %s. 
Must be a tuple (statename,'exclusive|inclusive')", repr(s)) self.error = True continue name, statetype = s if not isinstance(name, StringTypes): - self.log.error('State name %r must be a string', name) + self.log.error('State name %s must be a string', repr(name)) self.error = True continue if not (statetype == 'inclusive' or statetype == 'exclusive'): - self.log.error("State type for state %r must be 'inclusive' or 'exclusive'", name) + self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name) self.error = True continue if name in self.stateinfo: - self.log.error("State %r already defined", name) + self.log.error("State '%s' already defined", name) self.error = True continue self.stateinfo[name] = statetype @@ -549,7 +695,7 @@ class LexerReflect(object): elif tokname == 'ignore': line = t.__code__.co_firstlineno file = t.__code__.co_filename - self.log.error("%s:%d: Rule %r must be defined as a string", file, line, t.__name__) + self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__) self.error = True else: for s in states: @@ -562,7 +708,7 @@ class LexerReflect(object): self.log.warning("%s contains a literal backslash '\\'", f) elif tokname == 'error': - self.log.error("Rule %r must be defined as a function", f) + self.log.error("Rule '%s' must be defined as a function", f) self.error = True else: for s in states: @@ -597,57 +743,57 @@ class LexerReflect(object): reqargs = 1 nargs = f.__code__.co_argcount if nargs > reqargs: - self.log.error("%s:%d: Rule %r has too many arguments", file, line, f.__name__) + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) self.error = True continue if nargs < reqargs: - self.log.error("%s:%d: Rule %r requires an argument", file, line, f.__name__) + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) self.error = True continue if not _get_regex(f): - self.log.error("%s:%d: No regular expression defined for rule %r", file, line, f.__name__) + self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__) self.error = True continue try: c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags) if c.match(''): - self.log.error("%s:%d: Regular expression for rule %r matches empty string", file, line, f.__name__) + self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__) self.error = True except re.error as e: self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e) if '#' in _get_regex(f): - self.log.error("%s:%d. Make sure '#' in rule %r is escaped with '\\#'", file, line, f.__name__) + self.log.error("%s:%d. 
Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__) self.error = True # Validate all rules defined by strings for name, r in self.strsym[state]: tokname = self.toknames[name] if tokname == 'error': - self.log.error("Rule %r must be defined as a function", name) + self.log.error("Rule '%s' must be defined as a function", name) self.error = True continue if tokname not in self.tokens and tokname.find('ignore_') < 0: - self.log.error("Rule %r defined for an unspecified token %s", name, tokname) + self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname) self.error = True continue try: c = re.compile('(?P<%s>%s)' % (name, r), self.reflags) if (c.match('')): - self.log.error("Regular expression for rule %r matches empty string", name) + self.log.error("Regular expression for rule '%s' matches empty string", name) self.error = True except re.error as e: - self.log.error("Invalid regular expression for rule %r. %s", name, e) + self.log.error("Invalid regular expression for rule '%s'. %s", name, e) if '#' in r: - self.log.error("Make sure '#' in rule %r is escaped with '\\#'", name) + self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name) self.error = True if not self.funcsym[state] and not self.strsym[state]: - self.log.error("No rules defined for state %r", state) + self.log.error("No rules defined for state '%s'", state) self.error = True # Validate the error function @@ -665,11 +811,11 @@ class LexerReflect(object): reqargs = 1 nargs = f.__code__.co_argcount if nargs > reqargs: - self.log.error("%s:%d: Rule %r has too many arguments", file, line, f.__name__) + self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__) self.error = True if nargs < reqargs: - self.log.error("%s:%d: Rule %r requires an argument", file, line, f.__name__) + self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__) self.error = True for module in self.modules: @@ -714,14 +860,18 @@ class LexerReflect(object): # # Build all of the regular expression rules from definitions in the supplied module # ----------------------------------------------------------------------------- -def lex(*, module=None, object=None, debug=False, - reflags=int(re.VERBOSE), debuglog=None, errorlog=None): +def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab', + reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None): + + if lextab is None: + lextab = 'lextab' global lexer ldict = None stateinfo = {'INITIAL': 'inclusive'} lexobj = Lexer() + lexobj.lexoptimize = optimize global token, input if errorlog is None: @@ -745,11 +895,30 @@ def lex(*, module=None, object=None, debug=False, else: ldict = get_caller_module_dict(2) + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = ldict.get('__package__') + if pkg and isinstance(lextab, str): + if '.' not in lextab: + lextab = pkg + '.' 
+ lextab + # Collect parser information from the dictionary linfo = LexerReflect(ldict, log=errorlog, reflags=reflags) linfo.get_all() - if linfo.validate_all(): - raise SyntaxError("Can't build lexer") + if not optimize: + if linfo.validate_all(): + raise SyntaxError("Can't build lexer") + + if optimize and lextab: + try: + lexobj.readtab(lextab, ldict) + token = lexobj.token + input = lexobj.input + lexer = lexobj + return lexobj + + except ImportError: + pass # Dump some basic debugging information if debug: @@ -780,6 +949,8 @@ def lex(*, module=None, object=None, debug=False, # Add rules defined by functions first for fname, f in linfo.funcsym[state]: + line = f.__code__.co_firstlineno + file = f.__code__.co_filename regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f))) if debug: debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state) @@ -836,9 +1007,9 @@ def lex(*, module=None, object=None, debug=False, for s, stype in stateinfo.items(): if stype == 'exclusive': if s not in linfo.errorf: - errorlog.warning("No error rule is defined for exclusive state %r", s) + errorlog.warning("No error rule is defined for exclusive state '%s'", s) if s not in linfo.ignore and lexobj.lexignore: - errorlog.warning("No ignore rule is defined for exclusive state %r", s) + errorlog.warning("No ignore rule is defined for exclusive state '%s'", s) elif stype == 'inclusive': if s not in linfo.errorf: linfo.errorf[s] = linfo.errorf.get('INITIAL', None) @@ -850,6 +1021,29 @@ def lex(*, module=None, object=None, debug=False, input = lexobj.input lexer = lexobj + # If in optimize mode, we write the lextab + if lextab and optimize: + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If lextab specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(lextab, types.ModuleType): + srcfile = lextab.__file__ + else: + if '.' not in lextab: + srcfile = ldict['__file__'] + else: + parts = lextab.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + try: + lexobj.writetab(lextab, outputdir) + except IOError as e: + errorlog.warning("Couldn't write lextab module %r. 
%s" % (lextab, e)) + return lexobj # ----------------------------------------------------------------------------- @@ -862,8 +1056,9 @@ def runmain(lexer=None, data=None): if not data: try: filename = sys.argv[1] - with open(filename) as f: - data = f.read() + f = open(filename) + data = f.read() + f.close() except IndexError: sys.stdout.write('Reading from standard input (type EOF to end):\n') data = sys.stdin.read() @@ -882,7 +1077,7 @@ def runmain(lexer=None, data=None): tok = _token() if not tok: break - sys.stdout.write(f'({tok.type},{tok.value!r},{tok.lineno},{tok.lexpos})\n') + sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos)) # ----------------------------------------------------------------------------- # @TOKEN(regex) @@ -899,3 +1094,7 @@ def TOKEN(r): f.regex = r return f return set_regex + +# Alternative spelling of the TOKEN decorator +Token = TOKEN + diff --git a/components/script/dom/bindings/codegen/ply/ply/yacc.py b/components/script/dom/bindings/codegen/ply/ply/yacc.py index bce63c18241..03bd86ee078 100644 --- a/components/script/dom/bindings/codegen/ply/ply/yacc.py +++ b/components/script/dom/bindings/codegen/ply/ply/yacc.py @@ -1,12 +1,10 @@ # ----------------------------------------------------------------------------- # ply: yacc.py # -# Copyright (C) 2001-2020 +# Copyright (C) 2001-2017 # David M. Beazley (Dabeaz LLC) # All rights reserved. # -# Latest version: https://github.com/dabeaz/ply -# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: @@ -16,9 +14,9 @@ # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. -# * Neither the name of David Beazley or Dabeaz LLC may be used to +# * Neither the name of the David Beazley or Dabeaz LLC may be used to # endorse or promote products derived from this software without -# specific prior written permission. +# specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT @@ -34,7 +32,7 @@ # ----------------------------------------------------------------------------- # # This implements an LR parser that is constructed from grammar rules defined -# as Python functions. The grammar is specified by supplying the BNF inside +# as Python functions. The grammer is specified by supplying the BNF inside # Python documentation strings. The inspiration for this technique was borrowed # from John Aycock's Spark parsing system. PLY might be viewed as cross between # Spark and the GNU bison utility. @@ -64,7 +62,13 @@ import re import types import sys +import os.path import inspect +import base64 +import warnings + +__version__ = '3.10' +__tabversion__ = '3.10' #----------------------------------------------------------------------------- # === User configurable parameters === @@ -72,13 +76,28 @@ import inspect # Change these to modify the default behavior of yacc (if you wish) #----------------------------------------------------------------------------- -yaccdebug = False # Debugging mode. If set, yacc generates a -# a 'parser.out' file in the current directory +yaccdebug = True # Debugging mode. 
If set, yacc generates a + # a 'parser.out' file in the current directory debug_file = 'parser.out' # Default name of the debugging file +tab_module = 'parsetab' # Default name of the table module +default_lr = 'LALR' # Default LR table generation method + error_count = 3 # Number of symbols that must be shifted to leave recovery mode + +yaccdevel = False # Set to True if developing yacc. This turns off optimized + # implementations of certain functions. + resultlimit = 40 # Size limit of results when running in debug mode. +pickle_protocol = 0 # Protocol to use when writing pickle files + +# String type-checking compatibility +if sys.version_info[0] < 3: + string_types = basestring +else: + string_types = str + MAXINT = sys.maxsize # This object is a stand-in for a logging object created by the @@ -136,6 +155,48 @@ def format_stack_entry(r): else: return '<%s @ 0x%x>' % (type(r).__name__, id(r)) +# Panic mode error recovery support. This feature is being reworked--much of the +# code here is to offer a deprecation/backwards compatible transition + +_errok = None +_token = None +_restart = None +_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error(). +Instead, invoke the methods on the associated parser instance: + + def p_error(p): + ... + # Use parser.errok(), parser.token(), parser.restart() + ... + + parser = yacc.yacc() +''' + +def errok(): + warnings.warn(_warnmsg) + return _errok() + +def restart(): + warnings.warn(_warnmsg) + return _restart() + +def token(): + warnings.warn(_warnmsg) + return _token() + +# Utility function to call the p_error() function with some deprecation hacks +def call_errorfunc(errorfunc, token, parser): + global _errok, _token, _restart + _errok = parser.errok + _token = parser.token + _restart = parser.restart + r = errorfunc(token) + try: + del _errok, _token, _restart + except NameError: + pass + return r + #----------------------------------------------------------------------------- # === LR Parsing Engine === # @@ -207,9 +268,6 @@ class YaccProduction: def lexpos(self, n): return getattr(self.slice[n], 'lexpos', 0) - def set_lexpos(self, n, lexpos): - self.slice[n].lexpos = lexpos - def lexspan(self, n): startpos = getattr(self.slice[n], 'lexpos', 0) endpos = getattr(self.slice[n], 'endlexpos', startpos) @@ -262,19 +320,33 @@ class LRParser: def disable_defaulted_states(self): self.defaulted_states = {} - # parse(). + def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + if debug or yaccdevel: + if isinstance(debug, int): + debug = PlyLogger(sys.stderr) + return self.parsedebug(input, lexer, debug, tracking, tokenfunc) + elif tracking: + return self.parseopt(input, lexer, debug, tracking, tokenfunc) + else: + return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc) + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parsedebug(). # - # This is the core parsing engine. To operate, it requires a lexer object. - # Two options are provided. The debug flag turns on debugging so that you can - # see the various rule reductions and parsing steps. tracking turns on position - # tracking. In this mode, symbols will record the starting/ending line number and - # character index. 
- - def parse(self, input=None, lexer=None, debug=False, tracking=False): - # If debugging has been specified as a flag, turn it into a logging object - if isinstance(debug, int) and debug: - debug = PlyLogger(sys.stderr) + # This is the debugging enabled version of parse(). All changes made to the + # parsing engine should be made here. Optimized versions of this function + # are automatically created by the ply/ygen.py script. This script cuts out + # sections enclosed in markers such as this: + # + # #--! DEBUG + # statements + # #--! DEBUG + # + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parsedebug-start lookahead = None # Current lookahead symbol lookaheadstack = [] # Stack of lookahead symbols actions = self.action # Local reference to action table (to avoid lookup on self.) @@ -284,8 +356,9 @@ class LRParser: pslice = YaccProduction(None) # Production object passed to grammar rules errorcount = 0 # Used during error recovery - if debug: - debug.info('PLY: PARSE DEBUG START') + #--! DEBUG + debug.info('PLY: PARSE DEBUG START') + #--! DEBUG # If no lexer was given, we will try to use the lex module if not lexer: @@ -300,14 +373,24 @@ class LRParser: if input is not None: lexer.input(input) - # Set the token function - get_token = self.token = lexer.token + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token # Set up the state and symbol stacks - statestack = self.statestack = [] # Stack of parsing states - symstack = self.symstack = [] # Stack of grammar symbols - pslice.stack = symstack # Put in the production - errtoken = None # Err token + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token # The start state is assumed to be (0,$end) @@ -321,8 +404,10 @@ class LRParser: # is already set, we just use that. Otherwise, we'll pull # the next token off of the lookaheadstack or from the lexer - if debug: - debug.debug('State : %s', state) + #--! DEBUG + debug.debug('') + debug.debug('State : %s', state) + #--! DEBUG if state not in defaulted_states: if not lookahead: @@ -339,12 +424,14 @@ class LRParser: t = actions[state].get(ltype) else: t = defaulted_states[state] - if debug: - debug.debug('Defaulted state %s: Reduce using %d', state, -t) + #--! DEBUG + debug.debug('Defaulted state %s: Reduce using %d', state, -t) + #--! DEBUG - if debug: - debug.debug('Stack : %s', - ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + debug.debug('Stack : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG if t is not None: if t > 0: @@ -352,8 +439,9 @@ class LRParser: statestack.append(t) state = t - if debug: - debug.debug('Action : Shift and goto state %s', t) + #--! DEBUG + debug.debug('Action : Shift and goto state %s', t) + #--! 
DEBUG symstack.append(lookahead) lookahead = None @@ -374,19 +462,22 @@ class LRParser: sym.type = pname # Production name sym.value = None - if debug: - if plen: - debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, - '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']', - goto[statestack[-1-plen]][pname]) - else: - debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [], - goto[statestack[-1]][pname]) + #--! DEBUG + if plen: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, + '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']', + goto[statestack[-1-plen]][pname]) + else: + debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [], + goto[statestack[-1]][pname]) + + #--! DEBUG if plen: targ = symstack[-plen-1:] targ[0] = sym + #--! TRACKING if tracking: t1 = targ[1] sym.lineno = t1.lineno @@ -394,6 +485,7 @@ class LRParser: t1 = targ[-1] sym.endlineno = getattr(t1, 'endlineno', t1.lineno) sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! # The code enclosed in this section is duplicated @@ -408,8 +500,9 @@ class LRParser: self.state = state p.callable(pslice) del statestack[-plen:] - if debug: - debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG symstack.append(sym) state = goto[statestack[-1]][pname] statestack.append(state) @@ -426,12 +519,15 @@ class LRParser: self.errorok = False continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! else: + #--! TRACKING if tracking: sym.lineno = lexer.lineno sym.lexpos = lexer.lexpos + #--! TRACKING targ = [sym] @@ -446,8 +542,9 @@ class LRParser: # Call the grammar rule with our special slice object self.state = state p.callable(pslice) - if debug: - debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG + debug.info('Result : %s', format_result(pslice[0])) + #--! DEBUG symstack.append(sym) state = goto[statestack[-1]][pname] statestack.append(state) @@ -463,22 +560,23 @@ class LRParser: self.errorok = False continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! if t == 0: n = symstack[-1] result = getattr(n, 'value', None) - - if debug: - debug.info('Done : Returning %s', format_result(result)) - debug.info('PLY: PARSE DEBUG END') - + #--! DEBUG + debug.info('Done : Returning %s', format_result(result)) + debug.info('PLY: PARSE DEBUG END') + #--! DEBUG return result if t is None: - if debug: - debug.error('Error : %s', - ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG + debug.error('Error : %s', + ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip()) + #--! DEBUG # We have some kind of parsing error here. To handle # this, we are going to push the current token onto @@ -500,7 +598,7 @@ class LRParser: if errtoken and not hasattr(errtoken, 'lexer'): errtoken.lexer = lexer self.state = state - tok = self.errorfunc(errtoken) + tok = call_errorfunc(self.errorfunc, errtoken, self) if self.errorok: # User must have done some kind of panic # mode recovery on their own. The @@ -550,9 +648,11 @@ class LRParser: if sym.type == 'error': # Hmmm. Error is on top of stack, we'll just nuke input # symbol and continue + #--! TRACKING if tracking: sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! 
TRACKING lookahead = None continue @@ -569,17 +669,609 @@ class LRParser: lookahead = t else: sym = symstack.pop() + #--! TRACKING if tracking: lookahead.lineno = sym.lineno lookahead.lexpos = sym.lexpos + #--! TRACKING statestack.pop() state = statestack[-1] continue - # If we'r here, something really bad happened + # Call an error function here raise RuntimeError('yacc: internal parser error!!!\n') + #--! parsedebug-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt(). + # + # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY! + # This code is automatically generated by the ply/ygen.py script. Make + # changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + #--! 
TRACKING + if tracking: + t1 = targ[1] + sym.lineno = t1.lineno + sym.lexpos = t1.lexpos + t1 = targ[-1] + sym.endlineno = getattr(t1, 'endlineno', t1.lineno) + sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos) + #--! TRACKING + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + #--! TRACKING + if tracking: + sym.lineno = lexer.lineno + sym.lexpos = lexer.lexpos + #--! TRACKING + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. 
EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + #--! TRACKING + if tracking: + sym.endlineno = getattr(lookahead, 'lineno', sym.lineno) + sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos) + #--! TRACKING + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + #--! TRACKING + if tracking: + lookahead.lineno = sym.lineno + lookahead.lexpos = sym.lexpos + #--! TRACKING + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-end + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # parseopt_notrack(). + # + # Optimized version of parseopt() with line number tracking removed. + # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated + # by the ply/ygen.py script. Make changes to the parsedebug() method instead. + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None): + #--! parseopt-notrack-start + lookahead = None # Current lookahead symbol + lookaheadstack = [] # Stack of lookahead symbols + actions = self.action # Local reference to action table (to avoid lookup on self.) + goto = self.goto # Local reference to goto table (to avoid lookup on self.) + prod = self.productions # Local reference to production list (to avoid lookup on self.) + defaulted_states = self.defaulted_states # Local reference to defaulted states + pslice = YaccProduction(None) # Production object passed to grammar rules + errorcount = 0 # Used during error recovery + + + # If no lexer was given, we will try to use the lex module + if not lexer: + from . 
import lex + lexer = lex.lexer + + # Set up the lexer and parser objects on pslice + pslice.lexer = lexer + pslice.parser = self + + # If input was supplied, pass to lexer + if input is not None: + lexer.input(input) + + if tokenfunc is None: + # Tokenize function + get_token = lexer.token + else: + get_token = tokenfunc + + # Set the parser() token method (sometimes used in error recovery) + self.token = get_token + + # Set up the state and symbol stacks + + statestack = [] # Stack of parsing states + self.statestack = statestack + symstack = [] # Stack of grammar symbols + self.symstack = symstack + + pslice.stack = symstack # Put in the production + errtoken = None # Err token + + # The start state is assumed to be (0,$end) + + statestack.append(0) + sym = YaccSymbol() + sym.type = '$end' + symstack.append(sym) + state = 0 + while True: + # Get the next symbol on the input. If a lookahead symbol + # is already set, we just use that. Otherwise, we'll pull + # the next token off of the lookaheadstack or from the lexer + + + if state not in defaulted_states: + if not lookahead: + if not lookaheadstack: + lookahead = get_token() # Get the next token + else: + lookahead = lookaheadstack.pop() + if not lookahead: + lookahead = YaccSymbol() + lookahead.type = '$end' + + # Check the action table + ltype = lookahead.type + t = actions[state].get(ltype) + else: + t = defaulted_states[state] + + + if t is not None: + if t > 0: + # shift a symbol on the stack + statestack.append(t) + state = t + + + symstack.append(lookahead) + lookahead = None + + # Decrease error count on successful shift + if errorcount: + errorcount -= 1 + continue + + if t < 0: + # reduce a symbol on the stack, emit a production + p = prod[-t] + pname = p.name + plen = p.len + + # Get production function + sym = YaccSymbol() + sym.type = pname # Production name + sym.value = None + + + if plen: + targ = symstack[-plen-1:] + targ[0] = sym + + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # below as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + del symstack[-plen:] + self.state = state + p.callable(pslice) + del statestack[-plen:] + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + symstack.extend(targ[1:-1]) # Put the production slice back on the stack + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + else: + + + targ = [sym] + + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + # The code enclosed in this section is duplicated + # above as a performance optimization. Make sure + # changes get made in both locations. + + pslice.slice = targ + + try: + # Call the grammar rule with our special slice object + self.state = state + p.callable(pslice) + symstack.append(sym) + state = goto[statestack[-1]][pname] + statestack.append(state) + except SyntaxError: + # If an error was set. 
Enter error recovery state + lookaheadstack.append(lookahead) # Save the current lookahead token + statestack.pop() # Pop back one state (before the reduce) + state = statestack[-1] + sym.type = 'error' + sym.value = 'error' + lookahead = sym + errorcount = error_count + self.errorok = False + + continue + # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + if t == 0: + n = symstack[-1] + result = getattr(n, 'value', None) + return result + + if t is None: + + + # We have some kind of parsing error here. To handle + # this, we are going to push the current token onto + # the tokenstack and replace it with an 'error' token. + # If there are any synchronization rules, they may + # catch it. + # + # In addition to pushing the error token, we call call + # the user defined p_error() function if this is the + # first syntax error. This function is only called if + # errorcount == 0. + if errorcount == 0 or self.errorok: + errorcount = error_count + self.errorok = False + errtoken = lookahead + if errtoken.type == '$end': + errtoken = None # End of file! + if self.errorfunc: + if errtoken and not hasattr(errtoken, 'lexer'): + errtoken.lexer = lexer + self.state = state + tok = call_errorfunc(self.errorfunc, errtoken, self) + if self.errorok: + # User must have done some kind of panic + # mode recovery on their own. The + # returned token is the next lookahead + lookahead = tok + errtoken = None + continue + else: + if errtoken: + if hasattr(errtoken, 'lineno'): + lineno = lookahead.lineno + else: + lineno = 0 + if lineno: + sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type)) + else: + sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type) + else: + sys.stderr.write('yacc: Parse error in input. EOF\n') + return + + else: + errorcount = error_count + + # case 1: the statestack only has 1 entry on it. If we're in this state, the + # entire parse has been rolled back and we're completely hosed. The token is + # discarded and we just keep going. + + if len(statestack) <= 1 and lookahead.type != '$end': + lookahead = None + errtoken = None + state = 0 + # Nuke the pushback stack + del lookaheadstack[:] + continue + + # case 2: the statestack has a couple of entries on it, but we're + # at the end of the file. nuke the top entry and generate an error token + + # Start nuking entries on the stack + if lookahead.type == '$end': + # Whoa. We're really hosed here. Bail out + return + + if lookahead.type != 'error': + sym = symstack[-1] + if sym.type == 'error': + # Hmmm. Error is on top of stack, we'll just nuke input + # symbol and continue + lookahead = None + continue + + # Create the error symbol for the first time and make it the new lookahead symbol + t = YaccSymbol() + t.type = 'error' + + if hasattr(lookahead, 'lineno'): + t.lineno = t.endlineno = lookahead.lineno + if hasattr(lookahead, 'lexpos'): + t.lexpos = t.endlexpos = lookahead.lexpos + t.value = lookahead + lookaheadstack.append(lookahead) + lookahead = t + else: + sym = symstack.pop() + statestack.pop() + state = statestack[-1] + + continue + + # Call an error function here + raise RuntimeError('yacc: internal parser error!!!\n') + + #--! parseopt-notrack-end + # ----------------------------------------------------------------------------- # === Grammar Representation === # @@ -668,7 +1360,7 @@ class Production(object): p = LRItem(self, n) # Precompute the list of productions immediately following. 
try: - p.lr_after = self.Prodnames[p.prod[n+1]] + p.lr_after = Prodnames[p.prod[n+1]] except (IndexError, KeyError): p.lr_after = [] try: @@ -682,6 +1374,32 @@ class Production(object): if self.func: self.callable = pdict[self.func] +# This class serves as a minimal standin for Production objects when +# reading table data from files. It only contains information +# actually used by the LR parsing engine, plus some additional +# debugging information. +class MiniProduction(object): + def __init__(self, str, name, len, func, file, line): + self.name = name + self.len = len + self.func = func + self.callable = None + self.file = file + self.line = line + self.str = str + + def __str__(self): + return self.str + + def __repr__(self): + return 'MiniProduction(%s)' % self.str + + # Bind the production function name to a callable + def bind(self, pdict): + if self.func: + self.callable = pdict[self.func] + + # ----------------------------------------------------------------------------- # class LRItem # @@ -755,17 +1473,17 @@ class GrammarError(YaccError): class Grammar(object): def __init__(self, terminals): self.Productions = [None] # A list of all of the productions. The first - # entry is always reserved for the purpose of - # building an augmented grammar + # entry is always reserved for the purpose of + # building an augmented grammar self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all - # productions of that nonterminal. + # productions of that nonterminal. self.Prodmap = {} # A dictionary that is only used to detect duplicate - # productions. + # productions. self.Terminals = {} # A dictionary mapping the names of terminal symbols to a - # list of the rules where they are used. + # list of the rules where they are used. for term in terminals: self.Terminals[term] = [] @@ -773,18 +1491,18 @@ class Grammar(object): self.Terminals['error'] = [] self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list - # of rule numbers where they are used. + # of rule numbers where they are used. self.First = {} # A dictionary of precomputed FIRST(x) symbols self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the - # form ('right',level) or ('nonassoc', level) or ('left',level) + # form ('right',level) or ('nonassoc', level) or ('left',level) self.UsedPrecedence = set() # Precedence rules that were actually used by the grammer. - # This is only used to provide error checking and to generate - # a warning about unused precedence rules. + # This is only used to provide error checking and to generate + # a warning about unused precedence rules. self.Start = None # Starting symbol for the grammar @@ -1239,6 +1957,77 @@ class Grammar(object): i += 1 p.lr_items = lr_items +# ----------------------------------------------------------------------------- +# == Class LRTable == +# +# This basic class represents a basic table of LR parsing information. +# Methods for generating the tables are not defined here. They are defined +# in the derived class LRGeneratedTable. 
+# ----------------------------------------------------------------------------- + +class VersionError(YaccError): + pass + +class LRTable(object): + def __init__(self): + self.lr_action = None + self.lr_goto = None + self.lr_productions = None + self.lr_method = None + + def read_table(self, module): + if isinstance(module, types.ModuleType): + parsetab = module + else: + exec('import %s' % module) + parsetab = sys.modules[module] + + if parsetab._tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + + self.lr_action = parsetab._lr_action + self.lr_goto = parsetab._lr_goto + + self.lr_productions = [] + for p in parsetab._lr_productions: + self.lr_productions.append(MiniProduction(*p)) + + self.lr_method = parsetab._lr_method + return parsetab._lr_signature + + def read_pickle(self, filename): + try: + import cPickle as pickle + except ImportError: + import pickle + + if not os.path.exists(filename): + raise ImportError + + in_f = open(filename, 'rb') + + tabversion = pickle.load(in_f) + if tabversion != __tabversion__: + raise VersionError('yacc table file version is out of date') + self.lr_method = pickle.load(in_f) + signature = pickle.load(in_f) + self.lr_action = pickle.load(in_f) + self.lr_goto = pickle.load(in_f) + productions = pickle.load(in_f) + + self.lr_productions = [] + for p in productions: + self.lr_productions.append(MiniProduction(*p)) + + in_f.close() + return signature + + # Bind all production function names to callable objects in pdict + def bind_callables(self, pdict): + for p in self.lr_productions: + p.bind(pdict) + + # ----------------------------------------------------------------------------- # === LR Generator === # @@ -1300,17 +2089,20 @@ def traverse(x, N, stack, F, X, R, FP): class LALRError(YaccError): pass - # ----------------------------------------------------------------------------- -# == LRTable == +# == LRGeneratedTable == # # This class implements the LR table generation algorithm. There are no -# public methods. +# public methods except for write() # ----------------------------------------------------------------------------- -class LRTable: - def __init__(self, grammar, log=None): +class LRGeneratedTable(LRTable): + def __init__(self, grammar, method='LALR', log=None): + if method not in ['SLR', 'LALR']: + raise LALRError('Unsupported method %s' % method) + self.grammar = grammar + self.lr_method = method # Set up the logger if not log: @@ -1326,7 +2118,7 @@ class LRTable: self._add_count = 0 # Internal counter used to detect cycles - # Diagnostic information filled in by the table generator + # Diagonistic information filled in by the table generator self.sr_conflict = 0 self.rr_conflict = 0 self.conflicts = [] # List of conflicts @@ -1340,11 +2132,6 @@ class LRTable: self.grammar.compute_follow() self.lr_parse_table() - # Bind all production function names to callable objects in pdict - def bind_callables(self, pdict): - for p in self.lr_productions: - p.bind(pdict) - # Compute the LR(0) closure operation on I, where I is a set of LR(0) items. def lr0_closure(self, I): @@ -1514,6 +2301,7 @@ class LRTable: # ----------------------------------------------------------------------------- def dr_relation(self, C, trans, nullable): + dr_set = {} state, N = trans terms = [] @@ -1751,11 +2539,15 @@ class LRTable: actionp = {} # Action production array (temporary) + log.info('Parsing method: %s', self.lr_method) + # Step 1: Construct C = { I0, I1, ... 
IN}, collection of LR(0) items # This determines the number of states C = self.lr0_items() - self.add_lalr_lookaheads(C) + + if self.lr_method == 'LALR': + self.add_lalr_lookaheads(C) # Build the parser table, state by state st = 0 @@ -1773,115 +2565,118 @@ class LRTable: log.info('') for p in I: - if p.len == p.lr_index + 1: - if p.name == "S'": - # Start symbol. Accept! - st_action['$end'] = 0 - st_actionp['$end'] = p + if p.len == p.lr_index + 1: + if p.name == "S'": + # Start symbol. Accept! + st_action['$end'] = 0 + st_actionp['$end'] = p + else: + # We are at the end of a production. Reduce! + if self.lr_method == 'LALR': + laheads = p.lookaheads[st] + else: + laheads = self.grammar.Follow[p.name] + for a in laheads: + actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p))) + r = st_action.get(a) + if r is not None: + # Whoa. Have a shift/reduce or reduce/reduce conflict + if r > 0: + # Need to decide on shift or reduce here + # By default we favor shifting. Need to add + # some precedence rules here. + + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) + + # Reduce precedence comes from rule being reduced (p) + rprec, rlevel = Productions[p.number].prec + + if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): + # We really need to reduce here. + st_action[a] = -p.number + st_actionp[a] = p + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) + Productions[p.number].reduced += 1 + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the shift + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif r < 0: + # Reduce/reduce conflict. In this case, we favor the rule + # that was defined first in the grammar file + oldp = Productions[-r] + pp = Productions[p.number] + if oldp.line > pp.line: + st_action[a] = -p.number + st_actionp[a] = p + chosenp, rejectp = pp, oldp + Productions[p.number].reduced += 1 + Productions[oldp.number].reduced -= 1 + else: + chosenp, rejectp = oldp, pp + self.rr_conflicts.append((st, chosenp, rejectp)) + log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)', + a, st_actionp[a].number, st_actionp[a]) + else: + raise LALRError('Unknown conflict in state %d' % st) + else: + st_action[a] = -p.number + st_actionp[a] = p + Productions[p.number].reduced += 1 else: - # We are at the end of a production. Reduce! - laheads = p.lookaheads[st] - for a in laheads: - actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p))) - r = st_action.get(a) - if r is not None: - # Whoa. Have a shift/reduce or reduce/reduce conflict - if r > 0: - # Need to decide on shift or reduce here - # By default we favor shifting. Need to add - # some precedence rules here. + i = p.lr_index + a = p.prod[i+1] # Get symbol right after the "." + if a in self.grammar.Terminals: + g = self.lr0_goto(I, a) + j = self.lr0_cidhash.get(id(g), -1) + if j >= 0: + # We are in a shift state + actlist.append((a, p, 'shift and go to state %d' % j)) + r = st_action.get(a) + if r is not None: + # Whoa have a shift/reduce or shift/shift conflict + if r > 0: + if r != j: + raise LALRError('Shift/shift conflict in state %d' % st) + elif r < 0: + # Do a precedence check. + # - if precedence of reduce rule is higher, we reduce. + # - if precedence of reduce is same and left assoc, we reduce. 
+ # - otherwise we shift - # Shift precedence comes from the token - sprec, slevel = Precedence.get(a, ('right', 0)) + # Shift precedence comes from the token + sprec, slevel = Precedence.get(a, ('right', 0)) - # Reduce precedence comes from rule being reduced (p) - rprec, rlevel = Productions[p.number].prec + # Reduce precedence comes from the rule that could have been reduced + rprec, rlevel = Productions[st_actionp[a].number].prec + + if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): + # We decide to shift here... highest precedence to shift + Productions[st_actionp[a].number].reduced -= 1 + st_action[a] = j + st_actionp[a] = p + if not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as shift', a) + self.sr_conflicts.append((st, a, 'shift')) + elif (slevel == rlevel) and (rprec == 'nonassoc'): + st_action[a] = None + else: + # Hmmm. Guess we'll keep the reduce + if not slevel and not rlevel: + log.info(' ! shift/reduce conflict for %s resolved as reduce', a) + self.sr_conflicts.append((st, a, 'reduce')) - if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')): - # We really need to reduce here. - st_action[a] = -p.number - st_actionp[a] = p - if not slevel and not rlevel: - log.info(' ! shift/reduce conflict for %s resolved as reduce', a) - self.sr_conflicts.append((st, a, 'reduce')) - Productions[p.number].reduced += 1 - elif (slevel == rlevel) and (rprec == 'nonassoc'): - st_action[a] = None else: - # Hmmm. Guess we'll keep the shift - if not rlevel: - log.info(' ! shift/reduce conflict for %s resolved as shift', a) - self.sr_conflicts.append((st, a, 'shift')) - elif r < 0: - # Reduce/reduce conflict. In this case, we favor the rule - # that was defined first in the grammar file - oldp = Productions[-r] - pp = Productions[p.number] - if oldp.line > pp.line: - st_action[a] = -p.number - st_actionp[a] = p - chosenp, rejectp = pp, oldp - Productions[p.number].reduced += 1 - Productions[oldp.number].reduced -= 1 - else: - chosenp, rejectp = oldp, pp - self.rr_conflicts.append((st, chosenp, rejectp)) - log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)', - a, st_actionp[a].number, st_actionp[a]) + raise LALRError('Unknown conflict in state %d' % st) else: - raise LALRError('Unknown conflict in state %d' % st) - else: - st_action[a] = -p.number - st_actionp[a] = p - Productions[p.number].reduced += 1 - else: - i = p.lr_index - a = p.prod[i+1] # Get symbol right after the "." - if a in self.grammar.Terminals: - g = self.lr0_goto(I, a) - j = self.lr0_cidhash.get(id(g), -1) - if j >= 0: - # We are in a shift state - actlist.append((a, p, 'shift and go to state %d' % j)) - r = st_action.get(a) - if r is not None: - # Whoa have a shift/reduce or shift/shift conflict - if r > 0: - if r != j: - raise LALRError('Shift/shift conflict in state %d' % st) - elif r < 0: - # Do a precedence check. - # - if precedence of reduce rule is higher, we reduce. - # - if precedence of reduce is same and left assoc, we reduce. - # - otherwise we shift - - # Shift precedence comes from the token - sprec, slevel = Precedence.get(a, ('right', 0)) - - # Reduce precedence comes from the rule that could have been reduced - rprec, rlevel = Productions[st_actionp[a].number].prec - - if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')): - # We decide to shift here... highest precedence to shift - Productions[st_actionp[a].number].reduced -= 1 - st_action[a] = j - st_actionp[a] = p - if not rlevel: - log.info(' ! 
shift/reduce conflict for %s resolved as shift', a) - self.sr_conflicts.append((st, a, 'shift')) - elif (slevel == rlevel) and (rprec == 'nonassoc'): - st_action[a] = None - else: - # Hmmm. Guess we'll keep the reduce - if not slevel and not rlevel: - log.info(' ! shift/reduce conflict for %s resolved as reduce', a) - self.sr_conflicts.append((st, a, 'reduce')) - - else: - raise LALRError('Unknown conflict in state %d' % st) - else: - st_action[a] = j - st_actionp[a] = p + st_action[a] = j + st_actionp[a] = p # Print the actions associated with each terminal _actprint = {} @@ -1922,6 +2717,154 @@ class LRTable: goto[st] = st_goto st += 1 + # ----------------------------------------------------------------------------- + # write() + # + # This function writes the LR parsing tables to a file + # ----------------------------------------------------------------------------- + + def write_table(self, tabmodule, outputdir='', signature=''): + if isinstance(tabmodule, types.ModuleType): + raise IOError("Won't overwrite existing tabmodule") + + basemodulename = tabmodule.split('.')[-1] + filename = os.path.join(outputdir, basemodulename) + '.py' + try: + f = open(filename, 'w') + + f.write(''' +# %s +# This file is automatically generated. Do not edit. +_tabversion = %r + +_lr_method = %r + +_lr_signature = %r + ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature)) + + # Change smaller to 0 to go back to original tables + smaller = 1 + + # Factor out names to try and make smaller + if smaller: + items = {} + + for s, nd in self.lr_action.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_action_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_action = {} +for _k, _v in _lr_action_items.items(): + for _x,_y in zip(_v[0],_v[1]): + if not _x in _lr_action: _lr_action[_x] = {} + _lr_action[_x][_k] = _y +del _lr_action_items +''') + + else: + f.write('\n_lr_action = { ') + for k, v in self.lr_action.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + if smaller: + # Factor out names to try and make smaller + items = {} + + for s, nd in self.lr_goto.items(): + for name, v in nd.items(): + i = items.get(name) + if not i: + i = ([], []) + items[name] = i + i[0].append(s) + i[1].append(v) + + f.write('\n_lr_goto_items = {') + for k, v in items.items(): + f.write('%r:([' % k) + for i in v[0]: + f.write('%r,' % i) + f.write('],[') + for i in v[1]: + f.write('%r,' % i) + + f.write(']),') + f.write('}\n') + + f.write(''' +_lr_goto = {} +for _k, _v in _lr_goto_items.items(): + for _x, _y in zip(_v[0], _v[1]): + if not _x in _lr_goto: _lr_goto[_x] = {} + _lr_goto[_x][_k] = _y +del _lr_goto_items +''') + else: + f.write('\n_lr_goto = { ') + for k, v in self.lr_goto.items(): + f.write('(%r,%r):%r,' % (k[0], k[1], v)) + f.write('}\n') + + # Write production table + f.write('_lr_productions = [\n') + for p in self.lr_productions: + if p.func: + f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len, + p.func, os.path.basename(p.file), p.line)) + else: + f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len)) + f.write(']\n') + f.close() + + except IOError as e: + raise + + + # ----------------------------------------------------------------------------- + # pickle_table() + # + # This 
function pickles the LR parsing tables to a supplied file object + # ----------------------------------------------------------------------------- + + def pickle_table(self, filename, signature=''): + try: + import cPickle as pickle + except ImportError: + import pickle + with open(filename, 'wb') as outf: + pickle.dump(__tabversion__, outf, pickle_protocol) + pickle.dump(self.lr_method, outf, pickle_protocol) + pickle.dump(signature, outf, pickle_protocol) + pickle.dump(self.lr_action, outf, pickle_protocol) + pickle.dump(self.lr_goto, outf, pickle_protocol) + + outp = [] + for p in self.lr_productions: + if p.func: + outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line)) + else: + outp.append((str(p), p.name, p.len, None, None, None)) + pickle.dump(outp, outf, pickle_protocol) + # ----------------------------------------------------------------------------- # === INTROSPECTION === # @@ -2082,7 +3025,7 @@ class ParserReflect(object): # Validate the start symbol def validate_start(self): if self.start is not None: - if not isinstance(self.start, str): + if not isinstance(self.start, string_types): self.log.error("'start' must be a string") # Look for error handler @@ -2129,7 +3072,7 @@ class ParserReflect(object): self.error = True return - self.tokens = sorted(tokens) + self.tokens = tokens # Validate the tokens def validate_tokens(self): @@ -2168,12 +3111,12 @@ class ParserReflect(object): self.error = True return assoc = p[0] - if not isinstance(assoc, str): + if not isinstance(assoc, string_types): self.log.error('precedence associativity must be a string') self.error = True return for term in p[1:]: - if not isinstance(term, str): + if not isinstance(term, string_types): self.log.error('precedence items must be strings') self.error = True return @@ -2250,7 +3193,7 @@ class ParserReflect(object): if n.startswith('p_') and n != 'p_error': self.log.warning('%r not defined as a function', n) if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or - (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)): + (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)): if v.__doc__: try: doc = v.__doc__.split(' ') @@ -2268,13 +3211,20 @@ class ParserReflect(object): # Build a parser # ----------------------------------------------------------------------------- -def yacc(*, debug=yaccdebug, module=None, start=None, - check_recursion=True, optimize=False, debugfile=debug_file, - debuglog=None, errorlog=None): +def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None, + check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file, + outputdir=None, debuglog=None, errorlog=None, picklefile=None): + + if tabmodule is None: + tabmodule = tab_module # Reference to the parsing method of the last built parser global parse + # If pickling is enabled, table files are not created + if picklefile: + write_tables = 0 + if errorlog is None: errorlog = PlyLogger(sys.stderr) @@ -2282,16 +3232,38 @@ def yacc(*, debug=yaccdebug, module=None, start=None, if module: _items = [(k, getattr(module, k)) for k in dir(module)] pdict = dict(_items) - # If no __file__ or __package__ attributes are available, try to obtain them - # from the __module__ instead + # If no __file__ attribute is available, try to obtain it from the __module__ instead if '__file__' not in pdict: pdict['__file__'] = sys.modules[pdict['__module__']].__file__ - if '__package__' not in pdict and '__module__' in pdict: - if 
hasattr(sys.modules[pdict['__module__']], '__package__'): - pdict['__package__'] = sys.modules[pdict['__module__']].__package__ else: pdict = get_caller_module_dict(2) + if outputdir is None: + # If no output directory is set, the location of the output files + # is determined according to the following rules: + # - If tabmodule specifies a package, files go into that package directory + # - Otherwise, files go in the same directory as the specifying module + if isinstance(tabmodule, types.ModuleType): + srcfile = tabmodule.__file__ + else: + if '.' not in tabmodule: + srcfile = pdict['__file__'] + else: + parts = tabmodule.split('.') + pkgname = '.'.join(parts[:-1]) + exec('import %s' % pkgname) + srcfile = getattr(sys.modules[pkgname], '__file__', '') + outputdir = os.path.dirname(srcfile) + + # Determine if the module is package of a package or not. + # If so, fix the tabmodule setting so that tables load correctly + pkg = pdict.get('__package__') + if pkg and isinstance(tabmodule, str): + if '.' not in tabmodule: + tabmodule = pkg + '.' + tabmodule + + + # Set start symbol if it's specified directly using an argument if start is not None: pdict['start'] = start @@ -2303,17 +3275,40 @@ def yacc(*, debug=yaccdebug, module=None, start=None, if pinfo.error: raise YaccError('Unable to build parser') + # Check signature against table files (if any) + signature = pinfo.signature() + + # Read the tables + try: + lr = LRTable() + if picklefile: + read_signature = lr.read_pickle(picklefile) + else: + read_signature = lr.read_table(tabmodule) + if optimize or (read_signature == signature): + try: + lr.bind_callables(pinfo.pdict) + parser = LRParser(lr, pinfo.error_func) + parse = parser.parse + return parser + except Exception as e: + errorlog.warning('There was a problem loading the table file: %r', e) + except VersionError as e: + errorlog.warning(str(e)) + except ImportError: + pass + if debuglog is None: if debug: try: - debuglog = PlyLogger(open(debugfile, 'w')) + debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w')) except IOError as e: errorlog.warning("Couldn't open %r. %s" % (debugfile, e)) debuglog = NullLogger() else: debuglog = NullLogger() - debuglog.info('Created by PLY (http://www.dabeaz.com/ply)') + debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__) errors = False @@ -2430,8 +3425,11 @@ def yacc(*, debug=yaccdebug, module=None, start=None, if errors: raise YaccError('Unable to build parser') - # Run the LRTable on the grammar - lr = LRTable(grammar, debuglog) + # Run the LRGeneratedTable on the grammar + if debug: + errorlog.debug('Generating %s tables', method) + + lr = LRGeneratedTable(grammar, method, debuglog) if debug: num_sr = len(lr.sr_conflicts) @@ -2474,6 +3472,20 @@ def yacc(*, debug=yaccdebug, module=None, start=None, errorlog.warning('Rule (%s) is never reduced', rejected) warned_never.append(rejected) + # Write the table file if requested + if write_tables: + try: + lr.write_table(tabmodule, outputdir, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. %s" % (tabmodule, e)) + + # Write a pickled version of the tables + if picklefile: + try: + lr.pickle_table(picklefile, signature) + except IOError as e: + errorlog.warning("Couldn't create %r. 
%s" % (picklefile, e)) + # Build the parser lr.bind_callables(pinfo.pdict) parser = LRParser(lr, pinfo.error_func) diff --git a/components/script/dom/bindings/codegen/ply/ply/ygen.py b/components/script/dom/bindings/codegen/ply/ply/ygen.py new file mode 100644 index 00000000000..acf5ca1a37b --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/ply/ygen.py @@ -0,0 +1,74 @@ +# ply: ygen.py +# +# This is a support program that auto-generates different versions of the YACC parsing +# function with different features removed for the purposes of performance. +# +# Users should edit the method LParser.parsedebug() in yacc.py. The source code +# for that method is then used to create the other methods. See the comments in +# yacc.py for further details. + +import os.path +import shutil + +def get_source_range(lines, tag): + srclines = enumerate(lines) + start_tag = '#--! %s-start' % tag + end_tag = '#--! %s-end' % tag + + for start_index, line in srclines: + if line.strip().startswith(start_tag): + break + + for end_index, line in srclines: + if line.strip().endswith(end_tag): + break + + return (start_index + 1, end_index) + +def filter_section(lines, tag): + filtered_lines = [] + include = True + tag_text = '#--! %s' % tag + for line in lines: + if line.strip().startswith(tag_text): + include = not include + elif include: + filtered_lines.append(line) + return filtered_lines + +def main(): + dirname = os.path.dirname(__file__) + shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak')) + with open(os.path.join(dirname, 'yacc.py'), 'r') as f: + lines = f.readlines() + + parse_start, parse_end = get_source_range(lines, 'parsedebug') + parseopt_start, parseopt_end = get_source_range(lines, 'parseopt') + parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack') + + # Get the original source + orig_lines = lines[parse_start:parse_end] + + # Filter the DEBUG sections out + parseopt_lines = filter_section(orig_lines, 'DEBUG') + + # Filter the TRACKING sections out + parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING') + + # Replace the parser source sections with updated versions + lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines + lines[parseopt_start:parseopt_end] = parseopt_lines + + lines = [line.rstrip()+'\n' for line in lines] + with open(os.path.join(dirname, 'yacc.py'), 'w') as f: + f.writelines(lines) + + print('Updated yacc.py') + +if __name__ == '__main__': + main() + + + + + diff --git a/components/script/dom/bindings/codegen/ply/setup.cfg b/components/script/dom/bindings/codegen/ply/setup.cfg new file mode 100644 index 00000000000..4ec8a167da9 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/setup.cfg @@ -0,0 +1,11 @@ +[bdist_wheel] +universal = 1 + +[metadata] +description-file = README.md + +[egg_info] +tag_build = +tag_date = 0 +tag_svn_revision = 0 + diff --git a/components/script/dom/bindings/codegen/ply/setup.py b/components/script/dom/bindings/codegen/ply/setup.py new file mode 100644 index 00000000000..ee8ccd0ccf5 --- /dev/null +++ b/components/script/dom/bindings/codegen/ply/setup.py @@ -0,0 +1,31 @@ +try: + from setuptools import setup +except ImportError: + from distutils.core import setup + +setup(name = "ply", + description="Python Lex & Yacc", + long_description = """ +PLY is yet another implementation of lex and yacc for Python. 
Some notable +features include the fact that its implemented entirely in Python and it +uses LALR(1) parsing which is efficient and well suited for larger grammars. + +PLY provides most of the standard lex/yacc features including support for empty +productions, precedence rules, error recovery, and support for ambiguous grammars. + +PLY is extremely easy to use and provides very extensive error checking. +It is compatible with both Python 2 and Python 3. +""", + license="""BSD""", + version = "3.10", + author = "David Beazley", + author_email = "dave@dabeaz.com", + maintainer = "David Beazley", + maintainer_email = "dave@dabeaz.com", + url = "http://www.dabeaz.com/ply/", + packages = ['ply'], + classifiers = [ + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 2', + ] + ) diff --git a/components/script/dom/bindings/codegen/run.py b/components/script/dom/bindings/codegen/run.py index 7f58de15d69..4d8d05f0116 100644 --- a/components/script/dom/bindings/codegen/run.py +++ b/components/script/dom/bindings/codegen/run.py @@ -24,7 +24,7 @@ def main(): webidls = [name for name in os.listdir(webidls_dir) if name.endswith(".webidl")] for webidl in webidls: filename = os.path.join(webidls_dir, webidl) - with open(filename, "rb") as f: + with open(filename, "r", encoding="utf-8") as f: parser.parse(f.read(), filename) add_css_properties_attributes(css_properties_json, parser) @@ -72,7 +72,7 @@ def generate(config, name, filename): def add_css_properties_attributes(css_properties_json, parser): css_properties = json.load(open(css_properties_json, "rb")) idl = "partial interface CSSStyleDeclaration {\n%s\n};\n" % "\n".join( - " [%sCEReactions, SetterThrows] attribute [TreatNullAs=EmptyString] DOMString %s;" % ( + " [%sCEReactions, SetterThrows] attribute [LegacyNullToEmptyString] DOMString %s;" % ( ('Pref="%s", ' % data["pref"] if data["pref"] else ""), attribute_name ) @@ -80,7 +80,7 @@ def add_css_properties_attributes(css_properties_json, parser): for (property_name, data) in sorted(properties_list.items()) for attribute_name in attribute_names(property_name) ) - parser.parse(idl.encode("utf-8"), "CSSStyleDeclaration_generated.webidl") + parser.parse(idl, "CSSStyleDeclaration_generated.webidl") def attribute_names(property_name): diff --git a/components/script/dom/webidls/ANGLEInstancedArrays.webidl b/components/script/dom/webidls/ANGLEInstancedArrays.webidl index 4ec6cdffc4e..22ef2ee0d0e 100644 --- a/components/script/dom/webidls/ANGLEInstancedArrays.webidl +++ b/components/script/dom/webidls/ANGLEInstancedArrays.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/ANGLE_instanced_arrays/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface ANGLEInstancedArrays { const GLenum VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE = 0x88FE; undefined drawArraysInstancedANGLE(GLenum mode, GLint first, GLsizei count, GLsizei primcount); diff --git a/components/script/dom/webidls/CSSStyleDeclaration.webidl b/components/script/dom/webidls/CSSStyleDeclaration.webidl index 1b67ec6a7fa..73ee152a4ac 100644 --- a/components/script/dom/webidls/CSSStyleDeclaration.webidl +++ b/components/script/dom/webidls/CSSStyleDeclaration.webidl @@ -17,8 +17,8 @@ interface CSSStyleDeclaration { DOMString getPropertyValue(DOMString property); DOMString getPropertyPriority(DOMString property); [CEReactions, Throws] - undefined setProperty(DOMString property, [TreatNullAs=EmptyString] DOMString value, - optional [TreatNullAs=EmptyString] DOMString priority = 
""); + undefined setProperty(DOMString property, [LegacyNullToEmptyString] DOMString value, + optional [LegacyNullToEmptyString] DOMString priority = ""); [CEReactions, Throws] DOMString removeProperty(DOMString property); // readonly attribute CSSRule? parentRule; diff --git a/components/script/dom/webidls/CanvasRenderingContext2D.webidl b/components/script/dom/webidls/CanvasRenderingContext2D.webidl index 2c2cc0a8e5d..436a39a57ee 100644 --- a/components/script/dom/webidls/CanvasRenderingContext2D.webidl +++ b/components/script/dom/webidls/CanvasRenderingContext2D.webidl @@ -93,7 +93,7 @@ interface mixin CanvasFillStrokeStyles { [Throws] CanvasGradient createRadialGradient(double x0, double y0, double r0, double x1, double y1, double r1); [Throws] - CanvasPattern? createPattern(CanvasImageSource image, [TreatNullAs=EmptyString] DOMString repetition); + CanvasPattern? createPattern(CanvasImageSource image, [LegacyNullToEmptyString] DOMString repetition); }; [Exposed=(PaintWorklet, Window, Worker)] diff --git a/components/script/dom/webidls/CharacterData.webidl b/components/script/dom/webidls/CharacterData.webidl index 34dc42c2db8..3f3d1fda356 100644 --- a/components/script/dom/webidls/CharacterData.webidl +++ b/components/script/dom/webidls/CharacterData.webidl @@ -11,7 +11,7 @@ [Exposed=Window, Abstract] interface CharacterData : Node { - [Pure] attribute [TreatNullAs=EmptyString] DOMString data; + [Pure] attribute [LegacyNullToEmptyString] DOMString data; [Pure] readonly attribute unsigned long length; [Pure, Throws] DOMString substringData(unsigned long offset, unsigned long count); diff --git a/components/script/dom/webidls/DOMImplementation.webidl b/components/script/dom/webidls/DOMImplementation.webidl index cf809b30f1a..af744e3a08c 100644 --- a/components/script/dom/webidls/DOMImplementation.webidl +++ b/components/script/dom/webidls/DOMImplementation.webidl @@ -17,7 +17,7 @@ interface DOMImplementation { DOMString systemId); [NewObject, Throws] XMLDocument createDocument(DOMString? namespace, - [TreatNullAs=EmptyString] DOMString qualifiedName, + [LegacyNullToEmptyString] DOMString qualifiedName, optional DocumentType? doctype = null); [NewObject] Document createHTMLDocument(optional DOMString title); diff --git a/components/script/dom/webidls/DOMStringMap.webidl b/components/script/dom/webidls/DOMStringMap.webidl index 39891f24096..1ac525325f7 100644 --- a/components/script/dom/webidls/DOMStringMap.webidl +++ b/components/script/dom/webidls/DOMStringMap.webidl @@ -3,7 +3,7 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ // https://html.spec.whatwg.org/multipage/#the-domstringmap-interface -[Exposed=Window, OverrideBuiltins] +[Exposed=Window, LegacyOverrideBuiltIns] interface DOMStringMap { getter DOMString (DOMString name); [CEReactions, Throws] diff --git a/components/script/dom/webidls/DissimilarOriginLocation.webidl b/components/script/dom/webidls/DissimilarOriginLocation.webidl index 20821e48fc6..e4101bd35e7 100644 --- a/components/script/dom/webidls/DissimilarOriginLocation.webidl +++ b/components/script/dom/webidls/DissimilarOriginLocation.webidl @@ -14,7 +14,7 @@ // way to enforce security policy. 
// https://html.spec.whatwg.org/multipage/#location -[Exposed=(Window,DissimilarOriginWindow), Unforgeable, NoInterfaceObject] +[Exposed=(Window,DissimilarOriginWindow), LegacyUnforgeable, LegacyNoInterfaceObject] interface DissimilarOriginLocation { [Throws] attribute USVString href; [Throws] undefined assign(USVString url); diff --git a/components/script/dom/webidls/DissimilarOriginWindow.webidl b/components/script/dom/webidls/DissimilarOriginWindow.webidl index 755409c7964..0f77aba7d05 100644 --- a/components/script/dom/webidls/DissimilarOriginWindow.webidl +++ b/components/script/dom/webidls/DissimilarOriginWindow.webidl @@ -13,15 +13,15 @@ // way to enforce security policy. // https://html.spec.whatwg.org/multipage/#window -[Global, Exposed=(Window,DissimilarOriginWindow), NoInterfaceObject] +[Global, Exposed=(Window,DissimilarOriginWindow), LegacyNoInterfaceObject] interface DissimilarOriginWindow : GlobalScope { - [Unforgeable] readonly attribute WindowProxy window; + [LegacyUnforgeable] readonly attribute WindowProxy window; [BinaryName="Self_", Replaceable] readonly attribute WindowProxy self; - [Unforgeable] readonly attribute WindowProxy? parent; - [Unforgeable] readonly attribute WindowProxy? top; + [LegacyUnforgeable] readonly attribute WindowProxy? parent; + [LegacyUnforgeable] readonly attribute WindowProxy? top; [Replaceable] readonly attribute WindowProxy frames; [Replaceable] readonly attribute unsigned long length; - [Unforgeable] readonly attribute DissimilarOriginLocation location; + [LegacyUnforgeable] readonly attribute DissimilarOriginLocation location; undefined close(); readonly attribute boolean closed; diff --git a/components/script/dom/webidls/Document.webidl b/components/script/dom/webidls/Document.webidl index 3f169a35153..e33600dd17b 100644 --- a/components/script/dom/webidls/Document.webidl +++ b/components/script/dom/webidls/Document.webidl @@ -84,10 +84,10 @@ dictionary ElementCreationOptions { }; // https://html.spec.whatwg.org/multipage/#the-document-object -// [OverrideBuiltins] +// [LegacyOverrideBuiltIns] partial /*sealed*/ interface Document { // resource metadata management - [PutForwards=href, Unforgeable] + [PutForwards=href, LegacyUnforgeable] readonly attribute Location? 
location; [SetterThrows] attribute DOMString domain; readonly attribute DOMString referrer; @@ -146,7 +146,7 @@ partial /*sealed*/ interface Document { // DOMString queryCommandValue(DOMString commandId); // special event handler IDL attributes that only apply to Document objects - [LenientThis] attribute EventHandler onreadystatechange; + [LegacyLenientThis] attribute EventHandler onreadystatechange; // also has obsolete members }; @@ -156,22 +156,22 @@ Document includes DocumentAndElementEventHandlers; // https://html.spec.whatwg.org/multipage/#Document-partial partial interface Document { [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString fgColor; + attribute [LegacyNullToEmptyString] DOMString fgColor; // https://github.com/servo/servo/issues/8715 - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString linkColor; // https://github.com/servo/servo/issues/8716 - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString vlinkColor; // https://github.com/servo/servo/issues/8717 - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString alinkColor; [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString bgColor; + attribute [LegacyNullToEmptyString] DOMString bgColor; [SameObject] readonly attribute HTMLCollection anchors; @@ -189,9 +189,9 @@ partial interface Document { // https://fullscreen.spec.whatwg.org/#api partial interface Document { - [LenientSetter] readonly attribute boolean fullscreenEnabled; - [LenientSetter] readonly attribute Element? fullscreenElement; - [LenientSetter] readonly attribute boolean fullscreen; // historical + [LegacyLenientSetter] readonly attribute boolean fullscreenEnabled; + [LegacyLenientSetter] readonly attribute Element? fullscreenElement; + [LegacyLenientSetter] readonly attribute boolean fullscreen; // historical Promise exitFullscreen(); diff --git a/components/script/dom/webidls/DynamicModuleOwner.webidl b/components/script/dom/webidls/DynamicModuleOwner.webidl index 924481d491b..20ad0e3367b 100644 --- a/components/script/dom/webidls/DynamicModuleOwner.webidl +++ b/components/script/dom/webidls/DynamicModuleOwner.webidl @@ -7,7 +7,7 @@ * so that we can hold a traceable owner for those dynamic modules which don't hold a owner. 
*/ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface DynamicModuleOwner { readonly attribute Promise promise; }; diff --git a/components/script/dom/webidls/EXTBlendMinmax.webidl b/components/script/dom/webidls/EXTBlendMinmax.webidl index 767eace6923..b9f9f09fe9a 100644 --- a/components/script/dom/webidls/EXTBlendMinmax.webidl +++ b/components/script/dom/webidls/EXTBlendMinmax.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/EXT_blend_minmax/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface EXTBlendMinmax { const GLenum MIN_EXT = 0x8007; const GLenum MAX_EXT = 0x8008; diff --git a/components/script/dom/webidls/EXTColorBufferHalfFloat.webidl b/components/script/dom/webidls/EXTColorBufferHalfFloat.webidl index 77cf23c6cd0..0844ec6c328 100644 --- a/components/script/dom/webidls/EXTColorBufferHalfFloat.webidl +++ b/components/script/dom/webidls/EXTColorBufferHalfFloat.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/EXT_color_buffer_half_float/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface EXTColorBufferHalfFloat { const GLenum RGBA16F_EXT = 0x881A; const GLenum RGB16F_EXT = 0x881B; diff --git a/components/script/dom/webidls/EXTFragDepth.webidl b/components/script/dom/webidls/EXTFragDepth.webidl index 7fde7896c4c..22c8d25b214 100644 --- a/components/script/dom/webidls/EXTFragDepth.webidl +++ b/components/script/dom/webidls/EXTFragDepth.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/EXT_frag_depth/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface EXTFragDepth { }; // interface EXT_frag_depth diff --git a/components/script/dom/webidls/EXTShaderTextureLod.webidl b/components/script/dom/webidls/EXTShaderTextureLod.webidl index decb5ba86ac..a05370b42b2 100644 --- a/components/script/dom/webidls/EXTShaderTextureLod.webidl +++ b/components/script/dom/webidls/EXTShaderTextureLod.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/EXT_shader_texture_lod/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface EXTShaderTextureLod { }; diff --git a/components/script/dom/webidls/EXTTextureFilterAnisotropic.webidl b/components/script/dom/webidls/EXTTextureFilterAnisotropic.webidl index d2957500844..812ae3d8a8b 100644 --- a/components/script/dom/webidls/EXTTextureFilterAnisotropic.webidl +++ b/components/script/dom/webidls/EXTTextureFilterAnisotropic.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/EXT_texture_filter_anisotropic/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface EXTTextureFilterAnisotropic { const GLenum TEXTURE_MAX_ANISOTROPY_EXT = 0x84FE; const GLenum MAX_TEXTURE_MAX_ANISOTROPY_EXT = 0x84FF; diff --git a/components/script/dom/webidls/Element.webidl b/components/script/dom/webidls/Element.webidl index 319964a7e74..9c3980e527f 100644 --- a/components/script/dom/webidls/Element.webidl +++ b/components/script/dom/webidls/Element.webidl @@ -113,9 +113,9 @@ partial interface Element { // https://w3c.github.io/DOM-Parsing/#extensions-to-the-element-interface partial interface Element { [CEReactions, Throws] - attribute [TreatNullAs=EmptyString] DOMString innerHTML; + attribute [LegacyNullToEmptyString] DOMString innerHTML; [CEReactions, Throws] - attribute [TreatNullAs=EmptyString] DOMString outerHTML; + attribute 
[LegacyNullToEmptyString] DOMString outerHTML; }; // https://fullscreen.spec.whatwg.org/#api diff --git a/components/script/dom/webidls/Event.webidl b/components/script/dom/webidls/Event.webidl index e7888b38fb3..ae57dc1a8a8 100644 --- a/components/script/dom/webidls/Event.webidl +++ b/components/script/dom/webidls/Event.webidl @@ -34,7 +34,7 @@ interface Event { [Pure] readonly attribute boolean defaultPrevented; - [Unforgeable] + [LegacyUnforgeable] readonly attribute boolean isTrusted; [Constant] readonly attribute DOMHighResTimeStamp timeStamp; diff --git a/components/script/dom/webidls/EventHandler.webidl b/components/script/dom/webidls/EventHandler.webidl index 57138967792..4303bb19532 100644 --- a/components/script/dom/webidls/EventHandler.webidl +++ b/components/script/dom/webidls/EventHandler.webidl @@ -10,17 +10,17 @@ * and create derivative works of this document. */ -[TreatNonObjectAsNull] +[LegacyTreatNonObjectAsNull] callback EventHandlerNonNull = any (Event event); typedef EventHandlerNonNull? EventHandler; -[TreatNonObjectAsNull] +[LegacyTreatNonObjectAsNull] callback OnErrorEventHandlerNonNull = any ((Event or DOMString) event, optional DOMString source, optional unsigned long lineno, optional unsigned long column, optional any error); typedef OnErrorEventHandlerNonNull? OnErrorEventHandler; -[TreatNonObjectAsNull] +[LegacyTreatNonObjectAsNull] callback OnBeforeUnloadEventHandlerNonNull = DOMString? (Event event); typedef OnBeforeUnloadEventHandlerNonNull? OnBeforeUnloadEventHandler; @@ -62,8 +62,8 @@ interface mixin GlobalEventHandlers { attribute EventHandler onloadedmetadata; attribute EventHandler onloadstart; attribute EventHandler onmousedown; - [LenientThis] attribute EventHandler onmouseenter; - [LenientThis] attribute EventHandler onmouseleave; + [LegacyLenientThis] attribute EventHandler onmouseenter; + [LegacyLenientThis] attribute EventHandler onmouseleave; attribute EventHandler onmousemove; attribute EventHandler onmouseout; attribute EventHandler onmouseover; diff --git a/components/script/dom/webidls/HTMLAudioElement.webidl b/components/script/dom/webidls/HTMLAudioElement.webidl index bad06df5cfc..ad22f727668 100644 --- a/components/script/dom/webidls/HTMLAudioElement.webidl +++ b/components/script/dom/webidls/HTMLAudioElement.webidl @@ -3,7 +3,7 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ // https://html.spec.whatwg.org/multipage/#htmlaudioelement -[Exposed=Window, NamedConstructor=Audio(optional DOMString src)] +[Exposed=Window, LegacyFactoryFunction=Audio(optional DOMString src)] interface HTMLAudioElement : HTMLMediaElement { [HTMLConstructor] constructor(); }; diff --git a/components/script/dom/webidls/HTMLBodyElement.webidl b/components/script/dom/webidls/HTMLBodyElement.webidl index a84d800ff10..102f5bec41a 100644 --- a/components/script/dom/webidls/HTMLBodyElement.webidl +++ b/components/script/dom/webidls/HTMLBodyElement.webidl @@ -13,17 +13,17 @@ HTMLBodyElement includes WindowEventHandlers; // https://html.spec.whatwg.org/multipage/#HTMLBodyElement-partial partial interface HTMLBodyElement { - [CEReactions] attribute [TreatNullAs=EmptyString] DOMString text; + [CEReactions] attribute [LegacyNullToEmptyString] DOMString text; // https://github.com/servo/servo/issues/8715 - //[CEReactions, TreatNullAs=EmptyString] attribute DOMString link; + //[CEReactions, LegacyNullToEmptyString] attribute DOMString link; // https://github.com/servo/servo/issues/8716 - //[CEReactions, TreatNullAs=EmptyString] attribute DOMString vLink; + //[CEReactions, LegacyNullToEmptyString] attribute DOMString vLink; // https://github.com/servo/servo/issues/8717 - //[CEReactions, TreatNullAs=EmptyString] attribute DOMString aLink; + //[CEReactions, LegacyNullToEmptyString] attribute DOMString aLink; - [CEReactions] attribute [TreatNullAs=EmptyString] DOMString bgColor; + [CEReactions] attribute [LegacyNullToEmptyString] DOMString bgColor; [CEReactions] attribute DOMString background; }; diff --git a/components/script/dom/webidls/HTMLElement.webidl b/components/script/dom/webidls/HTMLElement.webidl index 961e79ec405..ebe52aa854b 100644 --- a/components/script/dom/webidls/HTMLElement.webidl +++ b/components/script/dom/webidls/HTMLElement.webidl @@ -48,7 +48,7 @@ interface HTMLElement : Element { // attribute boolean spellcheck; // void forceSpellCheck(); - attribute [TreatNullAs=EmptyString] DOMString innerText; + attribute [LegacyNullToEmptyString] DOMString innerText; // command API // readonly attribute DOMString? commandType; diff --git a/components/script/dom/webidls/HTMLFontElement.webidl b/components/script/dom/webidls/HTMLFontElement.webidl index c85a3cdd3f2..ca1a8680e5d 100644 --- a/components/script/dom/webidls/HTMLFontElement.webidl +++ b/components/script/dom/webidls/HTMLFontElement.webidl @@ -8,7 +8,7 @@ interface HTMLFontElement : HTMLElement { [HTMLConstructor] constructor(); [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString color; + attribute [LegacyNullToEmptyString] DOMString color; [CEReactions] attribute DOMString face; [CEReactions] diff --git a/components/script/dom/webidls/HTMLFrameElement.webidl b/components/script/dom/webidls/HTMLFrameElement.webidl index fe6cab1a5c2..913ed9c0325 100644 --- a/components/script/dom/webidls/HTMLFrameElement.webidl +++ b/components/script/dom/webidls/HTMLFrameElement.webidl @@ -22,8 +22,8 @@ interface HTMLFrameElement : HTMLElement { // readonly attribute Document? contentDocument; // readonly attribute WindowProxy? 
contentWindow; - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString marginHeight; - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString marginWidth; }; diff --git a/components/script/dom/webidls/HTMLIFrameElement.webidl b/components/script/dom/webidls/HTMLIFrameElement.webidl index b9dd97bdea6..779e56f9ad8 100644 --- a/components/script/dom/webidls/HTMLIFrameElement.webidl +++ b/components/script/dom/webidls/HTMLIFrameElement.webidl @@ -42,8 +42,8 @@ partial interface HTMLIFrameElement { // [CEReactions] // attribute DOMString longDesc; - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString marginHeight; - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString marginWidth; }; diff --git a/components/script/dom/webidls/HTMLImageElement.webidl b/components/script/dom/webidls/HTMLImageElement.webidl index 81f6b4674f1..0d242fefb26 100644 --- a/components/script/dom/webidls/HTMLImageElement.webidl +++ b/components/script/dom/webidls/HTMLImageElement.webidl @@ -3,7 +3,7 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ // https://html.spec.whatwg.org/multipage/#htmlimageelement -[Exposed=Window, NamedConstructor=Image(optional unsigned long width, optional unsigned long height)] +[Exposed=Window, LegacyFactoryFunction=Image(optional unsigned long width, optional unsigned long height)] interface HTMLImageElement : HTMLElement { [HTMLConstructor] constructor(); @@ -48,7 +48,7 @@ partial interface HTMLImageElement { attribute DOMString longDesc; [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString border; + attribute [LegacyNullToEmptyString] DOMString border; }; // https://drafts.csswg.org/cssom-view/#extensions-to-the-htmlimageelement-interface diff --git a/components/script/dom/webidls/HTMLInputElement.webidl b/components/script/dom/webidls/HTMLInputElement.webidl index f69dcb68eed..160b3904439 100644 --- a/components/script/dom/webidls/HTMLInputElement.webidl +++ b/components/script/dom/webidls/HTMLInputElement.webidl @@ -71,7 +71,7 @@ interface HTMLInputElement : HTMLElement { [CEReactions] attribute DOMString defaultValue; [CEReactions, SetterThrows] - attribute [TreatNullAs=EmptyString] DOMString value; + attribute [LegacyNullToEmptyString] DOMString value; [SetterThrows] attribute object? valueAsDate; [SetterThrows] diff --git a/components/script/dom/webidls/HTMLObjectElement.webidl b/components/script/dom/webidls/HTMLObjectElement.webidl index 4c47fcfb93f..fc7db3fe43f 100644 --- a/components/script/dom/webidls/HTMLObjectElement.webidl +++ b/components/script/dom/webidls/HTMLObjectElement.webidl @@ -49,5 +49,5 @@ partial interface HTMLObjectElement { // attribute DOMString codeBase; // attribute DOMString codeType; - //[TreatNullAs=EmptyString] attribute DOMString border; + //[LegacyNullToEmptyString] attribute DOMString border; }; diff --git a/components/script/dom/webidls/HTMLOptionElement.webidl b/components/script/dom/webidls/HTMLOptionElement.webidl index c995070d0e7..a10761c2428 100644 --- a/components/script/dom/webidls/HTMLOptionElement.webidl +++ b/components/script/dom/webidls/HTMLOptionElement.webidl @@ -3,7 +3,7 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ // https://html.spec.whatwg.org/multipage/#htmloptionelement -[Exposed=Window, NamedConstructor=Option(optional DOMString text = "", optional DOMString value, +[Exposed=Window, LegacyFactoryFunction=Option(optional DOMString text = "", optional DOMString value, optional boolean defaultSelected = false, optional boolean selected = false)] interface HTMLOptionElement : HTMLElement { diff --git a/components/script/dom/webidls/HTMLTableCellElement.webidl b/components/script/dom/webidls/HTMLTableCellElement.webidl index 3c955de66b9..7ec277d5bda 100644 --- a/components/script/dom/webidls/HTMLTableCellElement.webidl +++ b/components/script/dom/webidls/HTMLTableCellElement.webidl @@ -43,5 +43,5 @@ partial interface HTMLTableCellElement { // attribute DOMString vAlign; [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString bgColor; + attribute [LegacyNullToEmptyString] DOMString bgColor; }; diff --git a/components/script/dom/webidls/HTMLTableElement.webidl b/components/script/dom/webidls/HTMLTableElement.webidl index bc389bfb796..72a151fc858 100644 --- a/components/script/dom/webidls/HTMLTableElement.webidl +++ b/components/script/dom/webidls/HTMLTableElement.webidl @@ -51,9 +51,9 @@ partial interface HTMLTableElement { attribute DOMString width; [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString bgColor; - // [CEReactions, TreatNullAs=EmptyString] + attribute [LegacyNullToEmptyString] DOMString bgColor; + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString cellPadding; - // [CEReactions, TreatNullAs=EmptyString] + // [CEReactions, LegacyNullToEmptyString] // attribute DOMString cellSpacing; }; diff --git a/components/script/dom/webidls/HTMLTableRowElement.webidl b/components/script/dom/webidls/HTMLTableRowElement.webidl index 3f2db6fb720..00d3fecd9e1 100644 --- a/components/script/dom/webidls/HTMLTableRowElement.webidl +++ b/components/script/dom/webidls/HTMLTableRowElement.webidl @@ -30,5 +30,5 @@ partial interface HTMLTableRowElement { // attribute DOMString vAlign; [CEReactions] - attribute [TreatNullAs=EmptyString] DOMString bgColor; + attribute [LegacyNullToEmptyString] DOMString bgColor; }; diff --git a/components/script/dom/webidls/HTMLTextAreaElement.webidl b/components/script/dom/webidls/HTMLTextAreaElement.webidl index e9ffbd0a9e6..a8e79095680 100644 --- a/components/script/dom/webidls/HTMLTextAreaElement.webidl +++ b/components/script/dom/webidls/HTMLTextAreaElement.webidl @@ -40,7 +40,7 @@ interface HTMLTextAreaElement : HTMLElement { readonly attribute DOMString type; [CEReactions] attribute DOMString defaultValue; - attribute [TreatNullAs=EmptyString] DOMString value; + attribute [LegacyNullToEmptyString] DOMString value; readonly attribute unsigned long textLength; readonly attribute boolean willValidate; diff --git a/components/script/dom/webidls/Location.webidl b/components/script/dom/webidls/Location.webidl index b56cc220932..6c5833cfdcf 100644 --- a/components/script/dom/webidls/Location.webidl +++ b/components/script/dom/webidls/Location.webidl @@ -3,7 +3,7 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. 
*/ // https://html.spec.whatwg.org/multipage/#location -[Exposed=Window, Unforgeable] interface Location { +[Exposed=Window, LegacyUnforgeable] interface Location { [Throws, CrossOriginWritable] stringifier attribute USVString href; [Throws] readonly attribute USVString origin; diff --git a/components/script/dom/webidls/MediaList.webidl b/components/script/dom/webidls/MediaList.webidl index e3c9cd6e680..d3a5527130f 100644 --- a/components/script/dom/webidls/MediaList.webidl +++ b/components/script/dom/webidls/MediaList.webidl @@ -5,7 +5,7 @@ // https://drafts.csswg.org/cssom/#the-medialist-interface [Exposed=Window] interface MediaList { - stringifier attribute [TreatNullAs=EmptyString] DOMString mediaText; + stringifier attribute [LegacyNullToEmptyString] DOMString mediaText; readonly attribute unsigned long length; getter DOMString? item(unsigned long index); undefined appendMedium(DOMString medium); diff --git a/components/script/dom/webidls/OESElementIndexUint.webidl b/components/script/dom/webidls/OESElementIndexUint.webidl index 01a441e9946..a1e631e47b8 100644 --- a/components/script/dom/webidls/OESElementIndexUint.webidl +++ b/components/script/dom/webidls/OESElementIndexUint.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/OES_element_index_uint/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESElementIndexUint { }; diff --git a/components/script/dom/webidls/OESStandardDerivatives.webidl b/components/script/dom/webidls/OESStandardDerivatives.webidl index 0e4c51e4df3..e0190a4075a 100644 --- a/components/script/dom/webidls/OESStandardDerivatives.webidl +++ b/components/script/dom/webidls/OESStandardDerivatives.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/OES_standard_derivatives/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESStandardDerivatives { const GLenum FRAGMENT_SHADER_DERIVATIVE_HINT_OES = 0x8B8B; }; diff --git a/components/script/dom/webidls/OESTextureFloat.webidl b/components/script/dom/webidls/OESTextureFloat.webidl index f053a405977..63d968522d5 100644 --- a/components/script/dom/webidls/OESTextureFloat.webidl +++ b/components/script/dom/webidls/OESTextureFloat.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/OES_texture_float/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESTextureFloat { }; diff --git a/components/script/dom/webidls/OESTextureFloatLinear.webidl b/components/script/dom/webidls/OESTextureFloatLinear.webidl index f0abf5a1aac..84a21cbfe65 100644 --- a/components/script/dom/webidls/OESTextureFloatLinear.webidl +++ b/components/script/dom/webidls/OESTextureFloatLinear.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/OES_texture_float_linear/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESTextureFloatLinear { }; diff --git a/components/script/dom/webidls/OESTextureHalfFloat.webidl b/components/script/dom/webidls/OESTextureHalfFloat.webidl index cba71c8cb56..55045ba7b4e 100644 --- a/components/script/dom/webidls/OESTextureHalfFloat.webidl +++ b/components/script/dom/webidls/OESTextureHalfFloat.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/OES_texture_half_float/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESTextureHalfFloat { const GLenum HALF_FLOAT_OES = 0x8D61; }; diff --git 
a/components/script/dom/webidls/OESTextureHalfFloatLinear.webidl b/components/script/dom/webidls/OESTextureHalfFloatLinear.webidl index 61454dd9cdb..fe8d44e8752 100644 --- a/components/script/dom/webidls/OESTextureHalfFloatLinear.webidl +++ b/components/script/dom/webidls/OESTextureHalfFloatLinear.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/OES_texture_half_float_linear/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESTextureHalfFloatLinear { }; diff --git a/components/script/dom/webidls/OESVertexArrayObject.webidl b/components/script/dom/webidls/OESVertexArrayObject.webidl index 0996ef45d91..48e2daeb31a 100644 --- a/components/script/dom/webidls/OESVertexArrayObject.webidl +++ b/components/script/dom/webidls/OESVertexArrayObject.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/OES_vertex_array_object/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface OESVertexArrayObject { const unsigned long VERTEX_ARRAY_BINDING_OES = 0x85B5; diff --git a/components/script/dom/webidls/Promise.webidl b/components/script/dom/webidls/Promise.webidl index f4f6616f107..2e402d1a54e 100644 --- a/components/script/dom/webidls/Promise.webidl +++ b/components/script/dom/webidls/Promise.webidl @@ -10,7 +10,7 @@ callback PromiseJobCallback = undefined(); [TreatNonCallableAsNull] callback AnyCallback = any (any value); -[NoInterfaceObject, Exposed=(Window,Worker)] +[LegacyNoInterfaceObject, Exposed=(Window,Worker)] // Need to escape "Promise" so it's treated as an identifier. interface _Promise { }; diff --git a/components/script/dom/webidls/PromiseNativeHandler.webidl b/components/script/dom/webidls/PromiseNativeHandler.webidl index 7d5f35e3223..aff2e43a855 100644 --- a/components/script/dom/webidls/PromiseNativeHandler.webidl +++ b/components/script/dom/webidls/PromiseNativeHandler.webidl @@ -7,7 +7,7 @@ // Hack to allow us to have JS owning and properly tracing/CCing/etc a // PromiseNativeHandler. -[NoInterfaceObject, +[LegacyNoInterfaceObject, Exposed=(Window,Worker)] interface PromiseNativeHandler { }; diff --git a/components/script/dom/webidls/ReadableStream.webidl b/components/script/dom/webidls/ReadableStream.webidl index 3662ca75ab8..d03212e3e9e 100644 --- a/components/script/dom/webidls/ReadableStream.webidl +++ b/components/script/dom/webidls/ReadableStream.webidl @@ -5,7 +5,7 @@ // This interface is entirely internal to Servo, and should not be accessible to // web pages. -[NoInterfaceObject, Exposed=(Window,Worker)] +[LegacyNoInterfaceObject, Exposed=(Window,Worker)] // Need to escape "ReadableStream" so it's treated as an identifier. interface _ReadableStream { }; diff --git a/components/script/dom/webidls/ServoParser.webidl b/components/script/dom/webidls/ServoParser.webidl index 89350130919..0e30e557905 100644 --- a/components/script/dom/webidls/ServoParser.webidl +++ b/components/script/dom/webidls/ServoParser.webidl @@ -6,5 +6,5 @@ // web pages. 
[Exposed=(Window,Worker), - NoInterfaceObject] + LegacyNoInterfaceObject] interface ServoParser {}; diff --git a/components/script/dom/webidls/WEBGLColorBufferFloat.webidl b/components/script/dom/webidls/WEBGLColorBufferFloat.webidl index dd041927ec1..72004df6167 100644 --- a/components/script/dom/webidls/WEBGLColorBufferFloat.webidl +++ b/components/script/dom/webidls/WEBGLColorBufferFloat.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/WEBGL_color_buffer_float/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface WEBGLColorBufferFloat { const GLenum RGBA32F_EXT = 0x8814; const GLenum FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT = 0x8211; diff --git a/components/script/dom/webidls/WEBGLCompressedTextureETC1.webidl b/components/script/dom/webidls/WEBGLCompressedTextureETC1.webidl index 77f80197c97..479139fdbe9 100644 --- a/components/script/dom/webidls/WEBGLCompressedTextureETC1.webidl +++ b/components/script/dom/webidls/WEBGLCompressedTextureETC1.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_etc1/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface WEBGLCompressedTextureETC1 { /* Compressed Texture Format */ const GLenum COMPRESSED_RGB_ETC1_WEBGL = 0x8D64; diff --git a/components/script/dom/webidls/WEBGLCompressedTextureS3TC.webidl b/components/script/dom/webidls/WEBGLCompressedTextureS3TC.webidl index f940028bf5d..c04957f6bd4 100644 --- a/components/script/dom/webidls/WEBGLCompressedTextureS3TC.webidl +++ b/components/script/dom/webidls/WEBGLCompressedTextureS3TC.webidl @@ -6,7 +6,7 @@ * https://www.khronos.org/registry/webgl/extensions/WEBGL_compressed_texture_s3tc/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface WEBGLCompressedTextureS3TC { /* Compressed Texture Formats */ const GLenum COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0; diff --git a/components/script/dom/webidls/WebGLVertexArrayObjectOES.webidl b/components/script/dom/webidls/WebGLVertexArrayObjectOES.webidl index e576bd6089d..e33e058f2b4 100644 --- a/components/script/dom/webidls/WebGLVertexArrayObjectOES.webidl +++ b/components/script/dom/webidls/WebGLVertexArrayObjectOES.webidl @@ -6,6 +6,6 @@ * https://www.khronos.org/registry/webgl/extensions/OES_vertex_array_object/ */ -[NoInterfaceObject, Exposed=Window] +[LegacyNoInterfaceObject, Exposed=Window] interface WebGLVertexArrayObjectOES: WebGLObject { }; diff --git a/components/script/dom/webidls/Window.webidl b/components/script/dom/webidls/Window.webidl index 23aaa8fa07d..64855d42860 100644 --- a/components/script/dom/webidls/Window.webidl +++ b/components/script/dom/webidls/Window.webidl @@ -6,13 +6,13 @@ [Global=Window, Exposed=Window /*, LegacyUnenumerableNamedProperties */] /*sealed*/ interface Window : GlobalScope { // the current browsing context - [Unforgeable, CrossOriginReadable] readonly attribute WindowProxy window; + [LegacyUnforgeable, CrossOriginReadable] readonly attribute WindowProxy window; [BinaryName="Self_", Replaceable, CrossOriginReadable] readonly attribute WindowProxy self; - [Unforgeable] readonly attribute Document document; + [LegacyUnforgeable] readonly attribute Document document; attribute DOMString name; - [PutForwards=href, Unforgeable, CrossOriginReadable, CrossOriginWritable] + [PutForwards=href, LegacyUnforgeable, CrossOriginReadable, CrossOriginWritable] readonly attribute Location location; readonly attribute History history; 
[Pref="dom.customelements.enabled"] @@ -35,7 +35,7 @@ [Replaceable, CrossOriginReadable] readonly attribute unsigned long length; // Note that this can return null in the case that the browsing context has been discarded. // https://github.com/whatwg/html/issues/2115 - [Unforgeable, CrossOriginReadable] readonly attribute WindowProxy? top; + [LegacyUnforgeable, CrossOriginReadable] readonly attribute WindowProxy? top; [CrossOriginReadable] attribute any opener; // Note that this can return null in the case that the browsing context has been discarded. // https://github.com/whatwg/html/issues/2115 diff --git a/components/script/dom/webidls/WindowProxy.webidl b/components/script/dom/webidls/WindowProxy.webidl index a4fd2a3f79c..8f102540bba 100644 --- a/components/script/dom/webidls/WindowProxy.webidl +++ b/components/script/dom/webidls/WindowProxy.webidl @@ -3,5 +3,5 @@ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ // https://html.spec.whatwg.org/multipage/#the-windowproxy-exotic-object -[Exposed=(Window,DissimilarOriginWindow), NoInterfaceObject] +[Exposed=(Window,DissimilarOriginWindow), LegacyNoInterfaceObject] interface WindowProxy {};