diff --git a/index.bs b/index.bs index ff9e38b1..ec832ab3 100644 --- a/index.bs +++ b/index.bs @@ -24,10 +24,15 @@ urlPrefix: https://gpuweb.github.io/gpuweb/; spec: WEBGPU type: interface text: GPUDevice; url: gpu-device text: GPUBuffer; url: buffer-interface + for: GPUBuffer; text: size; url: dom-gpubuffer-size text: GPUTexture; url: texture-interface text: GPUQueue; url: queues text: GPUCommandBuffer; url: command-buffers text: GPUCommandBufferDescriptor; url: dictdef-gpucommandbufferdescriptor +urlPrefix: https://tc39.es/ecma262/; spec: ECMA-262 + type: dfn + text: element size; url: table-the-typedarray-constructors + text: element type; url: table-the-typedarray-constructors urlPrefix: https://webidl.spec.whatwg.org/; spec: WEBIDL type: interface text: Promise; url: idl-promise @@ -499,8 +504,8 @@ time the operation is successfully completed on the offloaded timeline at which signaled. This type of execution supports both the CPU and GPU device. In both the {{MLContext}}.{{MLContext/compute()}} and {{MLContext}}.{{MLContext/computeAsync()}} execution methods, the caller supplies -the input values using {{MLNamedArrayInputs}}, binding the input {{MLOperand}}s to their values. The caller -then supplies pre-allocated buffers for output {{MLOperand}}s using {{MLNamedArrayOutputs}}. +the input values using {{MLNamedArrayBufferViews}}, binding the input {{MLOperand}}s to their values. The caller +then supplies pre-allocated buffers for output {{MLOperand}}s using {{MLNamedArrayBufferViews}}. The {{MLCommandEncoder}} interface created by the {{MLContext}}.{{MLContext/createCommandEncoder()}} method supports a graph execution method that provides the maximum flexibility to callers that also utilize WebGPU in their @@ -621,13 +626,7 @@ The power preference indicates preference as related to power consump @@ -665,8 +665,8 @@ partial interface MLContext { **Arguments:** - *graph*: an {{MLGraph}}. The compiled graph to be executed. 
- - *inputs*: an {{MLNamedArrayInputs}}. The resources and optional dimensions of inputs. - - *outputs*: an {{MLNamedArrayOutputs}}. The pre-allocated resources of required outputs. + - *inputs*: an {{MLNamedArrayBufferViews}}. The resources of inputs. + - *outputs*: an {{MLNamedArrayBufferViews}}. The pre-allocated resources of required outputs. **Returns:** {{undefined}}. @@ -675,40 +675,20 @@ partial interface MLContext { 1. For each |key| -> |value| of |inputs|: 1. |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|] must exist. 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. - 1. Let |inputSize| be 1. - 1. If |value| is an {{MLArrayInput}}, then: - 1. The length of |value|.{{MLArrayInput/dimensions}} must be the same as the length of |inputDesc|.{{MLOperandDescriptor/dimensions}}. - 1. Let |i| be 0. - 1. While true: - 1. Let |dimension| be |value|.{{MLArrayInput/dimensions}}[|i|]. - 1. |dimension| must be greater than 0. - 1. If |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|] is greater than 0, then |dimension| must be equal to |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|]. - 1. Set |inputSize| to the product of |inputSize| and |dimension|. - 1. Increment |i| by 1. - 1. If |i| if equal to the length of |value|.{{MLArrayInput/dimensions}}, then break. - 1. Else: - 1. For each |dimension| of |inputDesc|.{{MLOperandDescriptor/dimensions}}: - 1. The value of |dimension| must be greater than 0. - 1. Set |inputSize| to the product of |inputSize| and |dimension|. - 1. If |value| is an {{MLArrayInput}}, then let |resource| be |value|.{{MLArrayInput/resource}}. - 1. If |value| is an {{ArrayBufferView}}, then let |resource| be |value|. - 1. If |resource| is an {{ArrayBufferView}}, then: - 1. The kind of |resource| must be compatible with |inputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). - 1. The length of |resource| must be the same as |inputSize|. + 1. 
The type of {{ArrayBufferView}} |value| must match |inputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). + 1. |value|.\[[ByteLength]] must equal to [=byte length=] of |inputDesc|. 1. For each |key| -> |value| of |outputs|: - 1. |graph|.{{MLGraph/[[outputNames]]}}[|key|] must exist. + 1. |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|] must exist. + 1. Let |outputDesc| be |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|]. + 1. The type of {{ArrayBufferView}} |value| must match |outputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). + 1. |value|.\[[ByteLength]] must equal to [=byte length=] of |outputDesc|. 1. For each |key| -> |value| of |inputs|: 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. - 1. Let |inputTensor| be a new tensor for |graph|.{{MLGraph/[[implementation]]}} of data type that is compatible with |inputDesc|.{{MLOperandDescriptor/type}}. - 1. If |value| is an {{MLArrayInput}}, then: - 1. Set the dimensions of |inputTensor| to |value|.{{MLArrayInput/dimensions}}. - 1. Else: - 1. Set the dimensions of |inputTensor| to |inputDesc|.{{MLOperandDescriptor/dimensions}}. - 1. If |value| is an {{MLArrayInput}}, then: - 1. Set the values of |inputTensor| to the values of |value|.{{MLArrayInput/resource}}. - 1. If |value| is an {{ArrayBufferView}}, then: - 1. Set the values of |inputTensor| to the values of |value|. + 1. Let |inputTensor| be a new tensor for |graph|.{{MLGraph/[[implementation]]}}. + 1. Set the data type of |inputTensor| to the one that matches the [=element type=] of {{ArrayBufferView}} |value|. + 1. Set the dimensions of |inputTensor| to |inputDesc|.{{MLOperandDescriptor/dimensions}}. + 1. Set the values of |inputTensor| to the values of |value|. 1. Set the input of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key| to |inputTensor|. 1. 
For each |key| -> |value| of |outputs|: 1. Issue a compute request for output of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key|. @@ -717,11 +697,8 @@ partial interface MLContext { 1. Throw an {{OperationError}} {{DOMException}} and stop. 1. Else: 1. Let |outputTensor| be the output tensor returned by |graph|.{{MLGraph/[[implementation]]}}. - 1. If the kind of |value| is not compatible with the value type of |outputTensor|, then throw a {{DataError}} {{DOMException}} and stop. - 1. Let |outputSize| be 1. - 1. For each |dimension| of dimensions of |outputTensor|: - 1. Set |outputSize| to the product of |outputSize| and |dimension|. - 1. If |outputSize| is greater than the length of |value|, then: + 1. If the data type of |outputTensor| doesn't match the [=element type=] of {{ArrayBufferView}} |value|, then throw a {{DataError}} {{DOMException}} and stop. + 1. If the byte length of |outputTensor| is not equal to |value|.\[[ByteLength]], then: 1. Throw a {{DataError}} {{DOMException}} and stop. 1. Else: 1. Set the values of |value| to the values of |outputTensor|. @@ -731,47 +708,7 @@ partial interface MLContext { #### Examples #### {#compilation-examples}
-The following code showcases the computation with dynamic input dimensions. -
-function sizeOfShape(array) {
-  return array.reduce(
-      (accumulator, currentValue) => accumulator * currentValue);
-}
-
-const context = navigator.ml.createContext();
-
-// Create a graph with dynamic shaped inputs.
-const builder = new MLGraphBuilder(context);
-const descA = {type: 'float32', dimensions: [-1, 4]};
-const a = builder.input('a', descA);
-const descB = {type: 'float32', dimensions: [4, -1]};
-const b = builder.input('b', descB);
-const c = builder.matmul(a, b);
-const graph = builder.build({'c': c});
-
-function allocateAndCompute(shapeA, shapeB, shapeC) {
-  const bufferA = new Float32Array(sizeOfShape(shapeA)).fill(0.5);
-  const bufferB = new Float32Array(sizeOfShape(shapeB)).fill(0.5);
-  const bufferC = new Float32Array(sizeOfShape(shapeC));
-
-  // Specify the shape of inputs when computing.
-  const inputs = {
-    'a': {resource: bufferA, dimensions: shapeA},
-    'b': {resource: bufferB, dimensions: shapeB},
-  };
-  const outputs = {'c': bufferC};
-  context.compute(graph, inputs, outputs);
-  console.log(`values: ${bufferC}`);
-}
-
-allocateAndCompute([3, 4], [4, 3], [3, 3]);
-allocateAndCompute([4, 4], [4, 4], [4, 4]);
-allocateAndCompute([5, 4], [4, 5], [5, 5]);
-
-
- -
-The following code showcases the computation with optional outputs. +The following code showcases the synchronous computation with optional outputs in a worker.
 const context = navigator.ml.createContext();
 
@@ -809,8 +746,8 @@ Asynchronously carries out the computational workload of a compiled graph {{MLGr
 
 
 
@@ -818,8 +755,8 @@ partial interface MLContext {
 
     **Arguments:**
       - *graph*: an {{MLGraph}}. The compiled graph to be executed.
-      - *inputs*: an {{MLNamedArrayInputs}}. The resources and optional dimensions of inputs.
-      - *outputs*: an {{MLNamedArrayOutputs}}. The pre-allocated resources of required outputs.
+      - *inputs*: an {{MLNamedArrayBufferViews}}. The resources of inputs.
+      - *outputs*: an {{MLNamedArrayBufferViews}}. The pre-allocated resources of required outputs.
 
     **Returns:** Promise<{{undefined}}>.
     
@@ -828,54 +765,37 @@ partial interface MLContext {
             1. For each |key| -> |value| of |inputs|:
                 1. |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|] must exist.
                 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|].
-                1. Let |inputSize| be 1.
-                1. If |value| is an {{MLArrayInput}}, then:
-                    1. The length of |value|.{{MLArrayInput/dimensions}} must be the same as the length of |inputDesc|.{{MLOperandDescriptor/dimensions}}.
-                    1. Let |i| be 0.
-                    1. While true:
-                        1. Let |dimension| be |value|.{{MLArrayInput/dimensions}}[|i|].
-                        1. |dimension| must be greater than 0.
-                        1. If |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|] is greater than 0, then |dimension| must be equal to |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|].
-                        1. Set |inputSize| to the product of |inputSize| and |dimension|.
-                        1. Increment |i| by 1.
-                        1. If |i| if equal to the length of |value|.{{MLArrayInput/dimensions}}, then break.
-                1. Else:
-                    1. For each |dimension| of |inputDesc|.{{MLOperandDescriptor/dimensions}}:
-                        1. The value of |dimension| must be greater than 0.
-                        1. Set |inputSize| to the product of |inputSize| and |dimension|.
-                1. If |value| is an {{MLArrayInput}}, then let |resource| be |value|.{{MLArrayInput/resource}}.
-                1. If |resource| is an {{ArrayBufferView}}, then:
-                    1. The kind of |resource| must be compatible with |inputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility).
-                    1. The length of |resource| must be the same as |inputSize|.
+                1. The type of {{ArrayBufferView}} |value| must match |inputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility).
+                1. |value|.\[[ByteLength]] must be equal to the [=byte length=] of |inputDesc|.
             1. For each |key| -> |value| of |outputs|:
-                1. |graph|.{{MLGraph/[[outputNames]]}}[|key|] must exist.
+                1. |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|] must exist.
+                1. Let |outputDesc| be |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|].
+                1. The type of {{ArrayBufferView}} |value| must match |outputDesc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility).
+                1. |value|.\[[ByteLength]] must be equal to the [=byte length=] of |outputDesc|.
         
+ 1. Let |promise| be [=a new promise=]. 1. For each |key| -> |value| of |inputs|: 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. - 1. Let |inputTensor| be a new tensor for |graph|.{{MLGraph/[[implementation]]}} of data type that is compatible with |inputDesc|.{{MLOperandDescriptor/type}}. - 1. If |value| is an {{MLArrayInput}}, then: - 1. Set the dimensions of |inputTensor| to |value|.{{MLArrayInput/dimensions}}. - 1. Else: - 1. Set the dimensions of |inputTensor| to |inputDesc|.{{MLOperandDescriptor/dimensions}}. - 1. If |value| is an {{MLArrayInput}}, then: - 1. Set the values of |inputTensor| to the values of |value|.{{MLArrayInput/resource}}. + 1. Let |inputTensor| be a new tensor for |graph|.{{MLGraph/[[implementation]]}}. + 1. Set the data type of |inputTensor| to the one that matches the [=element type=] of {{ArrayBufferView}} |value|. + 1. Set the dimensions of |inputTensor| to |inputDesc|.{{MLOperandDescriptor/dimensions}}. + 1. Set the values of |inputTensor| to the values of |value|. 1. Set the input of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key| to |inputTensor|. 1. For each |key| -> |value| of |outputs|: 1. Issue a compute request for output of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key|. 1. Wait for the compute request to be completed. 1. If there is an error returned by |graph|.{{MLGraph/[[implementation]]}}, then: - 1. Throw an {{OperationError}} {{DOMException}} and stop. + 1. [=reject=] |promise| with an {{OperationError}} and stop. 1. Else: 1. Let |outputTensor| be the output tensor returned by |graph|.{{MLGraph/[[implementation]]}}. - 1. If the kind of |value| is not compatible with the value type of |outputTensor|, then throw a {{DataError}} {{DOMException}} and stop. - 1. Let |outputSize| be 1. - 1. For each |dimension| of dimensions of |outputTensor|: - 1. Set |outputSize| to the product of |outputSize| and |dimension|. - 1. 
If |outputSize| is greater than the length of |value|, then: - 1. Throw a {{DataError}} {{DOMException}} and stop. + 1. Let |outputDesc| be |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|]. + 1. If the data type of |outputTensor| doesn't match the [=element type=] of {{ArrayBufferView}} |value|, then throw a {{DataError}} {{DOMException}} and stop. + 1. If the byte length of |outputTensor| is not equal to [=byte length=] of |outputDesc|, then: + 1. [=reject=] |promise| with an {{OperationError}} and stop. 1. Else: 1. Set the values of |value| to the values of |outputTensor|. - 1. Return Promise<{{undefined}}>. + 1. If all compute requests are completed, [=Resolve=] |promise| and stop. + 1. Return |promise|. ### WebGPU Interoperability ### {#api-mlcontext-webgpu-interop} @@ -912,11 +832,20 @@ dictionary MLOperandDescriptor { required MLOperandType type; // The dimensions field is only required for tensor operands. - // The negative value means an unknown dimension. - sequence dimensions; + sequence dimensions; }; +
+ The byte length of an {{MLOperandDescriptor}} |desc| is the value returned by the following steps: + + 1. Let |elementLength| be 1. + 1. For each |dimension| of |desc|.{{MLOperandDescriptor/dimensions}}: + 1. Set |elementLength| to |elementLength| × |dimension|. + 1. Let |elementSize| be the [=element size=] of one of the {{ArrayBufferView}} types that matches |desc|.{{MLOperandDescriptor/type}} according to [this table](#appendices-mloperandtype-arraybufferview-compatibility). + 1. Return |elementLength| × |elementSize|. +
+ ## MLOperand ## {#api-mloperand} An {{MLOperand}} represents an intermediary graph being constructed as a result of compositing parts of an operation into a fully composed operation. @@ -2446,9 +2375,9 @@ interface MLGraph {}; :: Maps the name of an input {{MLOperand}} to its {{MLOperandDescriptor}} for all input {{MLOperand}}s of this {{MLGraph}}. - : \[[outputNames]] of type [=sequence=]<{{DOMString}}> + : \[[outputDescriptors]] of type [=record=]<{{DOMString}}, {{MLOperandDescriptor}}> :: - Contains the names of all output {{MLOperand}}s of this {{MLGraph}}. + Maps the name of an output {{MLOperand}} to its {{MLOperandDescriptor}} for all output {{MLOperand}}s of this {{MLGraph}}. : \[[implementation]] :: @@ -2461,13 +2390,7 @@ The {{MLCommandEncoder}} interface represents a method of execution that synchro
**Arguments:** - *graph*: an {{MLGraph}}. The compiled graph to be executed. - - *inputs*: an {{MLNamedGPUInputs}}. The resources and optional dimensions of inputs. - - *outputs*: an {{MLNamedGPUOutputs}}. The pre-allocated resources of required outputs. + - *inputs*: an {{MLNamedGPUResources}}. The resources of inputs. + - *outputs*: an {{MLNamedGPUResources}}. The pre-allocated resources of required outputs. **Returns:** {{undefined}}. 1. If any of the following requirements are unmet, then throw a {{DataError}} {{DOMException}} and stop.
- 1. For each |key| -> |value| of |inputs|: - 1. |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|] must exist. - 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. - 1. Let |inputSize| be 1. - 1. If |value| is an {{MLGPUInput}}, then: - 1. The length of |value|.{{MLGPUInput/dimensions}} must be the same as the length of |inputDesc|.{{MLOperandDescriptor/dimensions}}. - 1. Let |i| be 0. - 1. While true: - 1. Let |dimension| be |value|.{{MLGPUInput/dimensions}}[|i|]. - 1. |dimension| must be greater than 0. - 1. If |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|] is greater than 0, then |dimension| must be equal to |inputDesc|.{{MLOperandDescriptor/dimensions}}[|i|]. - 1. Set |inputSize| to the product of |inputSize| and |dimension|. - 1. Increment |i| by 1. - 1. If |i| if equal to the length of |value|.{{MLGPUInput/dimensions}}, then break. - 1. Else: - 1. For each |dimension| of |inputDesc|.{{MLOperandDescriptor/dimensions}}: - 1. The value of |dimension| must be greater than 0. - 1. Set |inputSize| to the product of |inputSize| and |dimension|. - 1. If |value| is an {{MLGPUInput}}, then let |resource| be |value|.{{MLGPUInput/resource}}. - 1. If |value| is an {{MLGPUResource}}, then let |resource| be |value|. - 1. For each |key| -> |value| of |outputs|: - 1. |graph|.{{MLGraph/[[outputNames]]}}[|key|] must exist. -
+ 1. For each |key| -> |value| of |inputs|: + 1. |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|] must exist. + 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. + 1. If |value| is a {{GPUBuffer}}, then: + 1. |value|.{{GPUBuffer/size}} must be equal to the [=byte length=] of |inputDesc|. + 1. For each |key| -> |value| of |outputs|: + 1. |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|] must exist. + 1. Let |outputDesc| be |graph|.{{MLGraph/[[outputDescriptors]]}}[|key|]. + 1. If |value| is a {{GPUBuffer}}, then: + 1. |value|.{{GPUBuffer/size}} must be equal to the [=byte length=] of |outputDesc|. +
1. For each |key| -> |value| of |inputs|: - 1. Let |inputDesc| be |graph|.{{MLGraph/[[inputDescriptors]]}}[|key|]. - 1. Let |inputTensor| be a new tensor for |graph|.{{MLGraph/[[implementation]]}} of data type that is compatible with |inputDesc|.{{MLOperandDescriptor/type}}. - 1. If |value| is an {{MLGPUInput}}, then: - 1. Set the dimensions of |inputTensor| to |value|.{{MLGPUInput/dimensions}}. - 1. Else: - 1. Set the dimensions of |inputTensor| to |inputDesc|.{{MLOperandDescriptor/dimensions}}. - 1. If |value| is an {{MLGPUInput}}, then: - 1. Set the values of |inputTensor| to the values of |value|.{{MLGPUInput/resource}}. - 1. If |value| is an {{MLGPUResource}}, then: - 1. Set the values of |inputTensor| to the values of |value|. - 1. Set the input of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key| to |inputTensor|. + 1. Set the input of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key| to |value|. 1. For each |key| -> |value| of |outputs|: - 1. Issue a compute request for output of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key|. - 1. Wait for the compute request to be completed. - 1. If there is an error returned by |graph|.{{MLGraph/[[implementation]]}}, then: - 1. Throw an {{OperationError}} {{DOMException}} and stop. - 1. Else: - 1. Let |outputTensor| be the output tensor returned by |graph|.{{MLGraph/[[implementation]]}}. - 1. If the kind of |value| is not compatible with the value type of |outputTensor|, then throw a {{DataError}} {{DOMException}} and stop. - 1. Let |outputSize| be 1. - 1. For each |dimension| of dimensions of |outputTensor|: - 1. Set |outputSize| to the product of |outputSize| and |dimension|. - 1. If |outputSize| is greater than the length of |value|, then: - 1. Throw a {{DataError}} {{DOMException}} and stop. - 1. Else: - 1. Set the values of |value| to the values of |outputTensor|. + 1. 
Set the output of |graph|.{{MLGraph/[[implementation]]}} that is associated with |key| to |value|. + 1. Issue a compute request of |graph|.{{MLGraph/[[implementation]]}}. + 1. If there is an error returned by |graph|.{{MLGraph/[[implementation]]}}, then: + 1. Throw an {{OperationError}} {{DOMException}} and stop. 1. Return {{undefined}}.