Skip to content

Commit

Permalink
Update ncnn.js (#1314)
Browse files Browse the repository at this point in the history
  • Loading branch information
lutzroeder committed Sep 6, 2024
1 parent dbbe0ed commit 3b43032
Show file tree
Hide file tree
Showing 5 changed files with 168 additions and 39 deletions.
148 changes: 137 additions & 11 deletions source/ncnn.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import * as base from './base.js';

const ncnn = {};
const pnnx = {};

// https://github.com/Tencent/ncnn/wiki/param-and-model-file-structure
// https://github.com/Tencent/ncnn/wiki/operation-param-weight-table
Expand Down Expand Up @@ -45,7 +46,7 @@ ncnn.ModelFactory = class {
context.type = 'ncnn.weights';
} else if (identifier.endsWith('.pnnx.bin')) {
const entries = context.peek('zip');
if (entries && entries.size > 0) {
if (entries) { // can be empty
context.type = 'pnnx.weights';
context.target = entries;
}
Expand Down Expand Up @@ -87,7 +88,12 @@ ncnn.ModelFactory = class {
}

async open(context) {
const metadata = await context.metadata('ncnn-metadata.json');
let metadata = null;
if (context.type.startsWith('pnnx.')) {
metadata = await pnnx.Metadata.open(context);
} else {
metadata = await context.metadata('ncnn-metadata.json');
}
const identifier = context.identifier.toLowerCase();
const format = context.type.split('.').shift();
switch (context.type) {
Expand Down Expand Up @@ -204,8 +210,16 @@ ncnn.Graph = class {
const dimensions = Array.from(layer.params.values()).map((value) => isNaN(parseInt(value, 10)) ? value : parseInt(value, 10));
const shape = new ncnn.TensorShape(dimensions);
const type = new ncnn.TensorType('float32', shape);
const input = new ncnn.Argument(layer.name, layer.outputs.map((output) => values.map(output, type)));
this.inputs.push(input);
const argument = new ncnn.Argument(layer.name, layer.outputs.map((output) => values.map(output, type)));
this.inputs.push(argument);
} else if (layer.type === 'pnnx.Input' && layer.params) {
const type = ncnn.Utility.route(layer.params, '0');
const argument = new ncnn.Argument(layer.name, layer.outputs.map((output) => values.map(output, type)));
this.inputs.push(argument);
} else if (layer.type === 'pnnx.Output' && layer.params) {
const type = ncnn.Utility.route(layer.params, '0');
const argument = new ncnn.Argument(layer.name, layer.inputs.map((input) => values.map(input, type)));
this.outputs.push(argument);
} else {
const node = new ncnn.Node(metadata, format, blobs, layer, values);
this.nodes.push(node);
Expand Down Expand Up @@ -251,37 +265,52 @@ ncnn.Node = class {
const params = layer.params;
const inputs = layer.inputs || [];
let inputIndex = 0;
const names = new Map();
if (params) {
for (const [key, value] of params) {
if (key.startsWith('$')) {
names.set(value, key.substring(1));
params.delete(key);
}
}
}
if (this.type && Array.isArray(this.type.inputs)) {
for (const input of this.type.inputs) {
if (inputIndex < inputs.length || input.optional === false) {
const count = (input.type === 'Tensor[]') ? (inputs.length - inputIndex) : 1;
const list = inputs.slice(inputIndex, inputIndex + count).filter((id) => id !== '' || input.option !== 'optional').map((id) => values.map(id));
const list = inputs.slice(inputIndex, inputIndex + count).filter((id) => id !== '' || input.option !== 'optional').map((id) => values.map(id, ncnn.Utility.route(params, id)));
const argument = new ncnn.Argument(input.name, list);
this.inputs.push(argument);
inputIndex += count;
}
}
}
this.inputs.push(...inputs.slice(inputIndex).map((input, index) => {
const name = ((inputIndex + index) === 0) ? 'input' : (inputIndex + index).toString();
return new ncnn.Argument(name, [values.map(input)]);
index = inputIndex + index;
let name = 'input';
if (names.has(input)) {
name = names.get(input);
} else if (index !== 0) {
name = index.toString();
}
return new ncnn.Argument(name, [values.map(input, ncnn.Utility.route(params, input))]);
}));
const outputs = layer.outputs || [];
let outputIndex = 0;
if (this.type && Array.isArray(this.type.outputs)) {
for (const output of this.type.outputs) {
if (outputIndex < outputs.length || output.option !== 'optional') {
const count = (output.type === 'Tensor[]') ? (outputs.length - outputIndex) : 1;
const list = outputs.slice(outputIndex, outputIndex + count).map((id) => values.map(id));
const list = outputs.slice(outputIndex, outputIndex + count).map((id) => values.map(id, ncnn.Utility.route(params, id)));
const argument = new ncnn.Argument(output.name, list);
this.outputs.push(argument);
outputIndex += count;
}
}
}
this.outputs.push(...outputs.slice(outputIndex).map((output, index) => {
const name = ((outputIndex + index) === 0) ? 'output' : (outputIndex + index).toString();
return new ncnn.Argument(name, [values.map(output)]);
const name = (outputIndex + index) === 0 ? 'output' : (outputIndex + index).toString();
return new ncnn.Argument(name, [values.map(output, ncnn.Utility.route(params, output))]);
}));
blobs.weight = (name, shape, code) => {
const blob = blobs.load(shape, code || 0);
Expand Down Expand Up @@ -623,7 +652,20 @@ ncnn.Node = class {
}
}
if (params && params.size > 0) {
const attributes = this.type && Array.isArray(this.type.attributes) ? this.type.attributes : [];
for (const [key, signature] of params) {
if (key.startsWith('@')) {
const name = key.substring(1);
const identifier = `${this.name}.${name}`;
const data = blobs.entry(identifier);
const type = ncnn.Utility.type(signature);
const tensor = new ncnn.Tensor(type, data, null);
const value = new ncnn.Value(identifier, null, tensor);
const argument = new ncnn.Argument(name, [value]);
this.inputs.push(argument);
params.delete(key);
}
}
const attributes = Array.isArray(this.type.attributes) ? this.type.attributes : [];
for (const [index, obj] of params) {
const metadata = attributes[index];
let name = index;
Expand Down Expand Up @@ -731,6 +773,24 @@ ncnn.Utility = class {
}
return value;
}

static type(signature) {
const match = signature.match(/\(([^)]+)\)(\w+)/);
const shape = new ncnn.TensorShape(match[1].split(',').map((v) => parseInt(v, 10)));
const dataTypes = new Map([['f32', 'float32'], ['f16', 'float16']]);
const dataType = dataTypes.get(match[2]) || match[2];
return new ncnn.TensorType(dataType, shape);
}

static route(params, id) {
const key = `#${id}`;
if (params && params.has(key)) {
const signature = params.get(key);
params.delete(key);
return ncnn.Utility.type(signature);
}
return null;
}
};

ncnn.TextParamReader = class {
Expand Down Expand Up @@ -919,6 +979,72 @@ ncnn.BlobReader = class {
throw new ncnn.Error('Invalid weights data size.');
}
}

entry(identifier) {
if (this._entires && this._entires.has(identifier)) {
const reader = this._entires.get(identifier);
return reader.peek();
}
return null;
}
};

pnnx.Metadata = class {

    // Loads pnnx operator metadata once per session from 'pytorch-metadata.json';
    // a missing or unreadable file yields empty metadata rather than an error.
    static async open(context) {
        if (!pnnx.Metadata._metadata) {
            let data = null;
            try {
                data = await context.request('pytorch-metadata.json');
            } catch {
                // continue regardless of error
            }
            pnnx.Metadata._metadata = new pnnx.Metadata(data);
        }
        return pnnx.Metadata._metadata;
    }

    constructor(data) {
        this._types = new Map();
        this._attributes = new Map();
        this._index = new Map();
        if (!data) {
            return;
        }
        for (const item of JSON.parse(data)) {
            // Normalize PyTorch schema names to the identifiers pnnx emits,
            // e.g. 'torch.nn.modules.conv.Conv2d' -> 'nn.Conv2d' and
            // 'aten::add.Tensor' -> 'torch.add'.
            let name = item.name;
            name = name.replace(/^torch\.nn\.modules\.(\w)+\./, 'nn.');
            name = name.replace(/aten::([a-z_]+)(\.\w+)?/g, (match, p1) => `torch.${p1}`);
            this._types.set(name, { name, category: item.category });
        }
    }

    // Returns the registered type for 'name', lazily creating and caching a
    // bare placeholder entry for unknown types.
    type(name) {
        let value = this._types.get(name);
        if (value === undefined) {
            value = { name: name.toString() };
            this._types.set(name, value);
        }
        return value;
    }

    // Returns metadata for attribute '<type>:<name>', caching all of a type's
    // input and attribute entries on first access (null when unknown).
    // NOTE(review): the constructor above stores only { name, category }, so
    // 'inputs'/'attributes' are absent on loaded types and this resolves to
    // null for them — confirm this is intended.
    attribute(type, name) {
        const key = `${type}:${name}`;
        if (!this._attributes.has(key)) {
            this._attributes.set(key, null);
            const metadata = this.type(type);
            if (metadata) {
                for (const input of metadata.inputs || []) {
                    this._attributes.set(`${type}:${input.name}`, input);
                }
                for (const attribute of metadata.attributes || []) {
                    this._attributes.set(`${type}:${attribute.name}`, attribute);
                }
            }
        }
        return this._attributes.get(key);
    }
};

ncnn.Error = class extends Error {
Expand Down
17 changes: 8 additions & 9 deletions source/onnx.js
Original file line number Diff line number Diff line change
Expand Up @@ -912,17 +912,16 @@ onnx.Context.Model = class {
onnx.Metadata = class {

static async open(context) {
if (onnx.Metadata._metadata) {
return onnx.Metadata._metadata;
}
try {
const data = await context.request('onnx-metadata.json');
if (!onnx.Metadata._metadata) {
let data = null;
try {
data = await context.request('onnx-metadata.json');
} catch {
// continue regardless of error
}
onnx.Metadata._metadata = new onnx.Metadata(data);
return onnx.Metadata._metadata;
} catch {
onnx.Metadata._metadata = new onnx.Metadata(null);
return onnx.Metadata._metadata;
}
return onnx.Metadata._metadata;
}

constructor(data) {
Expand Down
7 changes: 7 additions & 0 deletions source/pytorch-metadata.json
Original file line number Diff line number Diff line change
Expand Up @@ -2853,6 +2853,7 @@
},
{
"name": "aten::cat.names_out",
"category": "Tensor",
"inputs": [
{ "name": "tensors", "type": "Tensor[]" },
{ "name": "dim", "type": "Dimname" }
Expand All @@ -2863,6 +2864,7 @@
},
{
"name": "aten::cat.out",
"category": "Tensor",
"inputs": [
{ "name": "tensors", "type": "Tensor[]" },
{ "name": "dim", "type": "int64", "default": 0 }
Expand Down Expand Up @@ -5770,6 +5772,7 @@
},
{
"name": "aten::gather",
"category": "Transform",
"inputs": [
{ "name": "self", "type": "Tensor" },
{ "name": "dim", "type": "int64" },
Expand All @@ -5782,6 +5785,7 @@
},
{
"name": "aten::gather.dimname",
"category": "Transform",
"inputs": [
{ "name": "self", "type": "Tensor" },
{ "name": "dim", "type": "Dimname" },
Expand All @@ -5794,6 +5798,7 @@
},
{
"name": "aten::gather.dimname_out",
"category": "Transform",
"inputs": [
{ "name": "self", "type": "Tensor" },
{ "name": "dim", "type": "Dimname" },
Expand All @@ -5806,6 +5811,7 @@
},
{
"name": "aten::gather.out",
"category": "Transform",
"inputs": [
{ "name": "self", "type": "Tensor" },
{ "name": "dim", "type": "int64" },
Expand Down Expand Up @@ -12961,6 +12967,7 @@
},
{
"name": "aten::squeeze.dims",
"category": "Transform",
"inputs": [
{ "name": "self", "type": "Tensor" },
{ "name": "dim", "type": "int64[]" }
Expand Down
18 changes: 8 additions & 10 deletions source/pytorch.js
Original file line number Diff line number Diff line change
Expand Up @@ -3954,17 +3954,15 @@ pytorch.nnapi.Metadata = class {
pytorch.Metadata = class {

static async open(context) {
if (pytorch.Metadata._metadata) {
return pytorch.Metadata._metadata;
}
try {
const data = await context.request('pytorch-metadata.json');
pytorch.Metadata._metadata = new pytorch.Metadata(data);
return pytorch.Metadata._metadata;
} catch {
pytorch.Metadata._metadata = new pytorch.Metadata(null);
return pytorch.Metadata._metadata;
if (!pytorch.Metadata._metadata) {
try {
const data = await context.request('pytorch-metadata.json');
pytorch.Metadata._metadata = new pytorch.Metadata(data);
} catch {
pytorch.Metadata._metadata = new pytorch.Metadata(null);
}
}
return pytorch.Metadata._metadata;
}

constructor(data) {
Expand Down
17 changes: 8 additions & 9 deletions source/tengine.js
Original file line number Diff line number Diff line change
Expand Up @@ -187,17 +187,16 @@ tengine.TensorShape = class {
tengine.Metadata = class {

static async open(context) {
if (tengine.Metadata._metadata) {
return tengine.Metadata._metadata;
}
try {
const data = await context.request('tengine-metadata.json');
if (!tengine.Metadata._metadata) {
let data = null;
try {
data = await context.request('tengine-metadata.json');
} catch {
// continue regardless of error
}
tengine.Metadata._metadata = new tengine.Metadata(data);
return tengine.Metadata._metadata;
} catch {
tengine.Metadata._metadata = new tengine.Metadata(null);
return tengine.Metadata._metadata;
}
return tengine.Metadata._metadata;
}

constructor(data) {
Expand Down

0 comments on commit 3b43032

Please sign in to comment.