From 7054c1fee9d4c04e974158d256999b3b17e776ea Mon Sep 17 00:00:00 2001
From: numb3r3
Date: Mon, 21 Feb 2022 11:40:16 +0800
Subject: [PATCH] refactor: migration docarray

---
 README.md   | 2 +-
 executor.py | 4 ++--
 finetune.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index aea392c..31666c2 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # 3D Mesh Encoder
 
-An Executor that receives Documents containing point sets data in its blob attribute, with shape `(N, 3)` and encodes it to embeddings of shape `(D,)`.
+An Executor that receives Documents containing point sets data in its `tensor` attribute, with shape `(N, 3)` and encodes it to embeddings of shape `(D,)`.
 
 Now, the following pretrained models are ready to be used to create embeddings:
 - **PointConv-Shapenet-d512**: A **PointConv** model resulted in **512** dimension of embeddings, which is finetuned based on ShapeNet dataset.

diff --git a/executor.py b/executor.py
index 700f1ce..93ca2a5 100644
--- a/executor.py
+++ b/executor.py
@@ -38,11 +38,11 @@
 
 
 def normalize(doc: 'Document'):
-    points = doc.blob
+    points = doc.tensor
     points = points - np.expand_dims(np.mean(points, axis=0), 0)  # center
     dist = np.max(np.sqrt(np.sum(points ** 2, axis=1)), 0)
     points = points / dist  # scale
-    doc.blob = points.astype(np.float32)
+    doc.tensor = points.astype(np.float32)
     return doc
 
 
diff --git a/finetune.py b/finetune.py
index a5739ae..f65c751 100644
--- a/finetune.py
+++ b/finetune.py
@@ -24,7 +24,7 @@ def random_sample(pc, num):
 
 
 def preprocess(doc: 'Document', num_points: int = 1024, data_aug: bool = True):
-    points = random_sample(doc.blob, num_points)
+    points = random_sample(doc.tensor, num_points)
     # points = np.transpose(points)
     points = points - np.expand_dims(np.mean(points, axis=0), 0)  # center
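
For reference, a minimal sketch of what this migration means for callers, assuming DocArray v0.5+ (where the ndarray-valued `blob` field was renamed to `tensor`) and that `normalize` is importable from the repository's `executor.py` as patched above:

```python
import numpy as np
from docarray import Document  # DocArray v0.5+: `.tensor` holds the ndarray payload

from executor import normalize  # helper patched above; assumes repo-root layout

# Build a Document carrying an (N, 3) point set in `.tensor` (formerly `.blob`).
doc = Document(tensor=np.random.rand(1024, 3).astype(np.float32))

# Center and unit-scale the point set, exactly as the patched normalize() does.
doc = normalize(doc)
print(doc.tensor.shape)  # (1024, 3)
```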