English | 简体中文
An image model zoo implemented in PaddlePaddle.
Model Zoo

| CNN | Transformer | MLP |
| :---: | :---: | :---: |
- Install by pip:

  $ pip install ppim

- Install by wheel package:【Releases Packages】
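A wheel downloaded from the Releases page can also be installed directly with pip; the file name below is only a placeholder for whichever release is downloaded:

```shell
# Placeholder file name; use the wheel actually downloaded from Releases
$ pip install ppim-x.x.x-py3-none-any.whl
```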
Quick start: load a pretrained model and run a forward pass.

import paddle

from ppim import rednet_26

# Load the model with the ppim wheel package
model, val_transforms = rednet_26(pretrained=True, return_transforms=True)

# Alternatively, load the model with the paddle.hub API (requires paddlepaddle >= 2.1.0)
'''
model, val_transforms = paddle.hub.load(
    'AgentMaker/Paddle-Image-Models:dev',
    'rednet_26',
    source='github',
    force_reload=False,
    pretrained=True,
    return_transforms=True
)
'''

# Model summary
paddle.summary(model, input_size=(1, 3, 224, 224))

# Create a random input
x = paddle.randn(shape=(1, 3, 224, 224))

# Model forward
out = model(x)
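The quick start above feeds the model a random tensor; `val_transforms` is the validation preprocessing pipeline returned alongside the model. Below is a minimal sketch of running inference on a real image, assuming the snippet above has been run, that `test.jpg` is a placeholder path, and that the returned pipeline accepts a PIL image and ends with `ToTensor`/`Normalize`:

```python
from PIL import Image

import paddle

# Placeholder path; substitute any RGB image on disk
img = Image.open('test.jpg').convert('RGB')

# Apply the validation transforms returned by rednet_26(...)
x = val_transforms(img)            # (3, 224, 224) tensor after ToTensor/Normalize
x = paddle.unsqueeze(x, axis=0)    # add a batch dimension -> (1, 3, 224, 224)

# Forward pass in eval mode; the predicted index is an ImageNet-1k class id
model.eval()
logits = model(x)
print(paddle.argmax(logits, axis=-1).numpy())
```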
Finetune example: train rexnet_1_0 on Cifar100 with the Paddle high-level API (paddle.Model).

import paddle
import paddle.nn as nn
import paddle.vision.transforms as T
from paddle.vision.datasets import Cifar100

from ppim import rexnet_1_0

# Load the pretrained model with 100 output classes for Cifar100
model, val_transforms = rexnet_1_0(pretrained=True, return_transforms=True, class_dim=100)

# Wrap the model with the Paddle high-level API (Hapi)
model = paddle.Model(model)

# Set the optimizer
opt = paddle.optimizer.Adam(learning_rate=0.001, parameters=model.parameters())

# Set the loss function
loss = nn.CrossEntropyLoss()

# Set the evaluation metric
metric = paddle.metric.Accuracy(topk=(1, 5))

# Prepare the model
model.prepare(optimizer=opt, loss=loss, metrics=metric)

# Set the data preprocessing for training
train_transforms = T.Compose([
    T.Resize(256, interpolation='bicubic'),
    T.RandomCrop(224),
    T.ToTensor(),
    T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])

# Load the Cifar100 dataset
train_dataset = Cifar100(mode='train', transform=train_transforms, backend='pil')
val_dataset = Cifar100(mode='test', transform=val_transforms, backend='pil')

# Finetune the model
model.fit(
    train_data=train_dataset,
    eval_data=val_dataset,
    batch_size=256,
    epochs=2,
    eval_freq=1,
    log_freq=1,
    save_dir='save_models',
    save_freq=1,
    verbose=1,
    drop_last=False,
    shuffle=True,
    num_workers=0
)
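Once `fit()` finishes, the same high-level API can report final accuracy and export the finetuned weights. A minimal sketch continuing from the snippet above; the save prefix `finetuned/final` is only a placeholder:

```python
# Evaluate the finetuned model on the Cifar100 test split
result = model.evaluate(val_dataset, batch_size=256, verbose=1)
print(result)  # e.g. {'loss': ..., 'acc_top1': ..., 'acc_top5': ...}

# Save parameters and optimizer state under the given prefix
model.save('finetuned/final')
```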
Email: [email protected]
QQ Group: 1005109853