From 54d704d0bf2b3dd4297a58f0f775e2e8b92e24ac Mon Sep 17 00:00:00 2001
From: 199******99 <9200608+doglikegodness@user.noreply.gitee.com>
Date: Sat, 10 Jul 2021 09:48:35 +0000
Subject: [PATCH] =?UTF-8?q?add=20code/2021=5Fspring/=E5=9C=BA=E6=99=AF?=
 =?UTF-8?q?=E8=AF=86=E5=88=AB/=E9=BB=84=E7=8E=AE=E7=90=AA2019302110409/con?=
 =?UTF-8?q?vert=5Fmodel.py.?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../convert_model.py"                         | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 "code/2021_spring/\345\234\272\346\231\257\350\257\206\345\210\253/\351\273\204\347\216\256\347\220\2522019302110409/convert_model.py"

diff --git "a/code/2021_spring/\345\234\272\346\231\257\350\257\206\345\210\253/\351\273\204\347\216\256\347\220\2522019302110409/convert_model.py" "b/code/2021_spring/\345\234\272\346\231\257\350\257\206\345\210\253/\351\273\204\347\216\256\347\220\2522019302110409/convert_model.py"
new file mode 100644
index 0000000..f95ebec
--- /dev/null
+++ "b/code/2021_spring/\345\234\272\346\231\257\350\257\206\345\210\253/\351\273\204\347\216\256\347\220\2522019302110409/convert_model.py"
@@ -0,0 +1,26 @@
+import torch
+from torch.autograd import Variable as V
+import torchvision.models as models
+from PIL import Image
+from torchvision import transforms as trn
+from torch.nn import functional as F
+import os
+
+# the architecture to use
+arch = 'resnet18'
+model_weight = '/data/vision/torralba/deepscene/moments/models/2stream-simple/model/kinetics_rgb_resnet18_2d_single_stack1_fromscratch_best.pth.tar'
+model_name = 'resnet18_kinetics_fromscratch'
+
+# create the network architecture
+model = models.__dict__[arch](num_classes=400)
+
+#model_weight = '%s_places365.pth.tar' % arch
+
+checkpoint = torch.load(model_weight, map_location=lambda storage, loc: storage)  # a model trained on GPU can be loaded on a CPU-only machine this way
+state_dict = {k.replace('module.', ''): v for k, v in checkpoint['state_dict'].items()}  # DataParallel prefixes each layer name with 'module.'
+model.load_state_dict(state_dict)
+model.eval()
+
+model.cpu()
+torch.save(model, 'whole_' + model_name + '.pth.tar')
+print('save to ' + 'whole_' + model_name + '.pth.tar')
--
Gitee
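
For reference, a minimal sketch (not part of the patch) of how the converted whole-model file could be loaded back and smoke-tested. It assumes the file produced by convert_model.py is in the working directory and that torchvision is importable so the pickled ResNet class can be resolved; the 1x3x224x224 input is only an assumed shape for the 2D ResNet-18.

import torch

model_name = 'resnet18_kinetics_fromscratch'    # same name as in convert_model.py
model_file = 'whole_' + model_name + '.pth.tar'

# torch.save(model, ...) pickled the entire module, so loading it back only
# needs torch.load; newer PyTorch releases may require weights_only=False here.
model = torch.load(model_file, map_location='cpu')
model.eval()

with torch.no_grad():
    dummy = torch.randn(1, 3, 224, 224)         # assumed input size, smoke test only
    logits = model(dummy)

print(logits.shape)  # expected: torch.Size([1, 400]) for the 400 Kinetics classes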