From a6e731169d742b101b873d20ee1eff4f7d520a43 Mon Sep 17 00:00:00 2001
From: lifangtian
Date: Fri, 30 May 2025 09:58:12 +0000
Subject: [PATCH 1/2] Update the environment variable setup command in the README
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: lifangtian
---
 ACL_PyTorch/contrib/cv/image_retrieval/BLIP/readme.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/readme.md b/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/readme.md
index b002bfde7f..3bf14836af 100644
--- a/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/readme.md
+++ b/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/readme.md
@@ -202,7 +202,7 @@ BLIP模型为一种新的Vision-Language Pre-training框架，它可以灵活地
 1. 配置环境变量。
 
    ```
-   source /usr/local/Ascend/......
+   source /usr/local/Ascend/ascend-toolkit/set_env.sh
    ```
 
 2. 执行命令查看芯片名称($\{chip\_name\})。
-- 
Gitee

From bfb77693821d4f45c941c1dd8b370bf3473cce0f Mon Sep 17 00:00:00 2001
From: lifangtian
Date: Tue, 3 Jun 2025 03:02:12 +0000
Subject: [PATCH 2/2] Fix incorrect onnx version
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ACL_PyTorch/contrib/cv/image_retrieval/BLIP/requirement.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/requirement.txt b/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/requirement.txt
index f9acda5fe1..53ed61d2ff 100644
--- a/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/requirement.txt
+++ b/ACL_PyTorch/contrib/cv/image_retrieval/BLIP/requirement.txt
@@ -4,4 +4,4 @@ torchvision==0.8.1
 transformers==4.18.0
 fairscale==0.4.0
 timm==0.6.11
-onnx==0.12.0
\ No newline at end of file
+onnx==1.12.0
\ No newline at end of file
-- 
Gitee
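
A minimal verification sketch for the onnx pin corrected by patch 2, assuming the patched requirement.txt has already been installed with `pip install -r requirement.txt`:

```
# Minimal sketch: check that the installed onnx release matches the pin
# corrected by patch 2 (onnx==1.12.0). Assumes the patched requirement.txt
# has already been installed in the current environment.
import onnx

EXPECTED = "1.12.0"  # pin from the patched requirement.txt
installed = onnx.__version__
if installed != EXPECTED:
    raise SystemExit(f"onnx {installed} is installed, but requirement.txt pins {EXPECTED}")
print(f"onnx {installed} matches the pinned version")
```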