model 8.40 KB
penn committed on 2023-01-28 02:36 · add model.
dict_keys(
['y_embedder.embedding_table.weight',
't_embedder.mlp.0.weight', 't_embedder.mlp.0.bias', 't_embedder.mlp.2.weight', 't_embedder.mlp.2.bias',
'x_embedder.proj.weight', 'x_embedder.proj.bias', 'pos_embed',
'blocks.0.attn.qkv.weight', 'blocks.0.attn.qkv.bias', 'blocks.0.attn.proj.weight', 'blocks.0.attn.proj.bias', 'blocks.0.mlp.fc1.weight', 'blocks.0.mlp.fc1.bias', 'blocks.0.mlp.fc2.weight', 'blocks.0.mlp.fc2.bias', 'blocks.0.adaLN_modulation.1.weight', 'blocks.0.adaLN_modulation.1.bias', 'blocks.1.attn.qkv.weight', 'blocks.1.attn.qkv.bias', 'blocks.1.attn.proj.weight', 'blocks.1.attn.proj.bias', 'blocks.1.mlp.fc1.weight', 'blocks.1.mlp.fc1.bias', 'blocks.1.mlp.fc2.weight', 'blocks.1.mlp.fc2.bias', 'blocks.1.adaLN_modulation.1.weight', 'blocks.1.adaLN_modulation.1.bias', 'blocks.2.attn.qkv.weight', 'blocks.2.attn.qkv.bias', 'blocks.2.attn.proj.weight', 'blocks.2.attn.proj.bias', 'blocks.2.mlp.fc1.weight', 'blocks.2.mlp.fc1.bias', 'blocks.2.mlp.fc2.weight', 'blocks.2.mlp.fc2.bias', 'blocks.2.adaLN_modulation.1.weight', 'blocks.2.adaLN_modulation.1.bias', 'blocks.3.attn.qkv.weight', 'blocks.3.attn.qkv.bias', 'blocks.3.attn.proj.weight', 'blocks.3.attn.proj.bias', 'blocks.3.mlp.fc1.weight', 'blocks.3.mlp.fc1.bias', 'blocks.3.mlp.fc2.weight', 'blocks.3.mlp.fc2.bias', 'blocks.3.adaLN_modulation.1.weight', 'blocks.3.adaLN_modulation.1.bias', 'blocks.4.attn.qkv.weight', 'blocks.4.attn.qkv.bias', 'blocks.4.attn.proj.weight', 'blocks.4.attn.proj.bias', 'blocks.4.mlp.fc1.weight', 'blocks.4.mlp.fc1.bias', 'blocks.4.mlp.fc2.weight', 'blocks.4.mlp.fc2.bias', 'blocks.4.adaLN_modulation.1.weight', 'blocks.4.adaLN_modulation.1.bias', 'blocks.5.attn.qkv.weight', 'blocks.5.attn.qkv.bias', 'blocks.5.attn.proj.weight', 'blocks.5.attn.proj.bias', 'blocks.5.mlp.fc1.weight', 'blocks.5.mlp.fc1.bias', 'blocks.5.mlp.fc2.weight', 'blocks.5.mlp.fc2.bias', 'blocks.5.adaLN_modulation.1.weight', 'blocks.5.adaLN_modulation.1.bias', 'blocks.6.attn.qkv.weight', 'blocks.6.attn.qkv.bias', 'blocks.6.attn.proj.weight', 'blocks.6.attn.proj.bias', 'blocks.6.mlp.fc1.weight', 'blocks.6.mlp.fc1.bias', 'blocks.6.mlp.fc2.weight', 'blocks.6.mlp.fc2.bias', 'blocks.6.adaLN_modulation.1.weight', 'blocks.6.adaLN_modulation.1.bias', 'blocks.7.attn.qkv.weight', 'blocks.7.attn.qkv.bias', 'blocks.7.attn.proj.weight', 'blocks.7.attn.proj.bias', 'blocks.7.mlp.fc1.weight', 'blocks.7.mlp.fc1.bias', 'blocks.7.mlp.fc2.weight', 'blocks.7.mlp.fc2.bias', 'blocks.7.adaLN_modulation.1.weight', 'blocks.7.adaLN_modulation.1.bias', 'blocks.8.attn.qkv.weight', 'blocks.8.attn.qkv.bias', 'blocks.8.attn.proj.weight', 'blocks.8.attn.proj.bias', 'blocks.8.mlp.fc1.weight', 'blocks.8.mlp.fc1.bias', 'blocks.8.mlp.fc2.weight', 'blocks.8.mlp.fc2.bias', 'blocks.8.adaLN_modulation.1.weight', 'blocks.8.adaLN_modulation.1.bias', 'blocks.9.attn.qkv.weight', 'blocks.9.attn.qkv.bias', 'blocks.9.attn.proj.weight', 'blocks.9.attn.proj.bias', 'blocks.9.mlp.fc1.weight', 'blocks.9.mlp.fc1.bias', 'blocks.9.mlp.fc2.weight', 'blocks.9.mlp.fc2.bias', 'blocks.9.adaLN_modulation.1.weight', 'blocks.9.adaLN_modulation.1.bias', 'blocks.10.attn.qkv.weight', 'blocks.10.attn.qkv.bias', 'blocks.10.attn.proj.weight', 'blocks.10.attn.proj.bias', 'blocks.10.mlp.fc1.weight', 'blocks.10.mlp.fc1.bias', 'blocks.10.mlp.fc2.weight', 'blocks.10.mlp.fc2.bias', 'blocks.10.adaLN_modulation.1.weight', 'blocks.10.adaLN_modulation.1.bias', 'blocks.11.attn.qkv.weight', 'blocks.11.attn.qkv.bias', 'blocks.11.attn.proj.weight', 'blocks.11.attn.proj.bias', 'blocks.11.mlp.fc1.weight', 'blocks.11.mlp.fc1.bias', 'blocks.11.mlp.fc2.weight', 'blocks.11.mlp.fc2.bias', 'blocks.11.adaLN_modulation.1.weight', 'blocks.11.adaLN_modulation.1.bias', 'blocks.12.attn.qkv.weight', 'blocks.12.attn.qkv.bias', 
'blocks.12.attn.proj.weight', 'blocks.12.attn.proj.bias', 'blocks.12.mlp.fc1.weight', 'blocks.12.mlp.fc1.bias', 'blocks.12.mlp.fc2.weight', 'blocks.12.mlp.fc2.bias', 'blocks.12.adaLN_modulation.1.weight', 'blocks.12.adaLN_modulation.1.bias', 'blocks.13.attn.qkv.weight', 'blocks.13.attn.qkv.bias', 'blocks.13.attn.proj.weight', 'blocks.13.attn.proj.bias', 'blocks.13.mlp.fc1.weight', 'blocks.13.mlp.fc1.bias', 'blocks.13.mlp.fc2.weight', 'blocks.13.mlp.fc2.bias', 'blocks.13.adaLN_modulation.1.weight', 'blocks.13.adaLN_modulation.1.bias', 'blocks.14.attn.qkv.weight', 'blocks.14.attn.qkv.bias', 'blocks.14.attn.proj.weight', 'blocks.14.attn.proj.bias', 'blocks.14.mlp.fc1.weight', 'blocks.14.mlp.fc1.bias', 'blocks.14.mlp.fc2.weight', 'blocks.14.mlp.fc2.bias', 'blocks.14.adaLN_modulation.1.weight', 'blocks.14.adaLN_modulation.1.bias', 'blocks.15.attn.qkv.weight', 'blocks.15.attn.qkv.bias', 'blocks.15.attn.proj.weight', 'blocks.15.attn.proj.bias', 'blocks.15.mlp.fc1.weight', 'blocks.15.mlp.fc1.bias', 'blocks.15.mlp.fc2.weight', 'blocks.15.mlp.fc2.bias', 'blocks.15.adaLN_modulation.1.weight', 'blocks.15.adaLN_modulation.1.bias', 'blocks.16.attn.qkv.weight', 'blocks.16.attn.qkv.bias', 'blocks.16.attn.proj.weight', 'blocks.16.attn.proj.bias', 'blocks.16.mlp.fc1.weight', 'blocks.16.mlp.fc1.bias', 'blocks.16.mlp.fc2.weight', 'blocks.16.mlp.fc2.bias', 'blocks.16.adaLN_modulation.1.weight', 'blocks.16.adaLN_modulation.1.bias', 'blocks.17.attn.qkv.weight', 'blocks.17.attn.qkv.bias', 'blocks.17.attn.proj.weight', 'blocks.17.attn.proj.bias', 'blocks.17.mlp.fc1.weight', 'blocks.17.mlp.fc1.bias', 'blocks.17.mlp.fc2.weight', 'blocks.17.mlp.fc2.bias', 'blocks.17.adaLN_modulation.1.weight', 'blocks.17.adaLN_modulation.1.bias', 'blocks.18.attn.qkv.weight', 'blocks.18.attn.qkv.bias', 'blocks.18.attn.proj.weight', 'blocks.18.attn.proj.bias', 'blocks.18.mlp.fc1.weight', 'blocks.18.mlp.fc1.bias', 'blocks.18.mlp.fc2.weight', 'blocks.18.mlp.fc2.bias', 'blocks.18.adaLN_modulation.1.weight', 'blocks.18.adaLN_modulation.1.bias', 'blocks.19.attn.qkv.weight', 'blocks.19.attn.qkv.bias', 'blocks.19.attn.proj.weight', 'blocks.19.attn.proj.bias', 'blocks.19.mlp.fc1.weight', 'blocks.19.mlp.fc1.bias', 'blocks.19.mlp.fc2.weight', 'blocks.19.mlp.fc2.bias', 'blocks.19.adaLN_modulation.1.weight', 'blocks.19.adaLN_modulation.1.bias', 'blocks.20.attn.qkv.weight', 'blocks.20.attn.qkv.bias', 'blocks.20.attn.proj.weight', 'blocks.20.attn.proj.bias', 'blocks.20.mlp.fc1.weight', 'blocks.20.mlp.fc1.bias', 'blocks.20.mlp.fc2.weight', 'blocks.20.mlp.fc2.bias', 'blocks.20.adaLN_modulation.1.weight', 'blocks.20.adaLN_modulation.1.bias', 'blocks.21.attn.qkv.weight', 'blocks.21.attn.qkv.bias', 'blocks.21.attn.proj.weight', 'blocks.21.attn.proj.bias', 'blocks.21.mlp.fc1.weight', 'blocks.21.mlp.fc1.bias', 'blocks.21.mlp.fc2.weight', 'blocks.21.mlp.fc2.bias', 'blocks.21.adaLN_modulation.1.weight', 'blocks.21.adaLN_modulation.1.bias', 'blocks.22.attn.qkv.weight', 'blocks.22.attn.qkv.bias', 'blocks.22.attn.proj.weight', 'blocks.22.attn.proj.bias', 'blocks.22.mlp.fc1.weight', 'blocks.22.mlp.fc1.bias', 'blocks.22.mlp.fc2.weight', 'blocks.22.mlp.fc2.bias', 'blocks.22.adaLN_modulation.1.weight', 'blocks.22.adaLN_modulation.1.bias', 'blocks.23.attn.qkv.weight', 'blocks.23.attn.qkv.bias', 'blocks.23.attn.proj.weight', 'blocks.23.attn.proj.bias', 'blocks.23.mlp.fc1.weight', 'blocks.23.mlp.fc1.bias', 'blocks.23.mlp.fc2.weight', 'blocks.23.mlp.fc2.bias', 'blocks.23.adaLN_modulation.1.weight', 'blocks.23.adaLN_modulation.1.bias', 'blocks.24.attn.qkv.weight', 
'blocks.24.attn.qkv.bias', 'blocks.24.attn.proj.weight', 'blocks.24.attn.proj.bias', 'blocks.24.mlp.fc1.weight', 'blocks.24.mlp.fc1.bias', 'blocks.24.mlp.fc2.weight', 'blocks.24.mlp.fc2.bias', 'blocks.24.adaLN_modulation.1.weight', 'blocks.24.adaLN_modulation.1.bias', 'blocks.25.attn.qkv.weight', 'blocks.25.attn.qkv.bias', 'blocks.25.attn.proj.weight', 'blocks.25.attn.proj.bias', 'blocks.25.mlp.fc1.weight', 'blocks.25.mlp.fc1.bias', 'blocks.25.mlp.fc2.weight', 'blocks.25.mlp.fc2.bias', 'blocks.25.adaLN_modulation.1.weight', 'blocks.25.adaLN_modulation.1.bias', 'blocks.26.attn.qkv.weight', 'blocks.26.attn.qkv.bias', 'blocks.26.attn.proj.weight', 'blocks.26.attn.proj.bias', 'blocks.26.mlp.fc1.weight', 'blocks.26.mlp.fc1.bias', 'blocks.26.mlp.fc2.weight', 'blocks.26.mlp.fc2.bias', 'blocks.26.adaLN_modulation.1.weight', 'blocks.26.adaLN_modulation.1.bias', 'blocks.27.attn.qkv.weight', 'blocks.27.attn.qkv.bias', 'blocks.27.attn.proj.weight', 'blocks.27.attn.proj.bias', 'blocks.27.mlp.fc1.weight', 'blocks.27.mlp.fc1.bias', 'blocks.27.mlp.fc2.weight', 'blocks.27.mlp.fc2.bias', 'blocks.27.adaLN_modulation.1.weight', 'blocks.27.adaLN_modulation.1.bias',
'final_layer.linear.weight', 'final_layer.linear.bias', 'final_layer.adaLN_modulation.1.weight', 'final_layer.adaLN_modulation.1.bias'])
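The key names above follow the module layout of a DiT-style (Diffusion Transformer) checkpoint: patch, timestep, and label embedders, a fixed positional embedding, 28 transformer blocks (blocks.0 through blocks.27) each with fused-QKV attention, an MLP, and adaLN modulation parameters, plus a final projection layer. Below is a minimal sketch of how such a listing could be produced and sanity-checked with PyTorch; the checkpoint filename `dit_checkpoint.pt` and the assumption that the file is a plain saved state dict are hypothetical, not something stated in this repository.

```python
import torch

# Minimal sketch (assumption: the weights are a PyTorch state dict saved as
# "dit_checkpoint.pt"; the path is hypothetical).
state_dict = torch.load("dit_checkpoint.pt", map_location="cpu")

# Printing dict_keys reproduces a listing in the format shown above.
print(state_dict.keys())

# Basic structure checks implied by the key names.
block_ids = {int(k.split(".")[1]) for k in state_dict if k.startswith("blocks.")}
print("transformer blocks:", len(block_ids))  # blocks.0 .. blocks.27 -> 28
print("embedder params:", sorted(k for k in state_dict
                                 if k.startswith(("x_embedder.", "t_embedder.", "y_embedder."))))
print("has pos_embed:", "pos_embed" in state_dict)
print("final layer params:", [k for k in state_dict if k.startswith("final_layer.")])
```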