hipnologo committed on
Commit 114da3f
1 Parent(s): 981d443

Upload RWForCausalLM

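For context: this commit uploads the checkpoint as an RWForCausalLM (Falcon's custom "RW" architecture), which is normally loaded through transformers' auto classes with trust_remote_code=True. A minimal loading sketch, assuming a repository id of hipnologo/falcon-7b-instruct-qlora-truthful-qa (inferred from the new "_name_or_path" in the config.json diff below; the actual repo id may differ):

# Minimal sketch: load the uploaded RWForCausalLM checkpoint with transformers.
# The repo id is an assumption inferred from the new "_name_or_path" in config.json;
# replace it with the actual repository name if it differs.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "hipnologo/falcon-7b-instruct-qlora-truthful-qa"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,      # RWForCausalLM comes from the repo's custom modeling code
    torch_dtype=torch.bfloat16,  # keep memory manageable for the ~14 GB of shards
    device_map="auto",           # spread the two .bin shards across available devices
)

inputs = tokenizer("Hello, Falcon!", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0], skip_special_tokens=True))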
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "/content/falcon-7b-instruct-sharded",
+ "_name_or_path": "/content/falcon-7b-instruct-qlora-truthful-qa",
  "alibi": false,
  "apply_residual_connection_post_layernorm": false,
  "architectures": [
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4f3c974769f975ed5334d8b77f9f73c7ccd5a51e608409fa007395ff288de59b
+ oid sha256:be36cfc58e42e1fb024ab36b8d227f2f33b5aaa651184f283db79d8cd55ef10f
  size 9951028257
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:55efa252197f18634844ff1e3c68c9769ad34bdc699b70553556ce7c6e1a2b13
+ oid sha256:c571e4741a0839a57598f9ac011acdd8ff21a5dd3f21b2dba08e414957fc6612
  size 3892483153
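The .bin shards themselves are stored through Git LFS; what the diff shows are the pointer files, which record only the sha256 oid and the byte size of each shard (per the spec URL on their first line). A minimal sketch of checking a downloaded shard against its pointer, assuming the shard is already present locally; the filename and expected values below are taken from the new pointer of the first shard above:

# Minimal sketch: verify a downloaded shard against its Git LFS pointer
# (oid = sha256 of the file contents, size = byte length), following
# https://git-lfs.github.com/spec/v1. The local path is an assumption.
import hashlib
import os

shard_path = "pytorch_model-00001-of-00002.bin"  # assumed local download location
expected_oid = "be36cfc58e42e1fb024ab36b8d227f2f33b5aaa651184f283db79d8cd55ef10f"
expected_size = 9951028257

sha256 = hashlib.sha256()
with open(shard_path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert os.path.getsize(shard_path) == expected_size, "size mismatch"
assert sha256.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches its LFS pointer")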