rudr4sarkar committed
Commit c08ad66
1 Parent(s): 9e377e7

Update app.py

Files changed (1)
  1. app.py +70 -0
app.py CHANGED
@@ -0,0 +1,70 @@
+ import gc
+
+ import torch
+ import bitsandbytes as bnb  # backend required for 4-bit (NF4) loading
+ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
+ from peft import PeftModel
+ from datasets import load_dataset
+
+ model_name = "peterxyz/detect-llama-34b"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+ # 4-bit NF4 quantization with double quantization and bfloat16 compute
+ nf4_config = BitsAndBytesConfig(
+     load_in_4bit=True,
+     bnb_4bit_quant_type="nf4",
+     bnb_4bit_use_double_quant=True,
+     bnb_4bit_compute_dtype=torch.bfloat16,
+ )
+
+ # Release any cached GPU memory before loading the model
+ torch.cuda.empty_cache()
+ gc.collect()
+
+ # Load the quantized base model, then attach the PEFT adapter weights
+ model_nf4 = AutoModelForCausalLM.from_pretrained(model_name, quantization_config=nf4_config)
+ model = PeftModel.from_pretrained(model_nf4, model_name)
+
+ # Smart-contract vulnerability dataset (loaded for reference; not used below)
+ dataset = load_dataset("peterxyz/smart-contract-vuln-detection")
+
+ input_text = """
+ pragma solidity ^0.5.0;
+
+ contract ModifierEntrancy {
+
+     mapping (address => uint) public tokenBalance;
+     string constant name = "Nu Token";
+     Bank bank;
+
+     constructor() public {
+         bank = new Bank();
+     }
+
+     // If a contract has a zero balance and supports the token, give them some token
+     function airDrop() hasNoBalance supportsToken public {
+         tokenBalance[msg.sender] += 20;
+     }
+
+     // Checks that the contract responds the way we want
+     modifier supportsToken() {
+         require(keccak256(abi.encodePacked("Nu Token")) == bank.supportsToken());
+         _;
+     }
+
+     // Checks that the caller has a zero balance
+     modifier hasNoBalance {
+         require(tokenBalance[msg.sender] == 0);
+         _;
+     }
+ }
+
+ contract Bank {
+
+     function supportsToken() external returns (bytes32) {
+         return keccak256(abi.encodePacked("Nu Token"));
+     }
+
+ }
+
+ Identify the vulnerability in the code given above.
+ """
+
+ # Tokenize the prompt, generate a response, and print the decoded answer
+ inputs = tokenizer(input_text, return_tensors="pt").to("cuda")
+ outputs = model.generate(**inputs)
+ print(tokenizer.decode(outputs[0], skip_special_tokens=True))