guoday committed on
Commit
1aa20ce
1 Parent(s): 1c90537

Update README.md

Files changed (1)
  1. README.md +1 -19
README.md CHANGED
@@ -40,21 +40,6 @@ inputs = tokenizer(input_text, return_tensors="pt").cuda()
  outputs = model.generate(**inputs, max_length=128)
  print(tokenizer.decode(outputs[0], skip_special_tokens=True))
  ```
- This code will output the following result:
- ```
- def quick_sort(arr):
-     if len(arr) <= 1:
-         return arr
-     pivot = arr[0]
-     left = []
-     right = []
-     for i in range(1, len(arr)):
-         if arr[i] < pivot:
-             left.append(arr[i])
-         else:
-             right.append(arr[i])
-     return quick_sort(left) + [pivot] + quick_sort(right)
- ```

  #### 2)Code Insertion
  ```python
@@ -78,10 +63,7 @@ inputs = tokenizer(input_text, return_tensors="pt").cuda()
  outputs = model.generate(**inputs, max_length=128)
  print(tokenizer.decode(outputs[0], skip_special_tokens=True)[len(input_text):])
  ```
- This code will output the following result:
- ```
- for i in range(1, len(arr)):
- ```
+
  #### 3)Repository Level Code Completion
  ```python
  from transformers import AutoTokenizer, AutoModelForCausalLM
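
For reference, the generation snippet whose tail appears in the first hunk's context lines (tokenize, `model.generate`, `tokenizer.decode`) runs end to end roughly as sketched below. This is a minimal sketch for illustration only: the checkpoint name and the prompt are assumptions, not taken from this diff.

```python
# Minimal sketch of the code-generation example referenced by the hunk context above.
# The checkpoint name and prompt are assumptions; substitute the model this card documents.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "deepseek-ai/deepseek-coder-6.7b-base"  # assumed checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.bfloat16, trust_remote_code=True
).cuda()

input_text = "# write a quick sort algorithm"  # assumed prompt
inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_length=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Since this commit removes the hard-coded sample outputs from the README, the completion printed by such a snippet is whatever the model generates at run time rather than a fixed transcript.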
 