carlesonielfa committed
Commit 50589b7
Parent: 192d6ae

Update to Sentis 2.1.1

Files changed (2):
  1. README.md +2 -3
  2. RunPhi15.cs +20 -20
README.md CHANGED

@@ -5,8 +5,7 @@ pipeline_tag: text-generation
 ---
 
 
-# Phi 1.5 Model in Unity Sentis (Version 1.5.0-pre.2)
-*Version 1.3.0 Sentis files are not compatible with Sentis 1.5.0 and need to be recreated/downloaded
+# Phi 1.5 Model in Unity Sentis (Version 2.1.1)
 
 This is the [Microsoft Phi 1.5](https://huggingface.co/microsoft/phi-1_5) model checked to run on Unity 2023. Phi 1.5 is a Large Language Model that was trained on synthesized data. Please see their page for more information about the model and license.
 The model has 1.3 billion parameters.
@@ -14,7 +13,7 @@ The model has 1.3 billion parameters.
 
 ## How to Use
 * Create a new scene in Unity 2023
-* Install `com.unity.sentis` version `1.5.0-pre.2` and `com.unity.nuget.newtonsoft-json` packages
+* Install `com.unity.sentis` version `2.1.1` and `com.unity.nuget.newtonsoft-json` packages
 * Add the RunPhi15.cs file to the Main Camera
 * Put `phi15.sentis`, `vocab.json` and `merges.txt` in the Assets/StreamingAssets folder
 * Adjust some of the variables such as the `outputText` string to set the prompt
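The "How to Use" steps above come down to loading the exported `phi15.sentis` file from StreamingAssets and creating a Sentis worker for it. Below is a minimal sketch of that setup against the 2.1.1 API this commit targets; the class name, path handling and backend choice are illustrative and not part of RunPhi15.cs:

```csharp
using UnityEngine;
using Unity.Sentis;

// Minimal sketch (not part of the commit) of the setup the README steps
// describe: load phi15.sentis from StreamingAssets and create a Worker.
public class LoadPhi15 : MonoBehaviour
{
    Worker engine;

    void Start()
    {
        // phi15.sentis is the serialized model the README asks you to place
        // in Assets/StreamingAssets.
        string path = Application.streamingAssetsPath + "/phi15.sentis";
        Model model = ModelLoader.Load(path);

        // Backend choice is illustrative; the committed script keeps it in a field.
        engine = new Worker(model, BackendType.GPUCompute);
    }

    void OnDestroy()
    {
        // Same cleanup the commit keeps at the end of RunPhi15.cs.
        engine?.Dispose();
    }
}
```

A component along these lines (or the real RunPhi15.cs) is what gets attached to the Main Camera in the steps above.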
RunPhi15.cs CHANGED

@@ -45,7 +45,7 @@ public class RunPhi15: MonoBehaviour
     //Store the vocabulary
     string[] tokens;
 
-    IWorker engine;
+    Worker engine;
 
     int currentToken = 0;
    int[] outputTokens = new int[maxTokens];
@@ -76,16 +76,14 @@ public class RunPhi15: MonoBehaviour
         int outputIndex = model1.outputs.Count - 1;
         //var model1 = ModelLoader.Load(asset);
         //Create a new model to select the random token:
-        var model2 = FF.Compile(
-            (input, currentToken) =>
-            {
-                var row = FF.Select(model1.Forward(input)[outputIndex], 1, currentToken);
-                return FF.Multinomial(predictability * row, 1);
-            },
-            (model1.inputs[0], InputDef.Int(new TensorShape()))
-        );
+        FunctionalGraph graph = new FunctionalGraph();
+        FunctionalTensor input_0 = graph.AddInput<int>(new TensorShape(1, maxTokens));
+        FunctionalTensor input_1 = graph.AddInput<int>(new TensorShape(1));
+        FunctionalTensor row = Functional.Select(Functional.Forward(model1, input_0)[outputIndex], 1, input_1);
+        FunctionalTensor output = Functional.Multinomial(predictability * row, 1);
+        Model model2 = graph.Compile(output);
 
-        engine = WorkerFactory.CreateWorker(backend, model2);
+        engine = new Worker(model2, backend);
 
         DecodePrompt(outputString);
 
@@ -103,17 +101,19 @@ public class RunPhi15: MonoBehaviour
 
     void RunInference()
     {
-        using var tokensSoFar = new TensorInt(new TensorShape(1, maxTokens), outputTokens);
-        using var index = new TensorInt(currentToken);
-
-        engine.Execute(new Dictionary<string, Tensor> { {"input_0", tokensSoFar }, { "input_1", index }});
-
-        var probs = engine.PeekOutput() as TensorInt;
+        using var tokensSoFar = new Tensor<int>(new TensorShape(1, maxTokens), outputTokens);
+        using var index = new Tensor<int>(new TensorShape(1));
+        index[0] = currentToken;
+        engine.SetInput("input_0", tokensSoFar);
+        engine.SetInput("input_1", index);
+        engine.Schedule();
+
+        var probs = engine.PeekOutput() as Tensor<int>;
         //Debug.Log(probs.shape);
 
-        probs.CompleteOperationsAndDownload();
-
-        int ID = probs[0];
+        probs.CompleteAllPendingOperations();
+        var result = probs.ReadbackAndClone();
+        int ID = result[0];
 
         //shift window down if got to the end
         if (currentToken >= maxTokens - 1)
@@ -266,4 +266,4 @@ public class RunPhi15: MonoBehaviour
         engine?.Dispose();
     }
 
-}
+}
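For readers porting similar scripts, the inference sequence introduced in the `RunInference()` hunk above is pulled out here into a standalone helper. The wrapper class, method name and signature are illustrative, and the explicit disposal of the readback result is an addition for tidiness rather than something the committed code does:

```csharp
using Unity.Sentis;

// Standalone sketch (illustrative helper, not part of the commit) of the
// Sentis 2.1.1 run loop this commit introduces in RunPhi15.RunInference().
static class Phi15Inference
{
    public static int SampleNextToken(Worker engine, int[] outputTokens, int maxTokens, int currentToken)
    {
        // The two tensors match the graph inputs declared with graph.AddInput<int>(...) in Start().
        using var tokensSoFar = new Tensor<int>(new TensorShape(1, maxTokens), outputTokens);
        using var index = new Tensor<int>(new TensorShape(1));
        index[0] = currentToken;

        // Named inputs are set individually and execution is scheduled,
        // replacing the single engine.Execute(dictionary) call from Sentis 1.5.
        engine.SetInput("input_0", tokensSoFar);
        engine.SetInput("input_1", index);
        engine.Schedule();

        var probs = engine.PeekOutput() as Tensor<int>;
        probs.CompleteAllPendingOperations();          // wait for the scheduled work, as in the commit
        using var result = probs.ReadbackAndClone();   // CPU copy holding the sampled token id
        return result[0];
    }
}
```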