zhjohnchan committed on
Commit 4331d41
1 Parent(s): d5bf817

Update README.md

Files changed (1): README.md (+61, −3)
README.md CHANGED
@@ -1,3 +1,61 @@
- ---
- license: apache-2.0
- ---
<!-- markdownlint-disable first-line-h1 -->
<!-- markdownlint-disable html -->

<div align="center">
<h1>
CheXagent
</h1>
</div>

<p align="center">
📝 <a href="https://arxiv.org/" target="_blank">Paper</a> • 🤗 <a href="https://huggingface.co/StanfordAIMI/CheXagent-8b/" target="_blank">Hugging Face</a> • 🧩 <a href="https://github.com/Stanford-AIMI/CheXagent" target="_blank">Project</a>
</p>

<div align="center">
</div>

## ✨ Latest News

- [12/15/2023]: Model released on [Hugging Face](https://huggingface.co/StanfordAIMI/CheXagent-8b/).

## 🎬 Get Started

```python
import io

import requests
import torch
from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor, GenerationConfig

# step 1: Set up constants (a CUDA GPU is assumed; float16 keeps memory usage low)
device = "cuda"
dtype = torch.float16

# step 2: Load the processor, generation config, and model, then move the model to the GPU
processor = AutoProcessor.from_pretrained("StanfordAIMI/CheXagent-8b", trust_remote_code=True)
generation_config = GenerationConfig.from_pretrained("StanfordAIMI/CheXagent-8b")
model = AutoModelForCausalLM.from_pretrained("StanfordAIMI/CheXagent-8b", torch_dtype=dtype, trust_remote_code=True)
model = model.to(device)  # without this, the model stays on the CPU while the inputs below are on the GPU

# step 3: Fetch an example chest X-ray image
image_path = "https://upload.wikimedia.org/wikipedia/commons/3/3b/Pleural_effusion-Metastatic_breast_carcinoma_Case_166_%285477628658%29.jpg"
images = [Image.open(io.BytesIO(requests.get(image_path).content)).convert("RGB")]

# step 4: Generate a response for one anatomy ("Airway") of the Findings section
prompt = 'Describe "Airway"'
inputs = processor(images=images, text=f" USER: <s>{prompt} ASSISTANT: <s>", return_tensors="pt").to(device=device, dtype=dtype)
output = model.generate(**inputs, generation_config=generation_config)[0]
response = processor.tokenizer.decode(output, skip_special_tokens=True)
print(response)
```
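The snippet above queries a single anatomy. As a rough sketch of how the same interface could be reused to draft a fuller Findings section, the loop below simply repeats the call over a list of anatomies; the anatomy names are illustrative placeholders, not an official list from the model card.

```python
# Minimal sketch (not part of the official release): reuse the processor/model
# loaded above, describe several anatomies, and join the outputs into one text block.
anatomies = ["Airway", "Cardiac", "Lung", "Pleura"]  # hypothetical example list

findings = []
for anatomy in anatomies:
    prompt = f'Describe "{anatomy}"'
    inputs = processor(
        images=images,
        text=f" USER: <s>{prompt} ASSISTANT: <s>",
        return_tensors="pt",
    ).to(device=device, dtype=dtype)
    output = model.generate(**inputs, generation_config=generation_config)[0]
    findings.append(processor.tokenizer.decode(output, skip_special_tokens=True))

print("\n".join(findings))
```

Each iteration is an independent `generate` call, so runtime grows roughly linearly with the number of anatomies queried.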
## ✏️ Citation

```
@article{chexagent-2024,
  title={CheXagent: Towards a Foundation Model for Chest X-Ray Interpretation},
  author={Chen, Zhihong and Varma, Maya and Delbrouck, Jean-Benoit and Paschali, Magdalini and Blankemeier, Louis and Veen, Dave Van and Valanarasu, Jeya Maria Jose and Youssef, Alaa and Cohen, Joseph Paul and Reis, Eduardo Pontes and Tsai, Emily B. and Johnston, Andrew and Olsen, Cameron and Abraham, Tanishq Mathew and Gatidis, Sergios and Chaudhari, Akshay S and Langlotz, Curtis},
  journal={arXiv preprint arXiv:xxxx.xxxxx},
  url={https://arxiv.org/abs/xxxx.xxxxx},
  year={2024}
}
```