Commit 35dc73a

Update Derm and CXR snippets to the latest state of feedback/discussions
1 parent fa5cead commit 35dc73a

File tree

1 file changed: +18 -24 lines changed

packages/tasks/src/model-libraries-snippets.ts

Lines changed: 18 additions & 24 deletions
@@ -96,23 +96,26 @@ retriever = BM25HF.load_from_hub("${model.id}")`,
 ];
 
 export const cxr_foundation = (model: ModelData): string[] => [
-	`# Install library
-!git clone https://github.com/Google-Health/cxr-foundation.git
-import tensorflow as tf, sys
+	`!git clone https://github.com/Google-Health/cxr-foundation.git
+import tensorflow as tf, sys, requests
 sys.path.append('cxr-foundation/python/')
 
 # Install dependencies
 major_version = tf.__version__.rsplit(".", 1)[0]
 !pip install tensorflow-text=={major_version} pypng && pip install --no-deps pydicom hcls_imaging_ml_toolkit retrying
 
-# Run inference
+# Load image (Stillwaterising, CC0, via Wikimedia Commons)
 from PIL import Image
-from clientside.clients import make_hugging_face_client
+from io import BytesIO
+image_url = "https://upload.wikimedia.org/wikipedia/commons/c/c8/Chest_Xray_PA_3-8-2010.png"
+response = requests.get(image_url, headers={'User-Agent': 'Demo'}, stream=True)
+response.raw.decode_content = True  # Ensure correct decoding
+img = Image.open(BytesIO(response.content)).convert('L')  # Convert to grayscale
 
+# Run inference
+from clientside.clients import make_hugging_face_client
 cxr_client = make_hugging_face_client('cxr_model')
-!wget -nc -q https://upload.wikimedia.org/wikipedia/commons/c/c8/Chest_Xray_PA_3-8-2010.png
-
-print(cxr_client.get_image_embeddings_from_images([Image.open("Chest_Xray_PA_3-8-2010.png")]))`,
+print(cxr_client.get_image_embeddings_from_images([img]))`,
 ];
 
 export const depth_anything_v2 = (model: ModelData): string[] => {
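Note: the image-loading half of the updated cxr_foundation snippet can be sanity-checked on its own, without cloning the cxr-foundation repo. A minimal sketch, assuming only requests and Pillow are installed (the clientside client itself still needs the cloned repo on sys.path):

import requests
from io import BytesIO
from PIL import Image

# Sample chest X-ray used by the snippet (Stillwaterising, CC0, via Wikimedia Commons).
image_url = "https://upload.wikimedia.org/wikipedia/commons/c/c8/Chest_Xray_PA_3-8-2010.png"
# The User-Agent header follows Wikimedia's user-agent guidance for scripted downloads.
response = requests.get(image_url, headers={"User-Agent": "Demo"}, stream=True)
response.raise_for_status()  # fail loudly if the download did not succeed
img = Image.open(BytesIO(response.content)).convert("L")  # grayscale, as in the snippet
print(img.size, img.mode)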
@@ -189,34 +192,25 @@ focallength_px = prediction["focallength_px"]`;
 };
 
 export const derm_foundation = (model: ModelData): string[] => [
-	`from PIL import Image
-from io import BytesIO
-from huggingface_hub import from_pretrained_keras
-import tensorflow as tf
-import requests
+	`from huggingface_hub import from_pretrained_keras
+import tensorflow as tf, requests
 
-response = requests.get("https://storage.googleapis.com/dx-scin-public-data/dataset/images/3445096909671059178.png")
-# Load the image into a PIL Image object
-image = Image.open(response.raw)
-
-buf = BytesIO()
-image.convert("RGB").save(buf, "PNG")
-image_bytes = buf.getvalue()
-# Format input
+# Load and format input
+IMAGE_URL = "https://storage.googleapis.com/dx-scin-public-data/dataset/images/3445096909671059178.png"
 input_tensor = tf.train.Example(
   features=tf.train.Features(
     feature={
       "image/encoded": tf.train.Feature(
-        bytes_list=tf.train.BytesList(value=[image_bytes])
+        bytes_list=tf.train.BytesList(value=[requests.get(IMAGE_URL, stream=True).content])
       )
     }
   )
 ).SerializeToString()
 
+# Load model and run inference
 loaded_model = from_pretrained_keras("google/derm-foundation")
-
 infer = loaded_model.signatures["serving_default"]
-output = infer(inputs=tf.constant([input_tensor]))`,
+print(infer(inputs=tf.constant([input_tensor])))`,
 ]
 
 const diffusersDefaultPrompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k";
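Note: the reworked derm_foundation snippet feeds the downloaded PNG bytes straight into the tf.train.Example instead of round-tripping through PIL. A minimal round-trip check, a sketch assuming TensorFlow and requests are installed (the URL and the "image/encoded" feature key are taken from the snippet above):

import requests
import tensorflow as tf

IMAGE_URL = "https://storage.googleapis.com/dx-scin-public-data/dataset/images/3445096909671059178.png"
png_bytes = requests.get(IMAGE_URL, stream=True).content

# Build the serialized example the same way the snippet does.
serialized = tf.train.Example(
  features=tf.train.Features(
    feature={
      "image/encoded": tf.train.Feature(
        bytes_list=tf.train.BytesList(value=[png_bytes])
      )
    }
  )
).SerializeToString()

# Parse it back and confirm the payload is the unmodified PNG (magic bytes \x89PNG).
decoded = tf.train.Example.FromString(serialized)
recovered = decoded.features.feature["image/encoded"].bytes_list.value[0]
assert recovered == png_bytes and recovered.startswith(b"\x89PNG")
print(len(recovered), "PNG bytes stored without a PIL re-encode")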
