from maskrcnn_benchmark.modeling.language_backbone import build_tokenizer

if __name__ == '__main__':
    # Build the CLIP tokenizer via the language-backbone factory.
    tokenizer2 = build_tokenizer("clip")

    # Tokenize a GLIP-style detection caption: candidate class names
    # separated by periods.
    tokenized2 = tokenizer2(
        ["Detect: fish. jellyfish. penguin. puffin. shark. starfish. round stingray"]
    )
    print(tokenized2)