okeowo1014 committed
Commit: d76b25e • 1 Parent(s): fe64ea7
Update prd.py
prd.py CHANGED
@@ -1,12 +1,15 @@
 from transformers import TFAutoModel, AutoTokenizer
+from huggingface_hub import from_pretrained_keras
+
+model = from_pretrained_keras("okeowo1014/kerascatanddog")
 
 # Load the model from the Hugging Face Model Hub
-model = TFAutoModel.from_pretrained('okeowo1014/catsandogs')
+# model = TFAutoModel.from_pretrained('okeowo1014/catsandogs')
 
 # Now you can use the loaded model for inference or fine-tuning
 # Example prediction
 new_texts = ["I'm feeling great!", "This book is boring."]
-tokenizer=tokenizer = AutoTokenizer.from_pretrained('okeowo1014/
+tokenizer=tokenizer = AutoTokenizer.from_pretrained('okeowo1014/kerascatanddog')
 sequences = tokenizer.texts_to_sequences(new_texts)
 padded_sequences = pad_sequences(sequences, maxlen=10, padding='post', truncating='post')
 predictions = model.predict(padded_sequences)
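As committed, the new version still has a few problems: `tokenizer=tokenizer =` is a redundant chained assignment, `texts_to_sequences()` is a Keras `Tokenizer` method that transformers tokenizers do not provide, and `pad_sequences` is never imported. Below is a minimal runnable sketch of the intended inference flow, not the committed code; it assumes the `okeowo1014/kerascatanddog` repo hosts both a Keras model and a compatible tokenizer, and keeps the committed max length of 10.

# Hedged sketch of the intended inference flow (assumptions noted above).
from huggingface_hub import from_pretrained_keras
from transformers import AutoTokenizer

# Load the Keras model and tokenizer from the Hub (assumes the repo
# okeowo1014/kerascatanddog actually provides both artifacts).
model = from_pretrained_keras("okeowo1014/kerascatanddog")
tokenizer = AutoTokenizer.from_pretrained("okeowo1014/kerascatanddog")

new_texts = ["I'm feeling great!", "This book is boring."]

# transformers tokenizers have no texts_to_sequences(); calling the
# tokenizer directly both encodes and pads, replacing pad_sequences.
encoded = tokenizer(
    new_texts,
    padding="max_length",
    truncation=True,
    max_length=10,
    return_tensors="np",
)

# Assumes the Keras model takes a single batch of padded token IDs,
# as the committed maxlen=10 padding suggests.
predictions = model.predict(encoded["input_ids"])
print(predictions)

If the model was instead trained with a Keras `Tokenizer` vocabulary rather than a Hugging Face one, the original `texts_to_sequences`/`pad_sequences` path would be needed, along with that tokenizer object and an import of `pad_sequences` from `keras.preprocessing.sequence`.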