File size: 897 Bytes
845c287
b464613
 
2d23981
845c287
 
 
 
b464613
 
2d23981
b464613
845c287
 
 
 
 
 
 
 
 
 
2d23981
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
from typing import Dict, List, Any
# from transformers import GPT2Tokenizer
# from model import GPT
import pipeline

class EndpointHandler():
    """Inference endpoint adapter.

    Forwards the request prompt to the project-local ``pipeline`` module and
    wraps its output in a response dict of the form ``{"Ans": ...}``.
    """

    def __init__(self, path: str = ""):
        # Preload everything needed at inference time.  Model/tokenizer
        # loading is currently delegated to the `pipeline` module; the
        # commented lines below show the originally intended setup.
        # model = GPT.from_pretrained(path)
        # tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
        # self.pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)
        pass  # removed unused placeholder local (`a = 1`)

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Run inference on a single request.

        Args:
            data: request payload. The prompt is popped from
                ``data["inputs"]``; if that key is absent the whole dict is
                used as the input. NOTE(review): ``pop`` mutates the caller's
                dict — presumably intentional per the handler convention.

        Returns:
            ``{"Ans": output}`` where ``output`` is whatever
            ``pipeline.infer()`` produced.  (The original annotation claimed
            ``List[Dict[str, Any]]``, but the code has always returned a
            single dict; the annotation is corrected to match.)
        """
        inputs = data.pop("inputs", data)
        pipeline.start = inputs  # hand the prompt to the pipeline module
        output = pipeline.infer()
        return {"Ans": output}