adhvaithprasad committed on
Commit 4ac374e · 1 Parent(s): 383f3e0

removed last token

Files changed (2)
  1. app.py +46 -0
  2. customerSupport.py +4 -1
app.py CHANGED
@@ -25,6 +25,31 @@ def calculate_func(input:User_input):
    res= calculate(input.operation, input.x, input.y)
    return res

+import requests
+# def query(API_URL, headers, payload):
+#     response = requests.post(API_URL, headers=headers, json=payload)
+#     print(response)
+#     return response
+@app.post("/HFAPI")
+def HF_API():
+    # API_TOKEN=""
+    # API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
+    # headers = {"Authorization": f"Bearer {API_TOKEN}"}
+
+    # data = query(API_URL,headers, {
+    #     "inputs": "Can you please let us know more details about your ",
+    # })
+    API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"
+    headers = {"Authorization": "Bearer ......................q"}
+    def query(payload):
+        response = requests.post(API_URL, headers=headers, json=payload)
+        return response.json()
+    output = query({
+        "inputs": "Can you please let us know more details about India? ",
+    })
+    return output[0]["generated_text"]
+
+
 @app.post("/sentimentAnalysis")
 def sentimentAnalysis_func(input:User_input):
    res= sentimentAnalysis(input.sentence)
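A note on the /HFAPI endpoint added above: the Authorization header embeds a (partially redacted) token directly in app.py. Below is a minimal sketch of the same text-generation call with the token read from an environment variable instead; the HF_API_TOKEN variable name and the 30-second timeout are assumptions, while the model URL and the response shape are the ones used in the hunk.

import os
import requests

API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2"

def query_gpt2(prompt: str) -> str:
    # Assumed convention: the Inference API token comes from the environment,
    # so it never has to be committed to the repository.
    token = os.environ["HF_API_TOKEN"]
    headers = {"Authorization": f"Bearer {token}"}
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt}, timeout=30)
    response.raise_for_status()
    # The text-generation task returns a list of {"generated_text": ...} objects.
    return response.json()[0]["generated_text"]

# Same prompt as the /HFAPI endpoint in the hunk above.
print(query_gpt2("Can you please let us know more details about India? "))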
 
@@ -34,6 +59,27 @@ def sentimentAnalysis_func(input:User_input):
 def getReply_func(input:User_input):
    res= customerConverstaion(input.sentence)
    return res
+@app.post("/hf_spaces")
+def HF_interact():
+    from huggingface_hub import HfApi
+    # Initialize API client
+    api = HfApi()
+
+    # Replace these with your values
+    repo_id = 'DSU-FDP/Sample-API'
+    token = ''
+
+    # Authenticate
+
+    api.pause_space(repo_id=repo_id)
+
+
+    # List all Spaces (not pausing, just showing how to interact)
+    spaces = api.list_spaces()
+    print(spaces)
+
+    # Example action: delete a space (be cautious with this!)
+    # api.delete_repo(repo_id, token=token)

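Two caveats on the /hf_spaces endpoint: calling api.pause_space() from inside the Space that serves this API pauses that same Space, and the collected token value is never passed to HfApi, so the call relies on whatever credentials are already cached. Below is a minimal sketch of driving the same Space operations from a separate client with the token wired in explicitly; the HF_TOKEN environment variable, the restart/runtime-check steps, and the author/limit filters are assumptions, while repo_id is the one from the hunk.

import os
from huggingface_hub import HfApi

# Assumed convention: a write-scoped token supplied via the environment.
api = HfApi(token=os.environ["HF_TOKEN"])
repo_id = "DSU-FDP/Sample-API"

# Pause the Space, check its runtime state, then bring it back up.
api.pause_space(repo_id=repo_id)
print(api.get_space_runtime(repo_id).stage)
api.restart_space(repo_id=repo_id)

# list_spaces() returns an iterator of SpaceInfo objects.
for space in api.list_spaces(author="DSU-FDP", limit=5):
    print(space.id)

# Destructive example from the hunk, left commented out on purpose:
# api.delete_repo(repo_id, repo_type="space")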
customerSupport.py CHANGED
@@ -12,8 +12,11 @@ def input_data_preprocessing(example):


 def customerConverstaion(prompt):
+    # Check GPU availability
+    print("Available GPU devices:", torch.cuda.device_count())
+    print("Name of the first available GPU:", torch.cuda.get_device_name(0))
    config = PeftConfig.from_pretrained("DSU-FDP/customer-support")
-   base_model = AutoModelForCausalLM.from_pretrained("TheBloke/zephyr-7B-beta-GPTQ")
+   base_model = AutoModelForCausalLM.from_pretrained("TheBloke/zephyr-7B-beta-GPTQ", device_map='cuda')
    model = PeftModel.from_pretrained(base_model, "DSU-FDP/customer-support")
    from transformers import AutoTokenizer,GPTQConfig
    tokenizer=AutoTokenizer.from_pretrained(base_model, trust_remote_code=True)
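The GPU check added to customerConverstaion() calls torch.cuda.get_device_name(0) unconditionally, which raises on a CPU-only machine, and the hunk does not show an import torch. Below is a minimal guarded sketch of the same check, assuming torch is imported at the top of customerSupport.py; the CPU fallback is an assumption, and GPTQ-quantized weights generally still need a GPU.

import torch

if torch.cuda.is_available():
    # Same diagnostics as the committed lines, but only when CUDA is present.
    print("Available GPU devices:", torch.cuda.device_count())
    print("Name of the first available GPU:", torch.cuda.get_device_name(0))
    device_map = "cuda"
else:
    print("No CUDA device detected; falling back to CPU.")
    device_map = "cpu"

# device_map can then be passed to AutoModelForCausalLM.from_pretrained(...)
# in place of the hard-coded device_map='cuda'.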