adhvaithprasad committed
Commit c6f40e8 · 1 Parent(s): b09c694

added customer conversation

Files changed (3)
  1. app.py +8 -2
  2. customerSupport.py +41 -0
  3. requirements.txt +1 -0
app.py CHANGED
@@ -2,6 +2,7 @@ from fastapi import FastAPI
 from pydantic import BaseModel
 from calculator import calculate
 from sentimentAnalysis import sentimentAnalysis
+from customerSupport import customerConverstaion


 class User_input(BaseModel):
@@ -20,14 +21,19 @@ def greet_json():


 @app.post("/calculate")
-def operate(input:User_input):
+def calculate(input:User_input):
     res= calculate(input.operation, input.x, input.y)
     return res

 @app.post("/sentimentAnalysis")
-def operate(input:User_input):
+def sentimentAnalysis(input:User_input):
     res= sentimentAnalysis(input.sentence)
     return res

+@app.post("/getReply")
+def getReply(input:User_input):
+    res= customerConverstaion(input.sentence)
+    return res
+

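A quick way to exercise the new /getReply route once the Space is running; this is a sketch, not part of the commit. The host and port are assumptions (Hugging Face Spaces typically serve on 7860), and the body assumes the User_input model, whose fields are not shown in this hunk, accepts a request carrying only a `sentence` value; if `operation`, `x`, and `y` are declared as required they would have to be included as well.

```python
# Hypothetical client call for the new /getReply endpoint (not part of the commit).
# Assumes the app is reachable at localhost:7860 and that User_input validates
# a body containing only "sentence".
import requests

resp = requests.post(
    "http://localhost:7860/getReply",
    json={"sentence": "i have a question about cancelling order {{Order Number}}"},
    timeout=300,  # the handler loads the base model and adapter on every call
)
print(resp.status_code, resp.text)
```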
 
customerSupport.py ADDED
@@ -0,0 +1,41 @@
+
+from transformers import AutoModelForCausalLM,GenerationConfig
+from peft import AutoPeftModelForCausalLM
+from peft import PeftModel, PeftConfig
+
+def input_data_preprocessing(example):
+
+    processed_example = "<|system|>\n You are a support chatbot who helps with user queries chatbot who always responds in the style of a professional.\n<|user|>\n" + example["instruction"] + "\n<|assistant|>\n"
+
+    return processed_example
+
+
+
+def customerConverstaion(prompt):
+    config = PeftConfig.from_pretrained("DSU-FDP/customer-support")
+    base_model = AutoModelForCausalLM.from_pretrained("TheBloke/zephyr-7B-beta-GPTQ")
+    model = PeftModel.from_pretrained(base_model, "DSU-FDP/customer-support")
+    from transformers import AutoTokenizer,GPTQConfig
+    tokenizer=AutoTokenizer.from_pretrained(base_model, trust_remote_code=True)
+    tokenizer.padding_side = 'right'
+    tokenizer.pad_token = tokenizer.eos_token
+    tokenizer.add_eos_token = True
+    tokenizer.add_bos_token, tokenizer.add_eos_token
+    tokenizer = AutoTokenizer.from_pretrained("DSU-FDP/customer-support")
+    input_string = input_data_preprocessing(
+        {
+            "instruction": "i have a question about cancelling order {{Order Number}}",
+        }
+    )
+    inputs = tokenizer(input_string, return_tensors="pt").to("cuda")
+    generation_config = GenerationConfig(
+        do_sample=True,
+        top_k=1,
+        temperature=0.1,
+        max_new_tokens=256,
+        pad_token_id=tokenizer.eos_token_id
+    )
+    outputs = model.generate(**inputs, generation_config=generation_config)
+    return outputs
+
+
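As committed, customerConverstaion returns the raw tensor from model.generate() rather than decoded text, and the prompt argument is not used (the instruction is hardcoded). A minimal sketch of turning that output into a readable reply, assuming the DSU-FDP/customer-support tokenizer used above is the right one to decode with and that a CUDA device is available for generation:

```python
# Sketch (not part of the commit): decode the token IDs returned by
# customerConverstaion into text, using the same adapter tokenizer as above.
from transformers import AutoTokenizer
from customerSupport import customerConverstaion

tokenizer = AutoTokenizer.from_pretrained("DSU-FDP/customer-support")
outputs = customerConverstaion("i have a question about cancelling order {{Order Number}}")
# batch_decode returns one string per generated sequence; keep the first.
reply = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
print(reply)
```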
requirements.txt CHANGED
@@ -3,3 +3,4 @@ torch
 transformers
 pydantic
 uvicorn[standard]
+peft