Spaces:
Sleeping
Sleeping
Commit
·
9e0755a
1
Parent(s):
ebfec2a
Create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,153 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import streamlit as st
|
2 |
+
import json
|
3 |
+
from peft import AutoPeftModelForCausalLM
|
4 |
+
from transformers import GenerationConfig, AutoTokenizer
|
5 |
+
import torch
|
6 |
+
import re
|
7 |
+
|
8 |
+
|
9 |
+
def process_data_sample(example):
    """Build the instruction-tuned prompt for one example.

    Wraps example["Instruction"] in the fixed template the model was
    fine-tuned on, ending with the "### Response :" marker that the
    output parser later searches for.
    """
    prompt_prefix = (
        "You have to generate api developer documentation json object "
        "which helps the user to create api documentation. ### Instruction : "
    )
    return prompt_prefix + example["Instruction"] + ". ### Response :"
|
12 |
+
|
13 |
+
def processing_ouput(model_response):
    """Extract the JSON that follows the "### Response :" marker and reshape it.

    The model-generated JSON is merged into a fixed skeleton (placeholder
    name, hard-coded Request_Body and Edge_Cases sections) and returned as
    a pretty-printed JSON string.

    Raises json.JSONDecodeError if nothing parseable follows the marker,
    and KeyError if an expected field is missing from the generated JSON.
    """
    # Grab everything after the marker; DOTALL lets '.' span newlines.
    marker_match = re.search(r"### Response :(.*)", model_response, re.DOTALL)
    payload = marker_match.group(1).strip() if marker_match else ''

    print(payload)

    generated = json.loads(payload)

    reshaped = {
        "Name": "API Name will come here",
        "Endpoint": generated["Endpoint"],
        "Method": generated["Method"],
        "Description": generated["Description"],
        "Headers": generated["Headers"],
        "Request_Body": {
            # NOTE(review): fixed text, not taken from the model output.
            "ProductID": "Unique identifier of the product for which the price is to be updated.",
            "NewPrice": "New price to be set for the specified product."
        },
        "Response_Body": generated["Response_Body"],
        "Steps_To_Use": generated["Steps_To_Use"],
        "Edge_Cases": {
            # NOTE(review): fixed text, not taken from the model output.
            "Invalid_ProductID": "If the specified product ID is invalid, an error message will be returned.",
            "Negative_Price": "If the new price is negative, an error message will be returned."
        },
        "Exceptions": generated["Exceptions"],
        "Usage_Examples": generated["Usage_Examples"]
    }
    return json.dumps(reshaped, indent=4)
|
54 |
+
|
55 |
+
|
56 |
+
|
57 |
+
def model_function(input_data):
    """Generate API documentation for *input_data* with the fine-tuned model.

    Parameters
    ----------
    input_data : dict | str
        The user's API description.  Dicts (as passed by the Streamlit UI)
        are serialised to JSON before being placed in the prompt; the
        original code concatenated the dict directly into a string, which
        raises TypeError.

    Returns
    -------
    str
        Pretty-printed JSON documentation from ``processing_ouput``.
    """
    tokenizer = AutoTokenizer.from_pretrained("Shubhang999/shu3")

    # The prompt template expects a string; serialise dict inputs first.
    instruction = input_data if isinstance(input_data, str) else json.dumps(input_data)
    inp_str = process_data_sample({"Instruction": instruction})
    inputs = tokenizer(inp_str, return_tensors="pt").to("cuda")

    # NOTE(review): the adapter model is re-loaded on every call; consider
    # caching it (e.g. st.cache_resource) if latency matters.
    model = AutoPeftModelForCausalLM.from_pretrained(
        "Shubhang999/shu3",
        low_cpu_mem_usage=True,
        return_dict=True,
        torch_dtype=torch.float16,
        device_map="cuda")

    generation_config = GenerationConfig(
        do_sample=True,
        top_k=1,
        temperature=0.1,
        max_new_tokens=800,
        pad_token_id=tokenizer.eos_token_id
    )

    outputs = model.generate(**inputs, generation_config=generation_config)
    # BUG FIX: the generated token ids must be decoded to text before the
    # regex-based post-processing can run (the decode call was commented
    # out and the raw tensor was passed to processing_ouput).
    decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return processing_ouput(decoded)
|
87 |
+
|
88 |
+
|
89 |
+
# Streamlit UI code
|
90 |
+
def key_value_input(container, label, key, value, index):
    """Render one key/value row: a common-header selectbox plus a value box.

    Parameters mirror the stored session-state entry: *key*/*value* are the
    current contents of row *index* under *label*.  Returns the (key, value)
    pair currently entered by the user.
    """
    common_headers = [
        "Host", "User-Agent", "Accept", "Accept-Language",
        "Accept-Encoding", "Connection", "Referer", "Cookie",
        "Authorization", "Cache-Control", "Content-Type"
    ]
    col1, col2 = container.columns(2)

    # BUG FIX: the options list starts with '' (blank), so a stored header
    # sits at common_headers.index(key) + 1.  The original indexed against
    # common_headers directly, pre-selecting the wrong entry (off by one:
    # e.g. a saved "User-Agent" rendered as "Host").
    options = [''] + common_headers
    selected_index = options.index(key) if key in options else 0
    key_input = col1.selectbox(f"{label} Key {index}", options=options, index=selected_index, key=f"{label}_key_{index}")
    value_input = col2.text_input(f"{label} Value {index}", value, key=f"{label}_value_{index}")

    return key_input, value_input
|
103 |
+
|
104 |
+
def dynamic_key_value_pairs(label):
    """Render a growable list of key/value rows for *label*.

    The rows live in st.session_state[label]; an "Add more" button appends
    a fresh blank row (visible on the next rerun).  Returns the list of
    (key, value) tuples currently entered.
    """
    container = st.container()

    # Seed the session state with one blank row on first render.
    if label not in st.session_state:
        st.session_state[label] = [{'key': '', 'value': ''}]

    collected = [
        key_value_input(container, label, entry['key'], entry['value'], row_idx)
        for row_idx, entry in enumerate(st.session_state[label])
    ]

    if container.button(f"Add more to {label}"):
        st.session_state[label].append({'key': '', 'value': ''})

    return collected
|
119 |
+
|
120 |
+
# ---------------------------------------------------------------------------
# Streamlit UI layout
# ---------------------------------------------------------------------------
st.title('API Documentation Generator')

# Text input for API Endpoint
api_endpoint = st.text_input("API Endpoint", "https://example.com/api")

# Dropdown for API Method
api_methods = ["GET", "POST", "PUT", "DELETE", "PATCH"]
api_method = st.selectbox("API Method", api_methods)

# Dynamic key-value pairs for Request Header, Body, and Response Object
request_header_pairs = dynamic_key_value_pairs("Request Header")
request_body_pairs = dynamic_key_value_pairs("Request Body")
response_object_pairs = dynamic_key_value_pairs("Response Object")

# Button to Generate Documentation
if st.button('Generate Documentation'):
    # Keep only rows where both key and value were filled in.
    request_header = {k: v for k, v in request_header_pairs if k and v}
    request_body = {k: v for k, v in request_body_pairs if k and v}
    response_object = {k: v for k, v in response_object_pairs if k and v}

    user_input = {
        "API_Endpoint": api_endpoint,
        "API_Method": api_method,
        "Request_Object": request_header,
        # BUG FIX: the request body was collected from the UI but never
        # forwarded to the model, silently discarding the user's input.
        "Request_Body": request_body,
        "Response_Object": response_object
    }

    # Call the model function with the processed input
    documentation = model_function(user_input)

    # Display the model output on the UI
    st.write(documentation)
|
153 |
+
|