AshenClock committed
Commit 4c52594 · verified · 1 Parent(s): 21119e3

Update app.py

Files changed (1)
  1. app.py +35 -26
app.py CHANGED
@@ -3,6 +3,11 @@ from fastapi import FastAPI, HTTPException
 from huggingface_hub import InferenceClient
 from rdflib import Graph
 from pydantic import BaseModel
+import logging
+
+# Configurazione logging
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+logger = logging.getLogger(__name__)
 
 # Configurazione API Hugging Face
 API_KEY = os.getenv("HF_API_KEY")
@@ -14,36 +19,39 @@ RDF_FILE = "Ontologia.rdf"
 # Carica un riassunto del file RDF
 def load_rdf_summary():
     if os.path.exists(RDF_FILE):
-        g = Graph()
-        g.parse(RDF_FILE, format="xml")
-
-        classes = set()
-        properties = set()
-
-        for s, _, o in g.triples((None, None, None)):
-            if "Class" in str(o) or "rdfs:Class" in str(o):
-                classes.add(s)
-            if "Property" in str(o):
-                properties.add(s)
-
-        classes_summary = "\n".join([f"- Classe: {cls}" for cls in classes])
-        properties_summary = "\n".join([f"- Proprietà: {prop}" for prop in properties])
-        return f"Classi:\n{classes_summary}\n\nProprietà:\n{properties_summary}"
+        try:
+            g = Graph()
+            g.parse(RDF_FILE, format="xml")
+
+            classes = set()
+            properties = set()
+
+            for s, _, o in g.triples((None, None, None)):
+                if "Class" in str(o) or "rdfs:Class" in str(o):
+                    classes.add(s)
+                if "Property" in str(o):
+                    properties.add(s)
+
+            classes_summary = "\n".join([f"- Classe: {cls}" for cls in classes])
+            properties_summary = "\n".join([f"- Proprietà: {prop}" for prop in properties])
+            return f"Classi:\n{classes_summary}\n\nProprietà:\n{properties_summary}"
+        except Exception as e:
+            logger.error(f"Errore durante il parsing del file RDF: {e}")
+            return "Errore nel caricamento del file RDF."
     return "Nessun dato RDF trovato."
 
 rdf_context = load_rdf_summary()
-print("RDF Summary:", rdf_context) # Debug
+logger.info("RDF Summary: %s", rdf_context)
 
 # Valida le query SPARQL
 def validate_sparql_query(query, rdf_file_path):
     try:
         g = Graph()
-        # Caricamento del file RDF dal percorso
         g.parse(rdf_file_path, format="xml")
         g.query(query) # Prova ad eseguire la query
         return True
     except Exception as e:
-        print(f"Errore durante la validazione della query SPARQL: {e}")
+        logger.error(f"Errore durante la validazione della query SPARQL: {e}")
         return False
 
 # FastAPI app
@@ -70,8 +78,8 @@ Il tuo compito:
 
 async def generate_response(message, max_tokens, temperature):
     system_message = create_system_message(rdf_context)
-    print("System Message:", system_message) # Debug
-    print("User Message:", message) # Debug
+    logger.debug("System Message: %s", system_message)
+    logger.info("User Message: %s", message)
 
     messages = [
         {"role": "system", "content": system_message},
@@ -85,20 +93,21 @@ async def generate_response(message, max_tokens, temperature):
             temperature=temperature,
             max_tokens=max_tokens,
             top_p=0.7,
-            stream=False
+            stream=False,
+            timeout=60 # Aumenta il timeout
         )
-        print("Raw Response:", response) # Debug risposta grezza
+        logger.info("Raw Response: %s", response)
         return response['choices'][0]['message']['content'].replace("\n", " ").strip()
     except Exception as e:
-        print(f"Errore nell'elaborazione: {str(e)}")
+        logger.error(f"Errore nell'elaborazione: {str(e)}")
         raise HTTPException(status_code=500, detail=f"Errore nell'elaborazione: {str(e)}")
 
 # Endpoint per generare query SPARQL
 @app.post("/generate-query/")
 async def generate_query(request: QueryRequest):
     response = await generate_response(request.message, request.max_tokens, request.temperature)
-    print("Risposta generata dal modello:", response) # Debug
-
+    logger.info("Risposta generata dal modello: %s", response)
+
     if not (response.startswith("SELECT") or response.startswith("ASK")):
         return {
             "query": None,
@@ -116,4 +125,4 @@ async def generate_query(request: QueryRequest):
 # Endpoint di test
 @app.get("/")
 async def root():
-    return {"message": "Il server è attivo e pronto a generare query SPARQL!"}
+    return {"message": "Il server è attivo e pronto a generare query SPARQL!"}