Mauro24 committed on
Commit 946662e · verified · 1 Parent(s): 82e46e0

Update app.py

Files changed (1)
  1. app.py +125 -11
app.py CHANGED
@@ -1,13 +1,127 @@
 
- # Use a pipeline as a high-level helper
- from transformers import pipeline
- testo= """6.2_ KNIFE CHANGE When the software indicates a knife change (see figure 63), follow these steps: 1. Press the emergency button to turn off the motors. 2. Press the cover lift button on the cutting head (figure 63b). 3. Rotate the plate to access the two screws comfortably that secure the knife to the knife holder. 4. Use a 2.5 mm hex key to remove the 2 screws from the knife holder (fig. 64) by turning counterclock‐ wise. Figure 63—knife change notice Figure 63b —buttons that raise and lower the cover Cutting head cover opening button 6 — 63 NOTICE The knife must pass through all its guides. Repeat the insertion process into the guide sleeve if the knife appears to be forced or is not aligned correctly. 5. Detach the knife from the knife holder (fig. 65) by moving the knife downward. 6. Rotate the plate to a position that makes it easy to remove the knife: place the cutting edge towards the beam. 7. Pull the knife upwards from the guide sleeve, being careful not to cut yourself (fig. 66). The knife is sharp! WARNING The knife is sharp: exercise extreme caution and avoid touching the sharp edge. It is recom‐ mended to use specialized gloves for maintenance operations. NOTICE The knife cannot be secured if inserted the wrong way, as the screw holes would not align. Pay attention to the insertion direction of the knife: the noncutting edge must face towards the plate support. 8. Insert the new knife from the top to the bottom, ensuring that you align the guides located inside the guide sleeve and on the plate support. 9. Position the knife so that the holes on its upper part align with the holes on the knife holder. 10. Tighten the two securing screws using the 2.5 mm hex key. Figure 65—knife removal Figure 66—knife extraction 6 — 64 11. Lower the cutting head cover by using the dedicated button on the operator panel and release the pre‐ viously pressed emergency button. 12. Start the machine's motors using the appropriate switch on the computer console (see 4.2). 13. Open the VCUT program and navigate to the PARAMETERSKNIFE section. Click on the "NEW KNIFE" button to restore the values related to the knife size and consumption (figure 67). NOTICE Failure to confirm the knife change in the VCUT program can lead to inaccuracies in cutting. The knife dimensions entered in the VCUT program must always match the actual knife dimensions. PARAMETERS button KNIFE button Figure 67—setting “new knife ” in the VCUT software 6 — 65
- """
- query = "how to change the knife ?"
- messages=[
-
- {"role": "system", "content": "Sei un esperto tecnico specializzato in macchine da taglio. Rispondi in italiano alla domanda con chiarezza, fornendo dettagli tecnici accurati e, se possibile, esempi pratici. Usa un tono professionale ma accessibile."},
- {"role": "user", "content": f"Testo: '{testo}'\n\nDomanda: {query}"},
  ]
- pipe = pipeline("text-generation", model="deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B")
- pipe(messages)
 
+ # Assumed imports and API-key setup: neither appears in the diff as rendered,
+ # but the code below requires them to run.
+ import os
+ import faiss
+ import numpy as np
+ import gradio as gr
+ from openai import OpenAI
+ from transformers import MarianMTModel, MarianTokenizer
+
+ api_key = os.getenv("OPENAI_API_KEY")  # assumed: key provided via environment variable
+ client = OpenAI(api_key=api_key)
+ # Configure MarianMT for multilingual translation
+ model_name = "Helsinki-NLP/opus-mt-it-en"
+ translator_tokenizer = MarianTokenizer.from_pretrained(model_name)
+ translator_model = MarianMTModel.from_pretrained(model_name)
+
+ # File paths
+ faiss_index_path = "manual_embeddings.index"  # Path to the FAISS index file
+ chunk_file_path = "manual_chunks.txt"  # Path to the file containing the chunks
+
+ # Load the FAISS index
+ index = faiss.read_index(faiss_index_path)
+
+ # Load the chunks from the file
+ with open(chunk_file_path, "r", encoding="utf-8") as f:
+     chunks = f.read().split("\n\n")  # Each chunk is separated by two newlines
+
+ # Function to translate text
+ def translate_text(text, source_lang="it", target_lang="en"):
+     if source_lang == target_lang:
+         return text  # No translation needed
+     model_name = f"Helsinki-NLP/opus-mt-{source_lang}-{target_lang}"
+     tokenizer = MarianTokenizer.from_pretrained(model_name)
+     model = MarianMTModel.from_pretrained(model_name)
+     inputs = tokenizer(text, return_tensors="pt", padding=True)
+     translated = model.generate(**inputs)
+     return tokenizer.decode(translated[0], skip_special_tokens=True)
+
+ # Function to generate the query embedding using OpenAI
+ def get_query_embedding(query, model="text-embedding-ada-002"):
+     response = client.embeddings.create(model=model, input=[query])
+     embedding = response.data[0].embedding
+     return np.array(embedding, dtype="float32")  # Return the embedding as a float32 NumPy array (FAISS expects float32)
+
+
+ # Function to search the FAISS index
+ def search_in_faiss(query_embedding, k=3):
+     query_embedding = np.expand_dims(query_embedding, axis=0)  # Ensure a 2D matrix
+     distances, indices = index.search(query_embedding, k)
+     return distances, indices
+
+ # Function to retrieve the chunks associated with the FAISS indices
+ def get_chunks_from_indices(indices):
+     retrieved_chunks = []
+     for i in indices[0]:
+         if 0 <= i < len(chunks):
+             retrieved_chunks.append(chunks[i])
+         else:
+             retrieved_chunks.append(f"[Chunk non trovato per indice {i}]")
+     return retrieved_chunks
+
+ # Function to answer the question using GPT-3.5
+ def generate_response(query, context, max_tokens=550):
+     try:
+         response = client.chat.completions.create(
+             model="gpt-3.5-turbo",
+             messages=[
+                 {"role": "system", "content": (
+                     "Sei un tecnico specializzato in macchine tessili da taglio. Rispondi in italiano alla domanda in modo chiaro, "
+                     "usando un tono professionale ma accessibile. Se il testo non contiene informazioni sufficienti per rispondere, dillo in modo esplicito."
+                 )},
+                 {"role": "user", "content": (
+                     f"Domanda: {query}\n"
+                     f"Testo: {context}\n"
+                     "Rispondi in modo chiaro e operativo per un tecnico che deve svolgere la mansione."
+                 )}
+             ],
+             max_tokens=max_tokens,
+             temperature=0.5,
+         )
+         return response.choices[0].message.content
+     except Exception as e:
+         print(f"Errore durante la generazione della risposta: {e}")
+         return "Errore nell'elaborazione della risposta."
+
+ # Main search-and-answer function
+ def search_and_answer(query, source_lang="it", target_lang="en"):
+     # Translate the query into English
+     translated_query = translate_text(query, source_lang, target_lang)
+
+     # Get the query embedding
+     query_embedding = get_query_embedding(translated_query)
+
+     # Run the FAISS search
+     distances, indices = search_in_faiss(query_embedding)
+
+     # Retrieve the chunks associated with the indices
+     retrieved_chunks = get_chunks_from_indices(indices)
+     context = "\n".join(retrieved_chunks)
+
+     # Get the answer from GPT-3.5
+     response = generate_response(query, context)
+
+     # Translate the answer back into the original language (currently disabled)
+     # translated_response = translate_text(response, target_lang, source_lang)
+
+     return context, response  # translated_response
+
+ # Gradio interface
+ examples = [
+     ["Come cambio la lama della macchina?"],
+     ["Quali sono le precauzioni di sicurezza per usare la macchina?"],
+     ["Chi contattare in caso di problemi?"]
  ]
+ iface = gr.Interface(
+     fn=search_and_answer,
+     inputs=gr.Textbox(lines=2, placeholder="Inserisci la tua domanda qui..."),
+     outputs=[
+         gr.Textbox(label="Risultati della Ricerca"),
+         gr.Textbox(label="Risposta Generata")
+     ],
+     examples=examples,
+     title="Sistema RAG con Traduzione Multilingua",
+     description=(
+         "Inserisci una domanda in qualsiasi lingua supportata. Il sistema tradurrà la query, "
+         "eseguirà la ricerca e genererà una risposta nella lingua originale."
+     )
+ )
+
+ # Launch the app
+ iface.launch()
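
The updated app loads a prebuilt FAISS index (manual_embeddings.index) and the matching chunk file (manual_chunks.txt), but the commit does not show how they are produced. Below is a minimal sketch of one way to build them, assuming the manual is available as a plain-text file and is embedded with the same text-embedding-ada-002 model the app uses for queries; the script name build_index.py and the source file manual.txt are illustrative assumptions, not part of the repository.

# build_index.py -- hypothetical helper, not part of this commit.
# Builds manual_chunks.txt and manual_embeddings.index in the format app.py expects.
import os

import faiss
import numpy as np
from openai import OpenAI

client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))  # assumed: key provided via environment variable

# Split the manual into chunks separated by blank lines, mirroring app.py's reader.
with open("manual.txt", "r", encoding="utf-8") as f:  # "manual.txt" is an assumed source file
    chunks = [c.strip() for c in f.read().split("\n\n") if c.strip()]

with open("manual_chunks.txt", "w", encoding="utf-8") as f:
    f.write("\n\n".join(chunks))

# Embed every chunk with the same model the app uses for queries,
# preserving input order via the index field of each result.
response = client.embeddings.create(model="text-embedding-ada-002", input=chunks)
ordered = sorted(response.data, key=lambda d: d.index)
embeddings = np.array([d.embedding for d in ordered], dtype="float32")

# Store the vectors in a flat L2 index; app.py only calls index.search(),
# so any FAISS index with the same dimensionality (1536 for ada-002) would work.
index = faiss.IndexFlatL2(embeddings.shape[1])
index.add(embeddings)
faiss.write_index(index, "manual_embeddings.index")

Run once before launching the Space so that faiss.read_index and the chunk loader in app.py find both files in the working directory.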