jkorstad committed
Commit 0f30cb8 · verified · 1 Parent(s): cb90215

Update app.py

Files changed (1)
  1. app.py +5 -29
app.py CHANGED
@@ -2,40 +2,14 @@ import os
 import shutil
 import gradio as gr
 from transformers import ReactCodeAgent, HfEngine, Tool
-import pandas as pd
+import pandas as pd
+import spaces
+import torch
 
 from gradio import Chatbot
 from streaming import stream_to_gradio
 from huggingface_hub import login
 from gradio.data_classes import FileData
-import torch
-
-
-# Initialize ZeroGPU
-if torch.cuda.is_available():
-    torch.backends.cudnn.benchmark = True
-    torch.backends.cudnn.enabled = True
-    torch.backends.cudnn.allow_tf32 = True
-
-import os
-os.environ['WANDB_DISABLED'] = 'true'
-os.environ['TOKENIZERS_PARALLELIZM'] = 'false'
-os.environ['TRANSFORMERS_PARALLELIZM'] = 'false'
-os.environ['HF_REPO_REWRITE'] = 'true'
-
-from zero_gpu import ZeroGpu
-ZeroGpu(use_cpu=False)
-
-
-# Set HF_HOME
-os.environ['HF_HOME'] = 'C:/Users/jpkor/data/.huggingface'
-
-
-# Set HUGGINGFACE_HUB_CACHE
-os.environ['HUGGINGFACE_HUB_CACHE'] = 'C:/Users/jpkor/data/.huggingface/cache'
-
-
-login(os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)
 
 
 llm_engine = HfEngine("meta-llama/Meta-Llama-3.1-70B-Instruct")
@@ -79,6 +53,7 @@ Parent = mother, father
 Child = daughter, son, stepdaughter, stepson
 Some children travelled only with a nanny, therefore parch=0 for them."""
 
+@spaces.GPU
 def get_images_in_directory(directory):
     image_extensions = {'.png', '.jpg', '.jpeg', '.gif', '.bmp', '.tiff'}
 
@@ -89,6 +64,7 @@ def get_images_in_directory(directory):
             image_files.append(os.path.join(root, file))
     return image_files
 
+@spaces.GPU
 def interact_with_agent(file_input, additional_notes):
     shutil.rmtree("./figures")
     os.makedirs("./figures")
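
Note on the pattern: the commit drops the import-time CUDA/cuDNN setup, environment tweaks, and the custom ZeroGpu initializer in favor of Hugging Face's spaces package, whose @spaces.GPU decorator attaches a ZeroGPU device only while the decorated function runs. Below is a minimal sketch of that pattern, assuming a standard Gradio app on a ZeroGPU Space; the describe_device function and the Interface wiring are illustrative placeholders, not this Space's actual code.

# Minimal ZeroGPU sketch (illustrative only, not this repo's app.py):
# GPU work lives inside a function decorated with @spaces.GPU, so the
# Space only holds a GPU for the duration of that call.
import gradio as gr
import spaces
import torch


@spaces.GPU  # request a ZeroGPU slot while this function executes
def describe_device(prompt: str) -> str:
    # Inside the decorated call, CUDA should be available on a ZeroGPU Space;
    # elsewhere this simply falls back to CPU.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"[{device}] {prompt}"


demo = gr.Interface(fn=describe_device, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()

In the diff itself, the same decorator is applied to get_images_in_directory and interact_with_agent, the two functions this app calls at request time.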