manoskary committed on
Commit
7310fa3
·
1 Parent(s): 34fa2b0

update code

Browse files
Files changed (3) hide show
  1. README.md +3 -1
  2. app.py +18 -2
  3. requirements.txt +4 -1
README.md CHANGED
@@ -8,6 +8,9 @@ sdk_version: 5.49.1
8
  python_version: 3.11
9
  app_file: app.py
10
  pinned: false
 
 
 
11
  license: mit
12
  short_description: Explain Music Scores through analysis.
13
  ---
@@ -21,4 +24,3 @@ Scoreprompts is a tool that leverages large language models to analyze and expla
21
  - Upload music scores in various formats.
22
  - Utilize large language models to generate explanations.
23
  - User-friendly interface for easy interaction.
24
-
 
8
  python_version: 3.11
9
  app_file: app.py
10
  pinned: false
11
+ hf_oauth: true
12
+ hf_oauth_scopes:
13
+ - inference-api
14
  license: mit
15
  short_description: Explain Music Scores through analysis.
16
  ---
 
24
  - Upload music scores in various formats.
25
  - Utilize large language models to generate explanations.
26
  - User-friendly interface for easy interaction.
 
app.py CHANGED
@@ -100,6 +100,17 @@ def _extract_first_json(text: str) -> dict:
100
  return obj
101
 
102
 
 
 
 
 
 
 
 
 
 
 
 
103
  def _chat_json(
104
  client: InferenceClient,
105
  system_prompt: str,
@@ -265,6 +276,7 @@ def run_pipeline(
265
  max_tokens_writer: int,
266
  temperature_writer: float,
267
  force_cpu: bool,
 
268
  ):
269
  logs: list[str] = []
270
 
@@ -276,6 +288,7 @@ def run_pipeline(
276
  log_fn("Starting run")
277
  if not file_path:
278
  raise ValueError("No file uploaded.")
 
279
  score_path = Path(file_path)
280
  score_id = score_path.stem
281
 
@@ -500,7 +513,7 @@ def run_pipeline(
500
  chunk = bundles[0]
501
  log_fn(f"Schema chunk measures={chunk['chunk']['measure_start']}..{chunk['chunk']['measure_end']}")
502
 
503
- client = InferenceClient(model_id)
504
  t0 = time.time()
505
  bar_facts = _chat_json(
506
  client,
@@ -564,6 +577,9 @@ def run_pipeline(
564
  with gr.Blocks(title="ScorePrompts Prompt Builder") as demo:
565
  gr.Markdown("# ScorePrompts Prompt Builder (Gradio)")
566
  gr.Markdown("Note: this demo only reports the first chunk (default bars_per_chunk). Increase the Bars per chunk slider to include more measures.")
 
 
 
567
  with gr.Row():
568
  file_in = gr.File(label="Upload MusicXML (.musicxml/.xml/.mxl)", type="filepath")
569
  model_id = gr.Textbox(
@@ -611,4 +627,4 @@ with gr.Blocks(title="ScorePrompts Prompt Builder") as demo:
611
  )
612
 
613
  if __name__ == "__main__":
614
- demo.queue().launch()
 
100
  return obj
101
 
102
 
103
+ def _resolve_hf_token(oauth_token: gr.OAuthToken | None) -> str:
104
+ if oauth_token and getattr(oauth_token, "token", None):
105
+ return oauth_token.token
106
+ if os.getenv("SPACE_ID") or os.getenv("SPACE_HOST"):
107
+ raise ValueError("Please sign in with Hugging Face to run inference.")
108
+ env_token = os.getenv("HF_TOKEN")
109
+ if env_token:
110
+ return env_token
111
+ raise ValueError("No Hugging Face token available. Please sign in with the Login button.")
112
+
113
+
114
  def _chat_json(
115
  client: InferenceClient,
116
  system_prompt: str,
 
276
  max_tokens_writer: int,
277
  temperature_writer: float,
278
  force_cpu: bool,
279
+ oauth_token: gr.OAuthToken | None = None,
280
  ):
281
  logs: list[str] = []
282
 
 
288
  log_fn("Starting run")
289
  if not file_path:
290
  raise ValueError("No file uploaded.")
291
+ hf_token = _resolve_hf_token(oauth_token)
292
  score_path = Path(file_path)
293
  score_id = score_path.stem
294
 
 
513
  chunk = bundles[0]
514
  log_fn(f"Schema chunk measures={chunk['chunk']['measure_start']}..{chunk['chunk']['measure_end']}")
515
 
516
+ client = InferenceClient(model_id, token=hf_token)
517
  t0 = time.time()
518
  bar_facts = _chat_json(
519
  client,
 
577
  with gr.Blocks(title="ScorePrompts Prompt Builder") as demo:
578
  gr.Markdown("# ScorePrompts Prompt Builder (Gradio)")
579
  gr.Markdown("Note: this demo only reports the first chunk (default bars_per_chunk). Increase the Bars per chunk slider to include more measures.")
580
+ gr.Markdown("Sign in with Hugging Face to enable Inference API access.")
581
+ with gr.Row():
582
+ gr.LoginButton()
583
  with gr.Row():
584
  file_in = gr.File(label="Upload MusicXML (.musicxml/.xml/.mxl)", type="filepath")
585
  model_id = gr.Textbox(
 
627
  )
628
 
629
  if __name__ == "__main__":
630
+ demo.queue().launch()
requirements.txt CHANGED
@@ -36,6 +36,10 @@ music21>=8.0.0
36
 
37
  # Gradio and Hugging Face Spaces
38
  gradio>=4.0.0
 
 
 
 
39
 
40
  # Experiment tracking
41
  wandb>=0.13.0
@@ -63,4 +67,3 @@ git+https://github.com/manoskary/analysisgnn.git@musicbert_integration
63
 
64
  # ScorePrompts dependency as a private GitHub repo
65
  # git+https://x-access-token:${GITHUB_TOKEN}@github.com/manoskary/scoreprompts.git@main
66
-
 
36
 
37
  # Gradio and Hugging Face Spaces
38
  gradio>=4.0.0
39
+ authlib==1.6.6
40
+ cffi==2.0.0
41
+ cryptography==46.0.4
42
+ itsdangerous==2.2.0
43
 
44
  # Experiment tracking
45
  wandb>=0.13.0
 
67
 
68
  # ScorePrompts dependency as a private GitHub repo
69
  # git+https://x-access-token:${GITHUB_TOKEN}@github.com/manoskary/scoreprompts.git@main