diff --git a/.Jules/palette.md b/.Jules/palette.md
new file mode 100644
index 0000000..84ed918
--- /dev/null
+++ b/.Jules/palette.md
@@ -0,0 +1,7 @@
+## 2026-02-02 - [Clarifying Ambiguous Controls]
+**Learning:** Users can misinterpret 'Intensity' when it inversely correlates to frequency (e.g. 1 = high intensity/every beat, 16 = low intensity).
+**Action:** Use 'Interval' or 'Frequency' for time-based controls and explicitly state the relationship (e.g. 'Every Nth beat') in help text.
+
+## 2026-02-02 - [Gradio Feedback]
+**Learning:** Long-running processes in Gradio without 'gr.Progress' leave users uncertain if the app is frozen.
+**Action:** Always add 'progress=gr.Progress()' to blocking functions and use 'gr.Info'/'gr.Error' instead of returning status strings.
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bacca8e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+__pycache__/
+output/
diff --git a/app.py b/app.py
index 16bee30..f3fea46 100644
--- a/app.py
+++ b/app.py
@@ -14,7 +14,8 @@
 StatusResult : TypeAlias = Tuple[str, str]
 
 
-def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity : float, start_time : float, end_time : float, output_filename : str, direction : str, playback_speed_str: str) -> StatusResult:
+def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity : float, start_time : float, end_time : float, output_filename : str, direction : str, playback_speed_str: str, progress=gr.Progress()) -> StatusResult:
+    progress(0, desc="Starting...")
     temp_dir = tempfile.mkdtemp()
     video_paths = []
 
@@ -28,7 +29,7 @@ def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity :
 
     if len(video_paths) == 0:
         shutil.rmtree(temp_dir)
-        return None, 'Error: No valid video files uploaded'
+        raise gr.Error("No valid video files uploaded. Please select at least one MP4 file.")
 
     #create output folder
     script_dir = os.path.dirname(os.path.abspath(__file__))
@@ -55,12 +56,14 @@ def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity :
     if end_time > 0:
         end_time_value = end_time
 
+    progress(0.1, desc="Analyzing audio beats...")
     beat_times, _, _ = analyze_beats(
         mp3_file,
         start_time=start_time,
         end_time=end_time_value
     )
 
+    progress(0.3, desc="Processing video cuts...")
     result_path = create_music_video(
         mp3_file,
         video_paths,
@@ -73,6 +76,7 @@ def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity :
         speed_factor=speed_factor
     )
 
+    progress(0.9, desc="Finalizing...")
     #move video to output
     shutil.move(result_path, output_path)
 
@@ -80,6 +84,7 @@ def process_video(mp3_file : str, video_files : VideoFilesInput, cut_intensity :
     beats_used = len(beat_times[::cut_intensity_int])
     status_msg = f"Successfully created video with {beats_used} cuts from {len(beat_times)} detected beats."
+    gr.Info(status_msg)
 
     shutil.rmtree(temp_dir)
     return output_path, status_msg
 
@@ -121,8 +126,8 @@ def create_ui() -> gr.Blocks:
                maximum=16,
                value=4,
                step=1,
-                label='Cut Intensity',
-                info='Number of beats until next cut.'
+                label='Cut Interval (Beats)',
+                info='Cut every Nth beat. 1 = every beat, 4 = every 4th beat.'
            )
 
            direction = gr.Radio(
diff --git a/music_video_cutter.py b/music_video_cutter.py
index 4fabfd6..46464cc 100644
--- a/music_video_cutter.py
+++ b/music_video_cutter.py
@@ -232,7 +232,7 @@ def create_music_video(mp3_file : str, video_files : VideoList, beat_times : Bea
     final_video = final_video.with_audio(audio)
 
     # Stelle sicher, dass das finale Video nicht länger als die Audiospur ist.
-    final_video = final_video.subclipped(0, audio_duration)
+    final_video = final_video.subclipped(0, min(final_video.duration, audio_duration))
 
     # Schreibe die finale Videodatei.
     final_video.write_videofile(
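
The `.Jules/palette.md` entry above prescribes `progress=gr.Progress()` plus `gr.Info`/`gr.Error` for long-running handlers, and the `app.py` hunk applies that inside `process_video`. For reference, here is a minimal, self-contained sketch of the same pattern, assuming a recent Gradio 4.x API; the handler and component names are illustrative and not taken from this repository.

```python
# Illustrative sketch (not part of the patch): progress reporting and
# error/info feedback in a blocking Gradio handler.
import time

import gradio as gr


def run_job(n_steps: int, progress=gr.Progress()) -> str:
    # Gradio injects a Progress tracker because of the gr.Progress() default argument.
    if n_steps <= 0:
        # Raising gr.Error aborts the handler and shows a modal in the UI,
        # instead of smuggling the error through a returned status string.
        raise gr.Error("Steps must be a positive integer.")
    progress(0, desc="Starting...")
    for i in range(int(n_steps)):
        time.sleep(0.1)  # stand-in for real work (beat analysis, video cutting, ...)
        progress((i + 1) / n_steps, desc=f"Step {i + 1}/{n_steps}")
    gr.Info("Job finished.")  # non-blocking success toast
    return f"Completed {int(n_steps)} steps."


with gr.Blocks() as demo:
    steps = gr.Number(value=10, precision=0, label="Steps")
    result = gr.Textbox(label="Result")
    gr.Button("Run").click(run_job, inputs=steps, outputs=result)

if __name__ == "__main__":
    demo.launch()
```

This mirrors the `process_video` changes: coarse `progress(...)` checkpoints around the blocking calls, `gr.Error` for the empty-upload case, and `gr.Info` on success while the status string is still returned for the output textbox.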