This commit is contained in:
2026-02-05 23:43:19 -05:00
parent 168e6d5a61
commit 42a1ddf458
12 changed files with 746 additions and 44 deletions

View File

@@ -0,0 +1,93 @@
/**
* MergedWaveform — unified progress bar + waveform display
*
* Shows waveform bars coloured to indicate played vs unplayed portions.
* The played section doubles as the progress indicator, replacing the
* separate progress bar. Click-to-seek is supported.
*/
import { createSignal, createEffect, onCleanup } from "solid-js"
import { getWaveformData, getWaveformDataSync } from "../utils/audio-waveform"
type MergedWaveformProps = {
/** Audio URL — used to generate or retrieve waveform data */
audioUrl: string
/** Current playback position in seconds */
position: number
/** Total duration in seconds */
duration: number
/** Whether audio is currently playing — controls the played-section colour */
isPlaying: boolean
/** Number of data points / columns (defaults to 64 when omitted) */
resolution?: number
/** Callback when user clicks to seek; receives the target position in seconds */
onSeek?: (seconds: number) => void
}
/** Block characters for waveform amplitude levels, lowest amplitude first */
const BARS = [".", "-", "~", "=", "#"]
/**
 * Unified progress bar + waveform display.
 *
 * Renders amplitude glyphs coloured played vs unplayed; the played prefix
 * doubles as the progress indicator. Clicking the box seeks via onSeek.
 */
export function MergedWaveform(props: MergedWaveformProps) {
  const resolution = () => props.resolution ?? 64

  // Waveform data — seeded synchronously (cache hit or procedural fallback)
  // so something renders immediately; async extraction upgrades it below.
  const [data, setData] = createSignal<number[]>(
    getWaveformDataSync(props.audioUrl, resolution()),
  )

  // Re-runs whenever audioUrl or resolution changes.
  createEffect(() => {
    const url = props.audioUrl
    const res = resolution()
    if (!url) return
    // Fix: refresh synchronously for the new URL first — previously the
    // stale waveform of the old episode stayed on screen until the async
    // extraction resolved.
    setData(getWaveformDataSync(url, res))
    let cancelled = false
    getWaveformData(url, res).then((result) => {
      if (!cancelled) setData(result)
    })
    // onCleanup inside an effect runs before the next re-run (and on
    // dispose), so a slow extraction for an old URL can never clobber
    // newer data.
    onCleanup(() => { cancelled = true })
  })

  /** Fraction of the episode already played, clamped to [0, 1]. */
  const playedRatio = () =>
    props.duration <= 0 ? 0 : Math.min(1, props.position / props.duration)

  /** Map a 0-1 amplitude to one of the BARS glyphs. */
  const toBar = (v: number) =>
    BARS[Math.min(BARS.length - 1, Math.floor(v * BARS.length))]

  const renderLine = () => {
    const d = data()
    // Number of leading columns rendered in the "played" colour.
    const played = Math.floor(d.length * playedRatio())
    const playedColor = props.isPlaying ? "#6fa8ff" : "#7d8590"
    const futureColor = "#3b4252"
    const playedChars = d.slice(0, played).map(toBar).join("")
    const futureChars = d.slice(played).map(toBar).join("")
    // Fall back to a single space so each <text> keeps its height when empty.
    return (
      <box flexDirection="row" gap={0}>
        <text fg={playedColor}>{playedChars || " "}</text>
        <text fg={futureColor}>{futureChars || " "}</text>
      </box>
    )
  }

  /**
   * Translate a click column into a seek position (whole seconds, clamped).
   * NOTE(review): assumes event.x is relative to the waveform content —
   * the surrounding border + padding may skew the ratio by a couple of
   * columns; confirm against opentui's mouse-event coordinate space.
   */
  const handleClick = (event: { x: number }) => {
    const d = data()
    const ratio = d.length === 0 ? 0 : event.x / d.length
    const next = Math.max(
      0,
      Math.min(props.duration, Math.round(props.duration * ratio)),
    )
    props.onSeek?.(next)
  }

  return (
    <box border padding={1} onMouseDown={handleClick}>
      {renderLine()}
    </box>
  )
}

View File

@@ -4,7 +4,7 @@
* Right panel: episodes for the selected show
*/
import { createSignal, For, Show, createMemo } from "solid-js"
import { createSignal, For, Show, createMemo, createEffect } from "solid-js"
import { useKeyboard } from "@opentui/solid"
import { useFeedStore } from "../stores/feed"
import { format } from "date-fns"
@@ -26,6 +26,9 @@ export function MyShowsPage(props: MyShowsPageProps) {
const [episodeIndex, setEpisodeIndex] = createSignal(0)
const [isRefreshing, setIsRefreshing] = createSignal(false)
/** Threshold: load more when within this many items of the end */
const LOAD_MORE_THRESHOLD = 5
const shows = () => feedStore.getFilteredFeeds()
const selectedShow = createMemo(() => {
@@ -42,6 +45,19 @@ export function MyShowsPage(props: MyShowsPageProps) {
)
})
// Detect when user navigates near the bottom and load more episodes
createEffect(() => {
const idx = episodeIndex()
const eps = episodes()
const show = selectedShow()
if (!show || eps.length === 0) return
const nearBottom = idx >= eps.length - LOAD_MORE_THRESHOLD
if (nearBottom && feedStore.hasMoreEpisodes(show.id) && !feedStore.isLoadingMore()) {
feedStore.loadMoreEpisodes(show.id)
}
})
const formatDate = (date: Date): string => {
return format(date, "MMM d, yyyy")
}
@@ -231,6 +247,16 @@ export function MyShowsPage(props: MyShowsPageProps) {
</box>
)}
</For>
<Show when={feedStore.isLoadingMore()}>
<box paddingLeft={2} paddingTop={1}>
<text fg="yellow">Loading more episodes...</text>
</box>
</Show>
<Show when={!feedStore.isLoadingMore() && selectedShow() && feedStore.hasMoreEpisodes(selectedShow()!.id)}>
<box paddingLeft={2} paddingTop={1}>
<text fg="gray">Scroll down for more episodes</text>
</box>
</Show>
</scrollbox>
</Show>
</Show>

View File

@@ -1,7 +1,6 @@
import { useKeyboard } from "@opentui/solid"
import { PlaybackControls } from "./PlaybackControls"
import { Waveform } from "./Waveform"
import { createWaveform } from "../utils/waveform"
import { MergedWaveform } from "./MergedWaveform"
import { useAudio } from "../hooks/useAudio"
import type { Episode } from "../types/episode"
@@ -24,8 +23,6 @@ const SAMPLE_EPISODE: Episode = {
export function Player(props: PlayerProps) {
const audio = useAudio()
const waveform = () => createWaveform(64)
// The episode to display — prefer a passed-in episode, then the
// currently-playing episode, then fall back to the sample.
const episode = () => props.episode ?? audio.currentEpisode() ?? SAMPLE_EPISODE
@@ -86,7 +83,7 @@ export function Player(props: PlayerProps) {
<strong>Now Playing</strong>
</text>
<text fg="gray">
{formatTime(audio.position())} / {formatTime(dur())}
{formatTime(audio.position())} / {formatTime(dur())} ({progressPercent()}%)
</text>
</box>
@@ -100,27 +97,13 @@ export function Player(props: PlayerProps) {
</text>
<text fg="gray">{episode().description}</text>
<box flexDirection="column" gap={1}>
<box flexDirection="row" gap={1} alignItems="center">
<text fg="gray">Progress:</text>
<box flexGrow={1} height={1} backgroundColor="#2a2f3a">
<box
width={`${progressPercent()}%`}
height={1}
backgroundColor={audio.isPlaying() ? "#6fa8ff" : "#7d8590"}
/>
</box>
<text fg="gray">{progressPercent()}%</text>
</box>
<Waveform
data={waveform()}
position={audio.position()}
duration={dur()}
isPlaying={audio.isPlaying()}
onSeek={(next: number) => audio.seek(next)}
/>
</box>
<MergedWaveform
audioUrl={episode().audioUrl}
position={audio.position()}
duration={dur()}
isPlaying={audio.isPlaying()}
onSeek={(next: number) => audio.seek(next)}
/>
</box>
<PlaybackControls

View File

@@ -20,12 +20,18 @@ import {
migrateSourcesFromLocalStorage,
} from "../utils/feeds-persistence"
/** Max episodes to fetch on refresh */
/** Max episodes to load per page/chunk */
const MAX_EPISODES_REFRESH = 50
/** Max episodes to fetch on initial subscribe */
const MAX_EPISODES_SUBSCRIBE = 20
/** Cache of all parsed episodes per feed (feedId -> Episode[]) */
const fullEpisodeCache = new Map<string, Episode[]>()
/** Track how many episodes are currently loaded per feed */
const episodeLoadCount = new Map<string, number>()
/** Save feeds to file (async, fire-and-forget) */
function saveFeeds(feeds: Feed[]): void {
saveFeedsToFile(feeds).catch(() => {})
@@ -56,6 +62,7 @@ export function createFeedStore() {
sortDirection: "desc",
})
const [selectedFeedId, setSelectedFeedId] = createSignal<string | null>(null)
const [isLoadingMore, setIsLoadingMore] = createSignal(false)
/** Get filtered and sorted feeds */
const getFilteredFeeds = (): Feed[] => {
@@ -132,8 +139,8 @@ export function createFeedStore() {
return allEpisodes
}
/** Fetch latest episodes from an RSS feed URL */
const fetchEpisodes = async (feedUrl: string, limit: number): Promise<Episode[]> => {
/** Fetch latest episodes from an RSS feed URL, caching all parsed episodes */
const fetchEpisodes = async (feedUrl: string, limit: number, feedId?: string): Promise<Episode[]> => {
try {
const response = await fetch(feedUrl, {
headers: {
@@ -144,7 +151,15 @@ export function createFeedStore() {
if (!response.ok) return []
const xml = await response.text()
const parsed = parseRSSFeed(xml, feedUrl)
return parsed.episodes.slice(0, limit)
const allEpisodes = parsed.episodes
// Cache all parsed episodes for pagination
if (feedId) {
fullEpisodeCache.set(feedId, allEpisodes)
episodeLoadCount.set(feedId, Math.min(limit, allEpisodes.length))
}
return allEpisodes.slice(0, limit)
} catch {
return []
}
@@ -152,9 +167,10 @@ export function createFeedStore() {
/** Add a new feed and auto-fetch latest 20 episodes */
const addFeed = async (podcast: Podcast, sourceId: string, visibility: FeedVisibility = FeedVisibility.PUBLIC) => {
const episodes = await fetchEpisodes(podcast.feedUrl, MAX_EPISODES_SUBSCRIBE)
const feedId = crypto.randomUUID()
const episodes = await fetchEpisodes(podcast.feedUrl, MAX_EPISODES_SUBSCRIBE, feedId)
const newFeed: Feed = {
id: crypto.randomUUID(),
id: feedId,
podcast,
episodes,
visibility,
@@ -174,7 +190,7 @@ export function createFeedStore() {
const refreshFeed = async (feedId: string) => {
const feed = getFeed(feedId)
if (!feed) return
const episodes = await fetchEpisodes(feed.podcast.feedUrl, MAX_EPISODES_REFRESH)
const episodes = await fetchEpisodes(feed.podcast.feedUrl, MAX_EPISODES_REFRESH, feedId)
setFeeds((prev) => {
const updated = prev.map((f) =>
f.id === feedId ? { ...f, episodes, lastUpdated: new Date() } : f
@@ -194,6 +210,8 @@ export function createFeedStore() {
/** Remove a feed */
const removeFeed = (feedId: string) => {
fullEpisodeCache.delete(feedId)
episodeLoadCount.delete(feedId)
setFeeds((prev) => {
const updated = prev.filter((f) => f.id !== feedId)
saveFeeds(updated)
@@ -283,18 +301,76 @@ export function createFeedStore() {
return id ? getFeed(id) : undefined
}
/** Check if a feed has more episodes available beyond what's currently loaded */
// Answers only from the in-memory cache: after an app restart the cache is
// empty and this returns false even if the RSS feed itself has more episodes.
// loadMoreEpisodes() covers that case by re-fetching the feed.
const hasMoreEpisodes = (feedId: string): boolean => {
const cached = fullEpisodeCache.get(feedId)
if (!cached) return false
const loaded = episodeLoadCount.get(feedId) ?? 0
return loaded < cached.length
}
/** Load the next chunk of episodes for a feed from the cache.
 * If no cache exists (e.g. app restart), re-fetches from the RSS feed.
 * Re-entrancy is guarded by the isLoadingMore signal; the finally block
 * guarantees the flag is reset on every exit path, including the early
 * returns inside try. */
const loadMoreEpisodes = async (feedId: string) => {
if (isLoadingMore()) return
const feed = getFeed(feedId)
if (!feed) return
setIsLoadingMore(true)
try {
let cached = fullEpisodeCache.get(feedId)
// If no cache, re-fetch and parse the full feed
if (!cached) {
const response = await fetch(feed.podcast.feedUrl, {
headers: {
"Accept-Encoding": "identity",
"Accept": "application/rss+xml, application/xml, text/xml, */*",
},
})
if (!response.ok) return
const xml = await response.text()
const parsed = parseRSSFeed(xml, feed.podcast.feedUrl)
cached = parsed.episodes
fullEpisodeCache.set(feedId, cached)
// Set current load count to match what's already displayed
// NOTE(review): assumes the refetched feed still begins with the episodes
// currently displayed — if the feed's ordering changed since the last
// fetch, the slice(0, newCount) below may not be a strict superset of
// what the user sees. Confirm against the RSS parser's sort guarantees.
episodeLoadCount.set(feedId, feed.episodes.length)
}
const currentCount = episodeLoadCount.get(feedId) ?? feed.episodes.length
// Advance by one refresh-sized page, capped at the cached total.
const newCount = Math.min(currentCount + MAX_EPISODES_REFRESH, cached.length)
if (newCount <= currentCount) return // nothing more to load
episodeLoadCount.set(feedId, newCount)
// Replace the feed's episode list with the longer prefix of the cache,
// then persist (saveFeeds is fire-and-forget).
const episodes = cached.slice(0, newCount)
setFeeds((prev) => {
const updated = prev.map((f) =>
f.id === feedId ? { ...f, episodes } : f
)
saveFeeds(updated)
return updated
})
} finally {
setIsLoadingMore(false)
}
}
return {
// State
feeds,
sources,
filter,
selectedFeedId,
isLoadingMore,
// Computed
getFilteredFeeds,
getAllEpisodesChronological,
getFeed,
getSelectedFeed,
hasMoreEpisodes,
// Actions
setFilter,
@@ -305,6 +381,7 @@ export function createFeedStore() {
togglePinned,
refreshFeed,
refreshAllFeeds,
loadMoreEpisodes,
addSource,
removeSource,
toggleSource,

149
src/utils/audio-waveform.ts Normal file
View File

@@ -0,0 +1,149 @@
/**
* Audio waveform analysis for PodTUI
*
* Extracts amplitude data from audio files using ffmpeg (when available)
* or generates procedural waveform data as a fallback. Results are cached
* in-memory keyed by audio URL.
*/
/** Number of amplitude data points to generate */
const DEFAULT_RESOLUTION = 128
/** In-memory cache: audioUrl -> amplitude data */
const waveformCache = new Map<string, number[]>()
/**
* Try to extract real waveform data from an audio URL using ffmpeg.
* Returns null if ffmpeg is not available or the extraction fails.
*/
/**
 * Try to extract real waveform data from an audio URL using ffmpeg.
 *
 * Decodes up to the first 5 minutes to mono 8 kHz s16le PCM, then
 * downsamples to `resolution` buckets by max absolute amplitude,
 * normalised to 0-1 with 3 decimal places.
 *
 * Returns null if ffmpeg is unavailable, produces no output, or the
 * extraction fails for any reason — all errors are swallowed because this
 * is a best-effort path with a procedural fallback.
 */
async function extractWithFfmpeg(audioUrl: string, resolution: number): Promise<number[] | null> {
  try {
    if (!Bun.which("ffmpeg")) return null
    const proc = Bun.spawn(
      [
        "ffmpeg",
        "-i", audioUrl,
        "-t", "300", // read at most 5 minutes (enough data to fill the waveform)
        "-ac", "1", // mono
        "-ar", "8000", // low sample rate to keep data small
        "-f", "s16le", // raw signed 16-bit PCM
        "-v", "quiet",
        "-",
      ],
      { stdout: "pipe", stderr: "ignore" },
    )
    const output = await new Response(proc.stdout).arrayBuffer()
    await proc.exited
    // Fix: `new Int16Array(buffer)` throws a RangeError when byteLength is
    // odd (possible if the stream is truncated mid-sample), which previously
    // rejected the whole extraction. Trim to a whole number of samples.
    const sampleCount = Math.floor(output.byteLength / 2)
    if (sampleCount === 0) return null
    const samples = new Int16Array(output, 0, sampleCount)
    // Downsample to `resolution` buckets by taking the max absolute amplitude
    // in each bucket; the final bucket absorbs any floor-division remainder
    // so trailing samples are no longer dropped.
    const bucketSize = Math.max(1, Math.floor(samples.length / resolution))
    const data: number[] = []
    for (let i = 0; i < resolution; i++) {
      const start = i * bucketSize
      const end = i === resolution - 1
        ? samples.length
        : Math.min(start + bucketSize, samples.length)
      let maxAbs = 0
      for (let j = start; j < end; j++) {
        const abs = Math.abs(samples[j])
        if (abs > maxAbs) maxAbs = abs
      }
      // Normalise to 0-1 (32768 = max magnitude of signed 16-bit PCM)
      data.push(Number((maxAbs / 32768).toFixed(3)))
    }
    return data
  } catch {
    return null
  }
}
/**
* Generate a procedural (fake) waveform that looks plausible.
* Uses a combination of sine waves with different frequencies to
* simulate varying audio energy.
*/
/**
 * Generate a procedural (fake) waveform that looks plausible.
 *
 * Combines sine waves of different frequencies to simulate varying audio
 * energy. Fully deterministic: the same (resolution, seed) pair always
 * yields the same data, which keeps the rendered waveform stable across
 * cache clears and honours the seeding contract of hashString().
 */
function generateProcedural(resolution: number, seed: number): number[] {
  const data: number[] = []
  for (let i = 0; i < resolution; i++) {
    const t = i + seed
    // Deterministic pseudo-random jitter in [0, 0.1). Fix: this replaces
    // Math.random(), which made "same URL => same waveform" false — the
    // bars changed on every regeneration.
    const jitter = (Math.abs(Math.sin(t * 12.9898) * 43758.5453) % 1) * 0.1
    const value =
      0.15 +
      Math.abs(Math.sin(t / 3.7)) * 0.35 +
      Math.abs(Math.sin(t / 7.3)) * 0.25 +
      Math.abs(Math.sin(t / 13.1)) * 0.15 +
      jitter
    data.push(Number(Math.min(1, value).toFixed(3)))
  }
  return data
}
/**
* Simple numeric hash of a string, used to seed procedural generation
* so the same URL always produces the same waveform.
*/
/**
 * Deterministic numeric hash of a string (31-polynomial with 32-bit
 * wraparound), used to seed procedural waveform generation so the same
 * URL always maps to the same seed. Always returns a non-negative integer.
 */
function hashString(s: string): number {
  let acc = 0
  for (let i = 0; i < s.length; i += 1) {
    acc = (Math.imul(acc, 31) + s.charCodeAt(i)) | 0
  }
  return Math.abs(acc)
}
/**
* Get waveform data for an audio URL.
*
* Returns cached data if available, otherwise attempts ffmpeg extraction
* and falls back to procedural generation.
*/
/**
 * Get waveform data for an audio URL.
 *
 * Checks the in-memory cache first, then attempts real extraction via
 * ffmpeg, and finally falls back to procedurally generated data seeded
 * from the URL. Whatever is produced is cached under `url:resolution`.
 */
export async function getWaveformData(
  audioUrl: string,
  resolution: number = DEFAULT_RESOLUTION,
): Promise<number[]> {
  const key = `${audioUrl}:${resolution}`
  const hit = waveformCache.get(key)
  if (hit) return hit

  // Real extraction when possible; deterministic procedural data otherwise.
  const extracted = await extractWithFfmpeg(audioUrl, resolution)
  const result = extracted ?? generateProcedural(resolution, hashString(audioUrl))
  waveformCache.set(key, result)
  return result
}
/**
* Synchronous fallback: get a waveform immediately (from cache or procedural).
* Use this when you need data without waiting for async extraction.
*/
/**
 * Synchronous fallback: get a waveform immediately (from cache or
 * procedural generation) without waiting for async ffmpeg extraction.
 */
export function getWaveformDataSync(
  audioUrl: string,
  resolution: number = DEFAULT_RESOLUTION,
): number[] {
  const key = `${audioUrl}:${resolution}`
  let entry = waveformCache.get(key)
  if (entry === undefined) {
    // No cached data yet — generate deterministic procedural data and cache it.
    entry = generateProcedural(resolution, hashString(audioUrl))
    waveformCache.set(key, entry)
  }
  return entry
}
/** Clear the waveform cache (for memory management) */
export function clearWaveformCache(): void {
waveformCache.clear()
}