feat: Implement Auto-Tagging Settings and MusicBrainz integration

- Added AutoTaggingSettings component for configuring auto-tagging preferences.
- Integrated localStorage for saving user preferences and options.
- Developed useAutoTagging hook for fetching and applying metadata from MusicBrainz.
- Created MusicBrainz API client for searching and retrieving music metadata.
- Enhanced metadata structure with additional fields for tracks and albums.
- Implemented rate-limiting for MusicBrainz API requests.
- Added UI components for user interaction and feedback during the tagging process.
Commit 147602ad8c (parent 18f0811787), committed via GitHub on 2025-08-10 15:02:49 +00:00
10 changed files with 1904 additions and 37 deletions


@@ -0,0 +1,73 @@
'use client';
import React, { useState } from 'react';
import {
ContextMenu,
ContextMenuContent,
ContextMenuItem,
ContextMenuSeparator,
ContextMenuTrigger,
} from "@/components/ui/context-menu";
import { MusicIcon, TagIcon, InfoIcon } from 'lucide-react';
import { AutoTaggingDialog } from './AutoTaggingDialog';
interface AutoTagContextMenuProps {
children: React.ReactNode;
mode: 'track' | 'album' | 'artist';
itemId: string;
itemName: string;
artistName?: string;
}
export function AutoTagContextMenu({
children,
mode,
itemId,
itemName,
artistName
}: AutoTagContextMenuProps) {
const [isDialogOpen, setIsDialogOpen] = useState(false);
return (
<>
<ContextMenu>
<ContextMenuTrigger asChild>
{children}
</ContextMenuTrigger>
<ContextMenuContent className="w-56">
<ContextMenuItem
onClick={() => setIsDialogOpen(true)}
className="cursor-pointer"
>
<TagIcon className="mr-2 h-4 w-4" />
Auto-Tag {mode === 'track' ? 'Track' : mode === 'album' ? 'Album' : 'Artist'}
</ContextMenuItem>
{mode === 'track' && (
<>
<ContextMenuSeparator />
<ContextMenuItem className="cursor-pointer">
<InfoIcon className="mr-2 h-4 w-4" />
View Track Details
</ContextMenuItem>
<ContextMenuItem className="cursor-pointer">
<MusicIcon className="mr-2 h-4 w-4" />
Edit Track Metadata
</ContextMenuItem>
</>
)}
</ContextMenuContent>
</ContextMenu>
<AutoTaggingDialog
isOpen={isDialogOpen}
onClose={() => setIsDialogOpen(false)}
mode={mode}
itemId={itemId}
itemName={itemName}
artistName={artistName}
/>
</>
);
}
export default AutoTagContextMenu;

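A minimal usage sketch for the context menu: the wrapped children become the right-click target. The TrackRow component and its props below are hypothetical and not part of this commit.

// Hypothetical usage inside a track list (TrackRow and its track shape are assumptions).
import { AutoTagContextMenu } from '@/app/components/AutoTagContextMenu';

function TrackRow({ track }: { track: { id: string; title: string; artist: string } }) {
  return (
    <AutoTagContextMenu
      mode="track"
      itemId={track.id}
      itemName={track.title}
      artistName={track.artist}
    >
      {/* Right-clicking this row opens the Auto-Tag menu */}
      <div className="flex items-center gap-2 p-2">{track.title}</div>
    </AutoTagContextMenu>
  );
}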

@@ -0,0 +1,319 @@
'use client';
import React, { useState, useEffect } from 'react';
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import {
Sheet,
SheetContent,
SheetDescription,
SheetHeader,
SheetTitle,
} from "@/components/ui/sheet";
import {
Tabs,
TabsContent,
TabsList,
TabsTrigger,
} from "@/components/ui/tabs";
import { Button } from "@/components/ui/button";
import { Progress } from "@/components/ui/progress";
import { useToast } from "@/hooks/use-toast";
import { useAutoTagging, EnhancedTrackMetadata, EnhancedAlbumMetadata } from "@/hooks/use-auto-tagging";
import { useIsMobile } from "@/hooks/use-mobile";
import {
MusicIcon,
AlbumIcon,
UsersIcon,
CheckCircle2Icon,
XCircleIcon,
AlertTriangleIcon,
InfoIcon
} from 'lucide-react';
import Image from 'next/image';
interface AutoTaggingDialogProps {
isOpen: boolean;
onClose: () => void;
mode: 'track' | 'album' | 'artist';
itemId: string;
itemName: string;
artistName?: string;
}
export const AutoTaggingDialog: React.FC<AutoTaggingDialogProps> = ({
isOpen,
onClose,
mode,
itemId,
itemName,
artistName
}) => {
const isMobile = useIsMobile();
const { toast } = useToast();
const [confidenceThreshold, setConfidenceThreshold] = useState(70);
const [activeTab, setActiveTab] = useState<'tracks' | 'albums'>('tracks');
const [isApplying, setIsApplying] = useState(false);
const {
isProcessing,
progress,
enhancedTracks,
enhancedAlbums,
startAutoTagging,
applyEnhancedMetadata
} = useAutoTagging();
// Start auto-tagging when the dialog is opened
useEffect(() => {
if (isOpen && itemId && !isProcessing && progress === 0) {
// Wrap in try/catch to handle any errors that might occur during auto-tagging
try {
startAutoTagging(mode, itemId, confidenceThreshold);
} catch (error) {
console.error('Failed to start auto-tagging:', error);
toast({
title: "Auto-Tagging Error",
description: error instanceof Error ? error.message : "Failed to start auto-tagging",
variant: "destructive",
});
onClose();
}
}
}, [isOpen, itemId, mode, isProcessing, progress, startAutoTagging, confidenceThreshold, toast, onClose]);
// Set the active tab based on the mode
useEffect(() => {
if (mode === 'track') {
setActiveTab('tracks');
} else if (mode === 'album' || mode === 'artist') {
setActiveTab('albums');
}
}, [mode]);
const handleApplyMetadata = async () => {
try {
setIsApplying(true);
await applyEnhancedMetadata(
enhancedTracks.filter(track => track.status === 'matched' && track.confidence >= confidenceThreshold),
enhancedAlbums.filter(album => album.status === 'matched' && album.confidence >= confidenceThreshold)
);
onClose();
} catch (error) {
console.error('Failed to apply metadata:', error);
toast({
title: "Error",
description: "Failed to apply metadata",
variant: "destructive",
});
} finally {
setIsApplying(false);
}
};
// Get match statistics
const matchedTracks = enhancedTracks.filter(track => track.status === 'matched' && track.confidence >= confidenceThreshold).length;
const totalTracks = enhancedTracks.length;
const matchedAlbums = enhancedAlbums.filter(album => album.status === 'matched' && album.confidence >= confidenceThreshold).length;
const totalAlbums = enhancedAlbums.length;
const getStatusIcon = (status: 'pending' | 'matched' | 'failed' | 'applied', confidence: number) => {
if (status === 'pending') return <AlertTriangleIcon className="w-4 h-4 text-yellow-500" />;
if (status === 'failed') return <XCircleIcon className="w-4 h-4 text-red-500" />;
if (status === 'matched' && confidence >= confidenceThreshold) return <CheckCircle2Icon className="w-4 h-4 text-green-500" />;
if (status === 'matched' && confidence < confidenceThreshold) return <InfoIcon className="w-4 h-4 text-yellow-500" />;
if (status === 'applied') return <CheckCircle2Icon className="w-4 h-4 text-blue-500" />;
return null;
};
const getConfidenceColor = (confidence: number) => {
if (confidence >= 90) return 'bg-green-500';
if (confidence >= 70) return 'bg-green-400';
if (confidence >= 50) return 'bg-yellow-500';
return 'bg-red-500';
};
// Render the appropriate dialog/sheet based on mobile status
const DialogComponent = isMobile ? Sheet : Dialog;
const DialogContentComponent = isMobile ? SheetContent : DialogContent;
const DialogHeaderComponent = isMobile ? SheetHeader : DialogHeader;
const DialogTitleComponent = isMobile ? SheetTitle : DialogTitle;
const DialogDescriptionComponent = isMobile ? SheetDescription : DialogDescription;
return (
<DialogComponent open={isOpen} onOpenChange={(open) => !open && onClose()}>
<DialogContentComponent className={isMobile ? "p-0 pt-8" : "max-w-3xl max-h-[90vh] overflow-hidden flex flex-col"}>
<DialogHeaderComponent className={isMobile ? "p-6 pb-2" : ""}>
<DialogTitleComponent>
Auto-Tagging {mode === 'track' ? 'Track' : mode === 'album' ? 'Album' : 'Artist'}
</DialogTitleComponent>
<DialogDescriptionComponent>
{isProcessing ? (
`Analyzing ${mode === 'track' ? 'track' : mode === 'album' ? 'album' : 'artist'} "${itemName}"`
) : (
`Found metadata for ${matchedTracks} of ${totalTracks} tracks${totalAlbums > 0 ? ` and ${matchedAlbums} of ${totalAlbums} albums` : ''}`
)}
</DialogDescriptionComponent>
{/* Progress bar */}
{(isProcessing || isApplying) && (
<div className="my-4">
<Progress value={progress} className="h-2" />
<p className="text-sm text-muted-foreground mt-2">
{isProcessing ? 'Analyzing metadata...' : 'Applying metadata...'}
</p>
</div>
)}
</DialogHeaderComponent>
{/* Tabs for tracks and albums */}
{!isProcessing && !isApplying && (
<div className={`flex-1 overflow-hidden flex flex-col ${isMobile ? "px-6" : ""}`}>
<Tabs value={activeTab} onValueChange={(value) => setActiveTab(value as 'tracks' | 'albums')} className="flex-1 flex flex-col">
<div className="flex justify-between items-center mb-2">
<TabsList>
<TabsTrigger value="tracks" disabled={totalTracks === 0}>
<MusicIcon className="w-4 h-4 mr-2" /> Tracks ({matchedTracks}/{totalTracks})
</TabsTrigger>
<TabsTrigger value="albums" disabled={totalAlbums === 0}>
<AlbumIcon className="w-4 h-4 mr-2" /> Albums ({matchedAlbums}/{totalAlbums})
</TabsTrigger>
</TabsList>
{/* Confidence threshold slider */}
<div className="flex items-center gap-2">
<span className="text-xs text-muted-foreground whitespace-nowrap">Min. Confidence: {confidenceThreshold}%</span>
<input
type="range"
min="0"
max="100"
value={confidenceThreshold}
onChange={(e) => setConfidenceThreshold(parseInt(e.target.value))}
className="w-24"
/>
</div>
</div>
{/* Tracks tab content */}
<TabsContent value="tracks" className="flex-1 overflow-auto data-[state=active]:flex flex-col">
<div className="rounded-md border">
<div className="bg-muted p-2 grid grid-cols-12 gap-2 text-sm font-medium">
<div className="col-span-1"></div>
<div className="col-span-4">Title</div>
<div className="col-span-3">Artist</div>
<div className="col-span-2">Album</div>
<div className="col-span-2 text-right">Confidence</div>
</div>
<div className="divide-y max-h-[50vh] overflow-auto">
{enhancedTracks.map(track => (
<div key={track.id} className="grid grid-cols-12 gap-2 p-2 items-center">
<div className="col-span-1">
{getStatusIcon(track.status, track.confidence)}
</div>
<div className="col-span-4 truncate">
{track.title}
</div>
<div className="col-span-3 truncate">
{track.artist}
</div>
<div className="col-span-2 truncate">
{track.album}
</div>
<div className="col-span-2 flex justify-end items-center gap-2">
<div className="h-2 w-10 rounded-full bg-gray-200">
<div
className={`h-full rounded-full ${getConfidenceColor(track.confidence)}`}
style={{ width: `${track.confidence}%` }}
/>
</div>
<span className="text-xs">{track.confidence}%</span>
</div>
</div>
))}
</div>
</div>
</TabsContent>
{/* Albums tab content */}
<TabsContent value="albums" className="flex-1 overflow-auto data-[state=active]:flex flex-col">
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 max-h-[50vh] overflow-auto p-1">
{enhancedAlbums.map(album => (
<div key={album.id} className="border rounded-lg overflow-hidden">
<div className="flex">
{/* Album cover */}
<div className="relative w-24 h-24">
{album.coverArtUrl ? (
<Image
src={album.coverArtUrl}
alt={album.name}
fill
className="object-cover"
/>
) : (
<div className="w-full h-full bg-muted flex items-center justify-center">
<AlbumIcon className="w-8 h-8 text-muted-foreground" />
</div>
)}
{/* Status badge */}
<div className="absolute top-1 left-1">
{getStatusIcon(album.status, album.confidence)}
</div>
</div>
{/* Album info */}
<div className="flex-1 p-3">
<h4 className="font-medium text-sm truncate">{album.name}</h4>
<p className="text-xs text-muted-foreground truncate">{album.artist}</p>
<div className="mt-2 flex items-center gap-2">
<div className="h-2 w-10 rounded-full bg-gray-200">
<div
className={`h-full rounded-full ${getConfidenceColor(album.confidence)}`}
style={{ width: `${album.confidence}%` }}
/>
</div>
<span className="text-xs">{album.confidence}%</span>
</div>
{album.year && (
<p className="text-xs mt-1">Year: {album.year}</p>
)}
</div>
</div>
</div>
))}
</div>
</TabsContent>
</Tabs>
</div>
)}
<DialogFooter className={`${isMobile ? "p-6 pt-4" : "mt-4"}`}>
<div className="w-full flex flex-col md:flex-row justify-end gap-2">
<Button
variant="outline"
onClick={onClose}
disabled={isProcessing || isApplying}
>
Cancel
</Button>
<Button
onClick={handleApplyMetadata}
disabled={
isProcessing ||
isApplying ||
(matchedTracks === 0 && matchedAlbums === 0)
}
>
Apply Metadata
</Button>
</div>
</DialogFooter>
</DialogContentComponent>
</DialogComponent>
);
};
export default AutoTaggingDialog;


@@ -0,0 +1,221 @@
'use client';
import React, { useState, useEffect } from 'react';
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
import { Switch } from '@/components/ui/switch';
import { Button } from '@/components/ui/button';
import { Label } from '@/components/ui/label';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Input } from '@/components/ui/input';
import { FaTags } from 'react-icons/fa';
import { useToast } from '@/hooks/use-toast';
import { AutoTaggingDialog } from './AutoTaggingDialog';
export const AutoTaggingSettings = () => {
const { toast } = useToast();
const [isClient, setIsClient] = useState(false);
const [autoTaggingEnabled, setAutoTaggingEnabled] = useState(false);
const [autoTagDialogOpen, setAutoTagDialogOpen] = useState(false);
const [selectedItem, setSelectedItem] = useState({
id: '',
name: 'Library',
mode: 'artist' as 'track' | 'album' | 'artist'
});
const [autoTagOptions, setAutoTagOptions] = useState({
rateLimit: 1000, // milliseconds between requests
autoProcess: false,
preferLocalMetadata: true,
tagsToUpdate: ['title', 'artist', 'album', 'year', 'genre'],
});
useEffect(() => {
setIsClient(true);
// Load saved preferences from localStorage
const savedAutoTagging = localStorage.getItem('auto-tagging-enabled');
if (savedAutoTagging !== null) {
setAutoTaggingEnabled(savedAutoTagging === 'true');
}
// Load saved auto-tag options
const savedOptions = localStorage.getItem('auto-tagging-options');
if (savedOptions !== null) {
try {
setAutoTagOptions(JSON.parse(savedOptions));
} catch (error) {
console.error('Failed to parse stored auto-tagging options:', error);
}
}
}, []);
const handleAutoTaggingToggle = (enabled: boolean) => {
setAutoTaggingEnabled(enabled);
if (isClient) {
localStorage.setItem('auto-tagging-enabled', enabled.toString());
}
toast({
title: enabled ? 'Auto-Tagging Enabled' : 'Auto-Tagging Disabled',
description: enabled
? 'Music will be automatically tagged with metadata from MusicBrainz'
: 'Auto-tagging has been disabled',
});
};
const handleOptionsChange = (key: string, value: unknown) => {
setAutoTagOptions(prev => {
const newOptions = { ...prev, [key]: value };
if (isClient) {
localStorage.setItem('auto-tagging-options', JSON.stringify(newOptions));
}
return newOptions;
});
};
const handleTagSelectionChange = (tag: string, isSelected: boolean) => {
setAutoTagOptions(prev => {
const currentTags = [...prev.tagsToUpdate];
const newTags = isSelected
? [...currentTags, tag]
: currentTags.filter(t => t !== tag);
const newOptions = { ...prev, tagsToUpdate: newTags };
if (isClient) {
localStorage.setItem('auto-tagging-options', JSON.stringify(newOptions));
}
return newOptions;
});
};
const isTagSelected = (tag: string) => {
return autoTagOptions.tagsToUpdate.includes(tag);
};
return (
<>
<Card className="mb-6 break-inside-avoid py-5">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<FaTags className="w-5 h-5" />
Auto-Tagging
</CardTitle>
<CardDescription>
Configure metadata auto-tagging with MusicBrainz
</CardDescription>
</CardHeader>
<CardContent className="space-y-6">
<div className="flex items-center justify-between">
<div>
<p className="font-medium">Enable Auto-Tagging</p>
<p className="text-sm text-muted-foreground">
Automatically fetch and apply metadata from MusicBrainz
</p>
</div>
<Switch
checked={autoTaggingEnabled}
onCheckedChange={handleAutoTaggingToggle}
/>
</div>
{autoTaggingEnabled && (
<>
<div className="space-y-2">
<Label htmlFor="rate-limit">API Rate Limit (ms)</Label>
<Input
id="rate-limit"
type="number"
min={500}
max={5000}
step={100}
value={autoTagOptions.rateLimit}
onChange={(e) => handleOptionsChange('rateLimit', Number(e.target.value))}
/>
<p className="text-xs text-muted-foreground">
Time between API requests in milliseconds (min: 500ms)
</p>
</div>
<div className="flex items-center justify-between">
<div>
<p className="font-medium">Auto Process Results</p>
<p className="text-sm text-muted-foreground">
Automatically apply best matches without confirmation
</p>
</div>
<Switch
checked={autoTagOptions.autoProcess}
onCheckedChange={(checked) => handleOptionsChange('autoProcess', checked)}
/>
</div>
<div className="flex items-center justify-between">
<div>
<p className="font-medium">Prefer Local Metadata</p>
<p className="text-sm text-muted-foreground">
Keep existing metadata when confidence is low
</p>
</div>
<Switch
checked={autoTagOptions.preferLocalMetadata}
onCheckedChange={(checked) => handleOptionsChange('preferLocalMetadata', checked)}
/>
</div>
<div className="space-y-2">
<Label>Tags to Update</Label>
<div className="grid grid-cols-2 gap-2">
{['title', 'artist', 'album', 'year', 'genre', 'albumArtist', 'trackNumber', 'discNumber'].map(tag => (
<div key={tag} className="flex items-center space-x-2">
<Switch
id={`tag-${tag}`}
checked={isTagSelected(tag)}
onCheckedChange={(checked) => handleTagSelectionChange(tag, checked)}
/>
<Label htmlFor={`tag-${tag}`} className="capitalize">
{tag === 'albumArtist' ? 'Album Artist' :
tag === 'trackNumber' ? 'Track Number' :
tag === 'discNumber' ? 'Disc Number' : tag}
</Label>
</div>
))}
</div>
</div>
<div className="pt-2">
<Button onClick={() => {
// Set selected item to represent the whole library
setSelectedItem({
id: 'library',
name: 'Full Library',
mode: 'artist'
});
setAutoTagDialogOpen(true);
}} variant="outline">
<FaTags className="w-4 h-4 mr-2" />
Open Auto-Tagging Tool
</Button>
</div>
</>
)}
<div className="text-sm text-muted-foreground space-y-2">
<p><strong>How it works:</strong></p>
<ul className="list-disc list-inside space-y-1 ml-2">
<li>Metadata is fetched from MusicBrainz when you play tracks</li>
<li>Tags can be applied automatically or manually reviewed</li>
<li>Right-click on tracks or albums to tag them manually</li>
<li>The MusicBrainz API is rate-limited, so don&apos;t set the request interval too low</li>
</ul>
</div>
</CardContent>
</Card>
<AutoTaggingDialog
isOpen={autoTagDialogOpen}
onClose={() => setAutoTagDialogOpen(false)}
mode={selectedItem.mode}
itemId={selectedItem.id}
itemName={selectedItem.name}
/>
</>
);
};

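The settings above persist under the 'auto-tagging-enabled' and 'auto-tagging-options' localStorage keys. A sketch of how another module could read the options back, assuming the same defaults as the component (this helper is not part of the commit):

// Hypothetical reader for the persisted options (only the key name and defaults come from AutoTaggingSettings).
interface AutoTagOptions {
  rateLimit: number;
  autoProcess: boolean;
  preferLocalMetadata: boolean;
  tagsToUpdate: string[];
}

const DEFAULT_AUTO_TAG_OPTIONS: AutoTagOptions = {
  rateLimit: 1000,
  autoProcess: false,
  preferLocalMetadata: true,
  tagsToUpdate: ['title', 'artist', 'album', 'year', 'genre'],
};

export function loadAutoTagOptions(): AutoTagOptions {
  if (typeof window === 'undefined') return DEFAULT_AUTO_TAG_OPTIONS; // guard for SSR
  try {
    const raw = localStorage.getItem('auto-tagging-options');
    return raw ? { ...DEFAULT_AUTO_TAG_OPTIONS, ...JSON.parse(raw) } : DEFAULT_AUTO_TAG_OPTIONS;
  } catch {
    return DEFAULT_AUTO_TAG_OPTIONS;
  }
}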

@@ -1,12 +1,14 @@
'use client';
import React, { useRef, useState, useEffect } from 'react';
import React, { useRef, useState, useEffect, useCallback } from 'react';
import Image from 'next/image';
import { motion, PanInfo, AnimatePresence } from 'framer-motion';
import { useAudioPlayer, Track } from './AudioPlayerContext';
import { FaPlay, FaPause, FaExpand, FaForward, FaBackward } from 'react-icons/fa6';
import { FaPlay, FaPause, FaExpand, FaForward, FaBackward, FaVolumeHigh, FaVolumeXmark } from 'react-icons/fa6';
import { Heart } from 'lucide-react';
import { constrain } from '@/lib/utils';
import { Progress } from '@/components/ui/progress';
import { extractDominantColor } from '@/lib/image-utils';
interface DraggableMiniPlayerProps {
onExpand: () => void;
@@ -24,6 +26,12 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
const [position, setPosition] = useState({ x: 0, y: 0 });
const [isDragging, setIsDragging] = useState(false);
const [dominantColor, setDominantColor] = useState<string | null>(null);
const [progress, setProgress] = useState(0);
const [showVolumeSlider, setShowVolumeSlider] = useState(false);
const [volume, setVolume] = useState(1);
const [clickCount, setClickCount] = useState(0);
const [clickTimer, setClickTimer] = useState<NodeJS.Timeout | null>(null);
const containerRef = useRef<HTMLDivElement>(null);
const dragStartRef = useRef({ x: 0, y: 0 });
@@ -34,6 +42,105 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
}
}, [position, isDragging]);
// Extract dominant color from album art
useEffect(() => {
if (!currentTrack?.coverArt) {
setDominantColor(null);
return;
}
extractDominantColor(currentTrack.coverArt)
.then(color => setDominantColor(color))
.catch(error => {
console.error('Failed to extract color:', error);
setDominantColor(null);
});
}, [currentTrack?.coverArt]);
// Track progress from main audio player
useEffect(() => {
const updateProgress = () => {
const audioElement = document.querySelector('audio') as HTMLAudioElement | null;
if (audioElement && audioElement.duration) {
setProgress((audioElement.currentTime / audioElement.duration) * 100);
}
};
const updateVolume = () => {
const audioElement = document.querySelector('audio') as HTMLAudioElement | null;
if (audioElement) {
setVolume(audioElement.volume);
}
};
const interval = setInterval(updateProgress, 250);
updateVolume(); // Initial volume
// Set up event listener for volume changes
const audioElement = document.querySelector('audio');
if (audioElement) {
audioElement.addEventListener('volumechange', updateVolume);
}
return () => {
clearInterval(interval);
if (audioElement) {
audioElement.removeEventListener('volumechange', updateVolume);
}
};
}, [currentTrack]);
// Detect double clicks for expanding
const handleContainerClick = useCallback(() => {
setClickCount(prev => prev + 1);
if (clickTimer) {
clearTimeout(clickTimer);
}
const timer = setTimeout(() => {
// A single click does nothing; a second click within 300ms counts as a double click and expands
if (clickCount === 1) {
onExpand();
}
setClickCount(0);
}, 300);
setClickTimer(timer as unknown as NodeJS.Timeout);
}, [clickCount, clickTimer, onExpand]);
// Handle seeking in track
const handleProgressClick = (e: React.MouseEvent<HTMLDivElement>) => {
e.stopPropagation();
const audioElement = document.querySelector('audio') as HTMLAudioElement | null;
if (!audioElement) return;
const rect = e.currentTarget.getBoundingClientRect();
const clickX = e.clientX - rect.left;
const percent = clickX / rect.width;
audioElement.currentTime = percent * audioElement.duration;
};
// Handle volume change
const handleVolumeChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const audioElement = document.querySelector('audio') as HTMLAudioElement | null;
if (!audioElement) return;
const newVolume = parseFloat(e.target.value);
audioElement.volume = newVolume;
setVolume(newVolume);
try {
localStorage.setItem('navidrome-volume', newVolume.toString());
} catch (error) {
console.error('Failed to save volume:', error);
}
};
// Keyboard controls for the mini player
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
@@ -106,7 +213,7 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
}
}, []);
// Ensure player stays within viewport bounds
// Ensure player stays within viewport bounds and implement edge snapping
useEffect(() => {
const constrainToViewport = () => {
if (!containerRef.current || isDragging) return;
@@ -118,17 +225,41 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
// Add some padding from edges
const padding = 16;
const newX = constrain(
// Calculate constrained position
let newX = constrain(
position.x,
-(viewportWidth - rect.width) / 2 + padding,
(viewportWidth - rect.width) / 2 - padding
);
const newY = constrain(
let newY = constrain(
position.y,
-(viewportHeight - rect.height) / 2 + padding,
(viewportHeight - rect.height) / 2 - padding
);
// Edge snapping logic
const snapThreshold = 24; // Pixels from edge to trigger snap
const snapPositions = {
left: -(viewportWidth - rect.width) / 2 + padding,
right: (viewportWidth - rect.width) / 2 - padding,
top: -(viewportHeight - rect.height) / 2 + padding,
bottom: (viewportHeight - rect.height) / 2 - padding,
};
// Snap to left or right edge
if (Math.abs(newX - snapPositions.left) < snapThreshold) {
newX = snapPositions.left;
} else if (Math.abs(newX - snapPositions.right) < snapThreshold) {
newX = snapPositions.right;
}
// Snap to top or bottom edge
if (Math.abs(newY - snapPositions.top) < snapThreshold) {
newY = snapPositions.top;
} else if (Math.abs(newY - snapPositions.bottom) < snapThreshold) {
newY = snapPositions.bottom;
}
if (newX !== position.x || newY !== position.y) {
setPosition({ x: newX, y: newY });
@@ -181,18 +312,54 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
transform: `translate(-50%, -50%)`
}}
className="cursor-grab active:cursor-grabbing"
onClick={handleContainerClick}
>
<div className="bg-background/95 backdrop-blur-sm border rounded-lg shadow-xl hover:shadow-2xl transition-shadow p-3 w-[280px]">
<div
className="backdrop-blur-sm border rounded-lg shadow-xl hover:shadow-2xl transition-shadow p-3 w-[280px]"
style={{
backgroundColor: dominantColor
? `${dominantColor.replace('rgb', 'rgba').replace(')', ', 0.15)')}`
: 'var(--background-color, rgba(0, 0, 0, 0.8))',
borderColor: dominantColor
? `${dominantColor.replace('rgb', 'rgba').replace(')', ', 0.3)')}`
: 'var(--border-color, rgba(255, 255, 255, 0.1))'
}}
>
{/* Progress bar at the top */}
<div className="mb-3" onClick={handleProgressClick}>
<Progress
value={progress}
className="h-1 cursor-pointer"
style={{
backgroundColor: dominantColor
? `${dominantColor.replace('rgb', 'rgba').replace(')', ', 0.2)')}`
: undefined,
'--progress-color': dominantColor || undefined
} as React.CSSProperties}
/>
</div>
<div className="flex items-center gap-3">
{/* Album Art */}
<div className="relative w-12 h-12 shrink-0">
<Image
src={currentTrack.coverArt || '/default-user.jpg'}
alt={currentTrack.name}
fill
className="rounded object-cover"
/>
</div>
{/* Album Art - Animated transition */}
<AnimatePresence mode="wait">
<motion.div
key={currentTrack.id}
className="relative w-12 h-12 shrink-0"
initial={{ opacity: 0, scale: 0.9 }}
animate={{ opacity: 1, scale: 1 }}
exit={{ opacity: 0, scale: 0.9 }}
transition={{ duration: 0.2 }}
>
<Image
src={currentTrack.coverArt || '/default-user.jpg'}
alt={currentTrack.name}
fill
className="rounded-md object-cover shadow-md"
sizes="48px"
priority
/>
</motion.div>
</AnimatePresence>
{/* Track Info */}
<div className="flex-1 min-w-0">
@@ -201,13 +368,13 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
</div>
</div>
{/* Controls */}
{/* Keyboard shortcut hint */}
<div className="text-xs text-muted-foreground text-center mt-2 px-2">
Arrow keys to move · Hold Shift for larger steps · Esc to expand
</div>
<div className="text-xs text-muted-foreground text-center mt-2 px-2">
Double-click to expand · Arrow keys to move
</div>
<div className="flex items-center justify-between mt-2 px-2">
{/* Controls */}
<div className="flex items-center justify-between mt-2 px-2">
<button
onClick={(e) => {
e.stopPropagation();
@@ -257,16 +424,53 @@ export const DraggableMiniPlayer: React.FC<DraggableMiniPlayerProps> = ({ onExpa
</button>
</div>
<button
onClick={(e) => {
e.stopPropagation();
onExpand();
}}
className="p-2 hover:bg-muted/50 rounded-full transition-colors"
>
<FaExpand className="w-4 h-4" />
</button>
<div className="relative">
<button
onClick={(e) => {
e.stopPropagation();
setShowVolumeSlider(prev => !prev);
}}
className="p-2 hover:bg-muted/50 rounded-full transition-colors"
title="Volume"
>
{volume === 0 ? (
<FaVolumeXmark className="w-4 h-4" />
) : (
<FaVolumeHigh className="w-4 h-4" />
)}
</button>
{/* Volume Slider */}
{showVolumeSlider && (
<div
className="absolute bottom-full left-1/2 -translate-x-1/2 mb-2 p-2 bg-background/95 backdrop-blur-sm border rounded-lg shadow-lg"
onClick={e => e.stopPropagation()}
>
<input
type="range"
min="0"
max="1"
step="0.01"
value={volume}
onChange={handleVolumeChange}
className="w-24 accent-foreground"
/>
</div>
)}
</div>
</div>
{/* Expand button in top-right corner */}
<button
onClick={(e) => {
e.stopPropagation();
onExpand();
}}
className="absolute top-2 right-2 p-1.5 hover:bg-muted/50 rounded-full transition-colors"
title="Expand"
>
<FaExpand className="w-3 h-3" />
</button>
</div>
</motion.div>
</AnimatePresence>

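The mini player saves volume changes to the 'navidrome-volume' localStorage key, but this diff does not show the value being restored. A sketch of a matching restore-on-mount effect, assuming it sits alongside the other effects in DraggableMiniPlayer:

// Hypothetical restore step (assumption; only the 'navidrome-volume' key comes from this diff).
useEffect(() => {
  const audioElement = document.querySelector('audio') as HTMLAudioElement | null;
  if (!audioElement) return;
  const saved = localStorage.getItem('navidrome-volume');
  if (saved === null) return;
  const parsed = parseFloat(saved);
  if (!Number.isNaN(parsed)) {
    // Clamp to the valid 0..1 range before applying
    audioElement.volume = Math.min(1, Math.max(0, parsed));
    setVolume(audioElement.volume);
  }
}, []);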

@@ -16,8 +16,9 @@ import { SidebarCustomization } from '@/app/components/SidebarCustomization';
import { SettingsManagement } from '@/app/components/SettingsManagement';
import { CacheManagement } from '@/app/components/CacheManagement';
import { OfflineManagement } from '@/app/components/OfflineManagement';
import { FaServer, FaUser, FaLock, FaCheck, FaTimes, FaLastfm, FaCog } from 'react-icons/fa';
import { Settings, ExternalLink } from 'lucide-react';
import { AutoTaggingSettings } from '@/app/components/AutoTaggingSettings';
import { FaServer, FaUser, FaLock, FaCheck, FaTimes, FaLastfm, FaCog, FaTags } from 'react-icons/fa';
import { Settings, ExternalLink, Tag } from 'lucide-react';
import { Switch } from '@/components/ui/switch';
const SettingsPage = () => {
@@ -788,6 +789,11 @@ const SettingsPage = () => {
<OfflineManagement />
</div>
{/* Auto-Tagging Settings */}
<div className="break-inside-avoid mb-6">
<AutoTaggingSettings />
</div>
<Card className="mb-6 break-inside-avoid py-5">
<CardHeader>
<CardTitle>Appearance</CardTitle>

hooks/use-auto-tagging.ts (new file, 603 lines)

@@ -0,0 +1,603 @@
import { useState, useCallback } from 'react';
import MusicBrainzClient, {
MusicBrainzRelease,
MusicBrainzReleaseDetails,
MusicBrainzRecording,
MusicBrainzRecordingDetails
} from '@/lib/musicbrainz-api';
import { getNavidromeAPI } from '@/lib/navidrome';
import { useToast } from '@/hooks/use-toast';
import { Album, Song, Artist } from '@/lib/navidrome';
// Define interfaces for the enhanced metadata
export interface EnhancedTrackMetadata {
id: string; // Navidrome track ID
title: string; // Track title
artist: string; // Artist name
album: string; // Album name
mbTrackId?: string; // MusicBrainz recording ID
mbReleaseId?: string; // MusicBrainz release ID
mbArtistId?: string; // MusicBrainz artist ID
year?: string; // Release year
genres?: string[]; // Genres
tags?: string[]; // Tags
trackNumber?: number; // Track number
discNumber?: number; // Disc number
duration?: number; // Duration in seconds
artistCountry?: string; // Artist country
artistType?: string; // Artist type (group, person, etc.)
releaseType?: string; // Release type (album, EP, single, etc.)
status: 'pending' | 'matched' | 'failed' | 'applied'; // Status of the track metadata
confidence: number; // Match confidence (0-100)
}
export interface EnhancedAlbumMetadata {
id: string; // Navidrome album ID
name: string; // Album name
artist: string; // Album artist name
mbReleaseId?: string; // MusicBrainz release ID
mbArtistId?: string; // MusicBrainz artist ID
year?: string; // Release year
genres?: string[]; // Genres
tags?: string[]; // Tags
country?: string; // Release country
releaseType?: string; // Release type (album, EP, single, etc.)
barcode?: string; // Barcode
label?: string; // Record label
status: 'pending' | 'matched' | 'failed' | 'applied'; // Status
confidence: number; // Match confidence (0-100)
tracks: EnhancedTrackMetadata[]; // Tracks in the album
coverArtUrl?: string; // Cover art URL from MusicBrainz
}
// Type for the Auto-Tagging operation mode
export type AutoTaggingMode = 'track' | 'album' | 'artist';
export function useAutoTagging() {
const [isProcessing, setIsProcessing] = useState(false);
const [progress, setProgress] = useState(0);
const [enhancedTracks, setEnhancedTracks] = useState<EnhancedTrackMetadata[]>([]);
const [enhancedAlbums, setEnhancedAlbums] = useState<EnhancedAlbumMetadata[]>([]);
const { toast } = useToast();
const api = getNavidromeAPI();
/**
* Find enhanced metadata for a single track from MusicBrainz
*/
const enhanceTrack = useCallback(async (track: Song): Promise<EnhancedTrackMetadata> => {
try {
// Start with basic metadata
const enhancedTrack: EnhancedTrackMetadata = {
id: track.id,
title: track.title,
artist: track.artist,
album: track.album,
status: 'pending',
confidence: 0
};
// Try to find the track in MusicBrainz
const recording = await MusicBrainzClient.findBestMatchingRecording(
track.title,
track.artist,
track.duration * 1000 // Convert to milliseconds
);
if (!recording) {
enhancedTrack.status = 'failed';
return enhancedTrack;
}
// Get detailed recording information
const recordingDetails = await MusicBrainzClient.getRecording(recording.id);
if (!recordingDetails) {
enhancedTrack.status = 'failed';
return enhancedTrack;
}
// Calculate match confidence
const titleSimilarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(track.title),
MusicBrainzClient.normalizeString(recording.title)
);
const artistSimilarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(track.artist),
MusicBrainzClient.normalizeString(recording['artist-credit'][0]?.artist.name || '')
);
// Calculate confidence score (0-100)
enhancedTrack.confidence = Math.round((titleSimilarity * 0.6 + artistSimilarity * 0.4) * 100);
// Update track with MusicBrainz metadata
enhancedTrack.mbTrackId = recording.id;
enhancedTrack.mbArtistId = recording['artist-credit'][0]?.artist.id;
// Extract additional metadata from recordingDetails
if (recordingDetails.releases && recordingDetails.releases.length > 0) {
enhancedTrack.mbReleaseId = recordingDetails.releases[0].id;
}
if (recordingDetails['first-release-date']) {
enhancedTrack.year = recordingDetails['first-release-date'].split('-')[0];
}
if (recordingDetails.genres) {
enhancedTrack.genres = recordingDetails.genres.map(genre => genre.name);
}
if (recordingDetails.tags) {
enhancedTrack.tags = recordingDetails.tags.map(tag => tag.name);
}
enhancedTrack.status = 'matched';
return enhancedTrack;
} catch (error) {
console.error('Failed to enhance track:', error);
return {
id: track.id,
title: track.title,
artist: track.artist,
album: track.album,
status: 'failed',
confidence: 0
};
}
}, []);
/**
* Find enhanced metadata for an album and its tracks from MusicBrainz
*/
const enhanceAlbum = useCallback(async (album: Album, tracks: Song[]): Promise<EnhancedAlbumMetadata> => {
try {
// Start with basic metadata
const enhancedAlbum: EnhancedAlbumMetadata = {
id: album.id,
name: album.name,
artist: album.artist,
status: 'pending',
confidence: 0,
tracks: []
};
// Try to find the album in MusicBrainz
const release = await MusicBrainzClient.findBestMatchingRelease(
album.name,
album.artist,
tracks.length
);
if (!release) {
enhancedAlbum.status = 'failed';
return enhancedAlbum;
}
// Get detailed release information
const releaseDetails = await MusicBrainzClient.getRelease(release.id);
if (!releaseDetails) {
enhancedAlbum.status = 'failed';
return enhancedAlbum;
}
// Calculate match confidence
const albumSimilarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(album.name),
MusicBrainzClient.normalizeString(release.title)
);
const artistSimilarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(album.artist),
MusicBrainzClient.normalizeString(release['artist-credit'][0]?.artist.name || '')
);
// Calculate confidence score (0-100)
enhancedAlbum.confidence = Math.round((albumSimilarity * 0.6 + artistSimilarity * 0.4) * 100);
// Update album with MusicBrainz metadata
enhancedAlbum.mbReleaseId = release.id;
enhancedAlbum.mbArtistId = release['artist-credit'][0]?.artist.id;
if (release.date) {
enhancedAlbum.year = release.date.split('-')[0];
}
if (release.country) {
enhancedAlbum.country = release.country;
}
// We need to access release-group via a type assertion since it's not defined in MusicBrainzRelease interface
// But it exists in the MusicBrainzReleaseDetails which we're working with
const releaseWithGroup = release as unknown as { 'release-group'?: { id: string; 'primary-type'?: string } };
if (releaseWithGroup['release-group'] && releaseWithGroup['release-group']['primary-type']) {
enhancedAlbum.releaseType = releaseWithGroup['release-group']['primary-type'];
}
if (releaseDetails.barcode) {
enhancedAlbum.barcode = releaseDetails.barcode;
}
// Get cover art URL
if (releaseDetails['cover-art-archive'] && releaseDetails['cover-art-archive'].front) {
enhancedAlbum.coverArtUrl = MusicBrainzClient.getCoverArtUrl(release.id);
}
// Match tracks with MusicBrainz tracks
const enhancedTracks: EnhancedTrackMetadata[] = [];
// First, organize MB tracks by disc and track number
// Define a type for the MusicBrainz track
interface MusicBrainzTrack {
position: number;
number: string;
title: string;
length?: number;
recording: {
id: string;
title: string;
length?: number;
};
}
const mbTracks: Record<number, Record<number, MusicBrainzTrack>> = {};
if (releaseDetails.media) {
for (const medium of releaseDetails.media) {
const discNumber = medium.position;
mbTracks[discNumber] = {};
for (const track of medium.tracks) {
mbTracks[discNumber][track.position] = track;
}
}
}
// Try to match each track
for (const track of tracks) {
// Basic track info
const enhancedTrack: EnhancedTrackMetadata = {
id: track.id,
title: track.title,
artist: track.artist,
album: track.album,
status: 'pending',
confidence: 0
};
// Try to find the track by position if available
if (track.discNumber && track.track && mbTracks[track.discNumber] && mbTracks[track.discNumber][track.track]) {
const mbTrack = mbTracks[track.discNumber][track.track];
enhancedTrack.mbTrackId = mbTrack.recording.id;
enhancedTrack.mbReleaseId = release.id;
enhancedTrack.trackNumber = track.track;
enhancedTrack.discNumber = track.discNumber;
// Calculate title similarity
const titleSimilarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(track.title),
MusicBrainzClient.normalizeString(mbTrack.title)
);
enhancedTrack.confidence = Math.round(titleSimilarity * 100);
enhancedTrack.status = 'matched';
}
// If we can't match by position, try to match by title
else {
// Find in any medium and any position
let bestMatch: MusicBrainzTrack | null = null;
let bestSimilarity = 0;
for (const discNumber of Object.keys(mbTracks)) {
for (const trackNumber of Object.keys(mbTracks[Number(discNumber)])) {
const mbTrack = mbTracks[Number(discNumber)][Number(trackNumber)];
const similarity = calculateStringSimilarity(
MusicBrainzClient.normalizeString(track.title),
MusicBrainzClient.normalizeString(mbTrack.title)
);
if (similarity > bestSimilarity && similarity > 0.6) { // 60% similarity threshold
bestMatch = mbTrack;
bestSimilarity = similarity;
}
}
}
if (bestMatch) {
enhancedTrack.mbTrackId = bestMatch.recording.id;
enhancedTrack.mbReleaseId = release.id;
enhancedTrack.confidence = Math.round(bestSimilarity * 100);
enhancedTrack.status = 'matched';
} else {
enhancedTrack.status = 'failed';
}
}
enhancedTracks.push(enhancedTrack);
}
// Update album with tracks
enhancedAlbum.tracks = enhancedTracks;
enhancedAlbum.status = 'matched';
return enhancedAlbum;
} catch (error) {
console.error('Failed to enhance album:', error);
return {
id: album.id,
name: album.name,
artist: album.artist,
status: 'failed',
confidence: 0,
tracks: []
};
}
}, []);
/**
* Start the auto-tagging process for a track, album, or artist
*/
const startAutoTagging = useCallback(async (
mode: AutoTaggingMode,
itemId: string,
confidenceThreshold: number = 70
) => {
if (!api) {
toast({
title: "Error",
description: "Navidrome API is not configured",
variant: "destructive",
});
return;
}
setIsProcessing(true);
setProgress(0);
setEnhancedTracks([]);
setEnhancedAlbums([]);
try {
// Process different modes
if (mode === 'track') {
// In the absence of a direct method to get a song by ID,
// we run a search and pick the result whose ID matches
const searchResults = await api.search(itemId, 0, 0, 10);
const track = searchResults.songs.find(song => song.id === itemId);
if (!track) {
throw new Error('Track not found');
}
setProgress(10);
// Enhance track metadata
const enhancedTrack = await enhanceTrack(track);
setEnhancedTracks([enhancedTrack]);
setProgress(100);
toast({
title: "Track Analysis Complete",
description: enhancedTrack.status === 'matched'
? `Found metadata for "${track.title}" with ${enhancedTrack.confidence}% confidence`
: `Couldn't find metadata for "${track.title}"`,
});
}
else if (mode === 'album') {
// Get album and its tracks from Navidrome
const { album, songs } = await api.getAlbum(itemId);
if (!album) {
throw new Error('Album not found');
}
setProgress(10);
// Enhance album metadata
const enhancedAlbum = await enhanceAlbum(album, songs);
setEnhancedAlbums([enhancedAlbum]);
setProgress(100);
toast({
title: "Album Analysis Complete",
description: enhancedAlbum.status === 'matched'
? `Found metadata for "${album.name}" with ${enhancedAlbum.confidence}% confidence`
: `Couldn't find metadata for "${album.name}"`,
});
}
else if (mode === 'artist') {
// Get artist and their albums from Navidrome
try {
const { artist, albums } = await api.getArtist(itemId);
if (!artist) {
throw new Error('Artist not found');
}
setProgress(5);
const enhancedAlbumsData: EnhancedAlbumMetadata[] = [];
let processedAlbums = 0;
// Process each album
for (const album of albums) {
try {
const { songs } = await api.getAlbum(album.id);
const enhancedAlbum = await enhanceAlbum(album, songs);
enhancedAlbumsData.push(enhancedAlbum);
} catch (albumError) {
console.error('Error processing album:', albumError);
// Continue with the next album
}
processedAlbums++;
setProgress(5 + Math.round((processedAlbums / albums.length) * 95));
}
setEnhancedAlbums(enhancedAlbumsData);
setProgress(100);
const matchedAlbums = enhancedAlbumsData.filter(album =>
album.status === 'matched' && album.confidence >= confidenceThreshold
).length;
toast({
title: "Artist Analysis Complete",
description: `Found metadata for ${matchedAlbums} of ${albums.length} albums by "${artist.name}"`,
});
} catch (artistError) {
console.error('Error fetching artist:', artistError);
toast({
title: "Artist Not Found",
description: "Could not find the artist in your library",
variant: "destructive",
});
setProgress(100);
}
}
} catch (error) {
console.error('Auto-tagging error:', error);
toast({
title: "Auto-Tagging Failed",
description: error instanceof Error ? error.message : "An unknown error occurred",
variant: "destructive",
});
} finally {
setIsProcessing(false);
}
}, [api, enhanceTrack, enhanceAlbum, toast]);
/**
* Apply enhanced metadata to tracks in Navidrome
*/
const applyEnhancedMetadata = useCallback(async (
tracks: EnhancedTrackMetadata[],
albums?: EnhancedAlbumMetadata[]
) => {
if (!api) {
toast({
title: "Error",
description: "Navidrome API is not configured",
variant: "destructive",
});
return;
}
setIsProcessing(true);
setProgress(0);
try {
let processedItems = 0;
const totalItems = tracks.length + (albums?.length || 0);
// Apply album metadata first
if (albums && albums.length > 0) {
for (const album of albums) {
if (album.status === 'matched') {
// To be implemented: Update album metadata via Navidrome API
// This requires a custom Navidrome endpoint or plugin
console.log('Would update album:', album);
}
processedItems++;
setProgress(Math.round((processedItems / totalItems) * 100));
}
}
// Apply track metadata
for (const track of tracks) {
if (track.status === 'matched') {
// To be implemented: Update track metadata via Navidrome API
// This requires a custom Navidrome endpoint or plugin
console.log('Would update track:', track);
// Alternatively, suggest implementing this feature using a separate
// script that interacts with music files directly
}
processedItems++;
setProgress(Math.round((processedItems / totalItems) * 100));
}
toast({
title: "Metadata Applied",
description: `Updated metadata for ${tracks.filter(t => t.status === 'matched').length} tracks`,
});
} catch (error) {
console.error('Failed to apply metadata:', error);
toast({
title: "Metadata Update Failed",
description: error instanceof Error ? error.message : "An unknown error occurred",
variant: "destructive",
});
} finally {
setIsProcessing(false);
}
}, [api, toast]);
return {
isProcessing,
progress,
enhancedTracks,
enhancedAlbums,
startAutoTagging,
applyEnhancedMetadata
};
}
/**
* Calculate similarity between two strings (0-1)
* Uses Levenshtein distance
*/
function calculateStringSimilarity(str1: string, str2: string): number {
// If either string is empty, return 0
if (!str1.length || !str2.length) {
return 0;
}
// If strings are identical, return 1
if (str1 === str2) {
return 1;
}
// Calculate Levenshtein distance
const distance = levenshteinDistance(str1, str2);
// Calculate similarity score
const maxLength = Math.max(str1.length, str2.length);
const similarity = 1 - distance / maxLength;
return similarity;
}
/**
* Calculate Levenshtein distance between two strings
*/
function levenshteinDistance(str1: string, str2: string): number {
const matrix: number[][] = [];
// Initialize matrix with row and column indices
for (let i = 0; i <= str1.length; i++) {
matrix[i] = [i];
}
for (let j = 0; j <= str2.length; j++) {
matrix[0][j] = j;
}
// Fill in the matrix
for (let i = 1; i <= str1.length; i++) {
for (let j = 1; j <= str2.length; j++) {
const cost = str1[i - 1] === str2[j - 1] ? 0 : 1;
matrix[i][j] = Math.min(
matrix[i - 1][j] + 1, // Deletion
matrix[i][j - 1] + 1, // Insertion
matrix[i - 1][j - 1] + cost // Substitution
);
}
}
return matrix[str1.length][str2.length];
}

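A small worked example of the confidence weighting used in enhanceTrack and enhanceAlbum (illustrative values; calculateStringSimilarity is the module-level helper defined above, and the hook normalizes both strings first):

// Illustrative numbers only; not part of the commit.
const titleSimilarity = calculateStringSimilarity('hey jude', 'hey jude remastered'); // 1 - 11/19 ≈ 0.42
const artistSimilarity = calculateStringSimilarity('the beatles', 'the beatles');     // identical strings short-circuit to 1
const confidence = Math.round((titleSimilarity * 0.6 + artistSimilarity * 0.4) * 100); // ≈ 65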

@@ -123,3 +123,85 @@ export function useOptimalImageSize(
const divisions = [60, 120, 240, 400, 600, 1200];
return divisions.find(size => size >= optimalSize) || 1200;
}
/**
* Extract dominant color from an image
* @param imageUrl - URL of the image to analyze
* @returns Promise that resolves to CSS color string (rgb format)
*/
export async function extractDominantColor(imageUrl: string): Promise<string> {
return new Promise((resolve, reject) => {
try {
const img = document.createElement('img');
img.crossOrigin = 'anonymous';
img.onload = () => {
try {
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
if (!ctx) {
resolve('rgb(25, 25, 25)'); // Fallback dark color
return;
}
canvas.width = img.width;
canvas.height = img.height;
ctx.drawImage(img, 0, 0);
// Simple dominant color extraction
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
const data = imageData.data;
let r = 0, g = 0, b = 0;
// Sample points across the image (for performance, not using all pixels)
const sampleSize = Math.max(1, Math.floor(data.length / 4000));
let sampleCount = 0;
for (let i = 0; i < data.length; i += 4 * sampleSize) {
r += data[i];
g += data[i + 1];
b += data[i + 2];
sampleCount++;
}
r = Math.floor(r / sampleCount);
g = Math.floor(g / sampleCount);
b = Math.floor(b / sampleCount);
// Adjust brightness to ensure readability
const brightness = (r * 299 + g * 587 + b * 114) / 1000;
// For very light colors, darken them
if (brightness > 200) {
const darkFactor = 0.7;
r = Math.floor(r * darkFactor);
g = Math.floor(g * darkFactor);
b = Math.floor(b * darkFactor);
}
// For very dark colors, lighten them slightly
if (brightness < 50) {
const lightFactor = 1.3;
r = Math.min(255, Math.floor(r * lightFactor));
g = Math.min(255, Math.floor(g * lightFactor));
b = Math.min(255, Math.floor(b * lightFactor));
}
resolve(`rgb(${r}, ${g}, ${b})`);
} catch (error) {
console.error('Error extracting color:', error);
resolve('rgb(25, 25, 25)'); // Fallback dark color
}
};
img.onerror = () => {
resolve('rgb(25, 25, 25)'); // Fallback dark color
};
img.src = imageUrl;
} catch (error) {
console.error('Error loading image for color extraction:', error);
resolve('rgb(25, 25, 25)'); // Fallback dark color
}
});
}

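A usage sketch for extractDominantColor, mirroring the rgb-to-rgba conversion the mini player applies to its background (the helper and target element here are hypothetical):

// Hypothetical usage (not part of this commit): tint a container with the album art's dominant color.
import { extractDominantColor } from '@/lib/image-utils';

async function applyCoverTint(element: HTMLElement, coverArtUrl: string) {
  const color = await extractDominantColor(coverArtUrl); // e.g. "rgb(120, 80, 60)"; falls back to "rgb(25, 25, 25)"
  // Same rgb -> rgba conversion the mini player uses for its translucent background
  element.style.backgroundColor = color.replace('rgb', 'rgba').replace(')', ', 0.15)');
}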
lib/musicbrainz-api.ts (new file, 347 lines)

@@ -0,0 +1,347 @@
/**
* MusicBrainz API client for the auto-tagging feature
*
* This module provides functions to search and fetch metadata from MusicBrainz,
* which is an open music encyclopedia that collects music metadata.
*/
// Define the User-Agent string as per MusicBrainz API guidelines
// https://musicbrainz.org/doc/MusicBrainz_API/Rate_Limiting#User-Agent
const USER_AGENT = 'mice/1.0.0 (https://github.com/sillyangel/mice)';
// Base URL for MusicBrainz API
const API_BASE_URL = 'https://musicbrainz.org/ws/2';
// Add a delay between requests to comply with MusicBrainz rate limiting
const RATE_LIMIT_DELAY = 1100; // Slightly more than 1 second to be safe
// Queue for API requests to ensure proper rate limiting
const requestQueue: (() => Promise<unknown>)[] = [];
let isProcessingQueue = false;
/**
* Process the request queue with proper rate limiting
*/
async function processQueue() {
if (isProcessingQueue || requestQueue.length === 0) return;
isProcessingQueue = true;
while (requestQueue.length > 0) {
const request = requestQueue.shift();
if (request) {
try {
await request();
} catch (error) {
console.error('MusicBrainz API request failed:', error);
}
// Wait before processing the next request
await new Promise(resolve => setTimeout(resolve, RATE_LIMIT_DELAY));
}
}
isProcessingQueue = false;
}
/**
* Make a rate-limited request to the MusicBrainz API
*/
async function makeRequest<T>(endpoint: string, params: Record<string, string> = {}): Promise<T> {
return new Promise<T>((resolve, reject) => {
const requestFn = async () => {
try {
const url = new URL(`${API_BASE_URL}${endpoint}`);
// Add format parameter
url.searchParams.append('fmt', 'json');
// Add other parameters
Object.entries(params).forEach(([key, value]) => {
url.searchParams.append(key, value);
});
const response = await fetch(url.toString(), {
headers: {
'User-Agent': USER_AGENT
}
});
if (!response.ok) {
throw new Error(`MusicBrainz API error: ${response.status} ${response.statusText}`);
}
const data = await response.json();
resolve(data as T);
} catch (error) {
reject(error);
}
};
// Add request to queue
requestQueue.push(requestFn);
processQueue();
});
}
/**
* Search for releases (albums) in MusicBrainz
*/
export async function searchReleases(query: string, limit: number = 10): Promise<MusicBrainzRelease[]> {
try {
interface ReleaseSearchResult {
releases: MusicBrainzRelease[];
}
const data = await makeRequest<ReleaseSearchResult>('/release', {
query,
limit: limit.toString()
});
return data.releases || [];
} catch (error) {
console.error('Failed to search releases:', error);
return [];
}
}
/**
* Search for recordings (tracks) in MusicBrainz
*/
export async function searchRecordings(query: string, limit: number = 10): Promise<MusicBrainzRecording[]> {
try {
interface RecordingSearchResult {
recordings: MusicBrainzRecording[];
}
const data = await makeRequest<RecordingSearchResult>('/recording', {
query,
limit: limit.toString()
});
return data.recordings || [];
} catch (error) {
console.error('Failed to search recordings:', error);
return [];
}
}
/**
* Get detailed information about a release by its MBID
*/
export async function getRelease(mbid: string): Promise<MusicBrainzReleaseDetails | null> {
try {
// Request with recording-level relationships to get track-level data
const data = await makeRequest<MusicBrainzReleaseDetails>(`/release/${mbid}`, {
inc: 'recordings+artists+labels+artist-credits'
});
return data;
} catch (error) {
console.error(`Failed to get release ${mbid}:`, error);
return null;
}
}
/**
* Get detailed information about a recording by its MBID
*/
export async function getRecording(mbid: string): Promise<MusicBrainzRecordingDetails | null> {
try {
const data = await makeRequest<MusicBrainzRecordingDetails>(`/recording/${mbid}`, {
inc: 'artists+releases+artist-credits'
});
return data;
} catch (error) {
console.error(`Failed to get recording ${mbid}:`, error);
return null;
}
}
/**
* Find the best matching release for the given album information
 * Searches by album and artist, preferring a release whose track count matches
*/
export async function findBestMatchingRelease(
albumName: string,
artistName: string,
trackCount?: number
): Promise<MusicBrainzRelease | null> {
try {
// Build a search query with both album and artist
const query = `release:"${albumName}" AND artist:"${artistName}"`;
const releases = await searchReleases(query, 5);
if (!releases || releases.length === 0) {
return null;
}
// If track count is provided, prioritize releases with the same track count
if (trackCount !== undefined) {
const exactTrackCountMatch = releases.find(release =>
release['track-count'] === trackCount
);
if (exactTrackCountMatch) {
return exactTrackCountMatch;
}
}
// Just return the first result as it's likely the best match
return releases[0];
} catch (error) {
console.error('Failed to find matching release:', error);
return null;
}
}
/**
* Find the best matching recording for the given track information
*/
export async function findBestMatchingRecording(
trackName: string,
artistName: string,
duration?: number // in milliseconds
): Promise<MusicBrainzRecording | null> {
try {
// Build a search query with both track and artist
const query = `recording:"${trackName}" AND artist:"${artistName}"`;
const recordings = await searchRecordings(query, 5);
if (!recordings || recordings.length === 0) {
return null;
}
// If duration is provided, try to find a close match
if (duration !== undefined) {
// Convert to milliseconds if not already (MusicBrainz uses milliseconds)
const durationMs = duration < 1000 ? duration * 1000 : duration;
// Find recording with the closest duration (within 5 seconds)
const durationMatches = recordings.filter(recording => {
if (!recording.length) return false;
return Math.abs(recording.length - durationMs) < 5000; // 5 second tolerance
});
if (durationMatches.length > 0) {
return durationMatches[0];
}
}
// Just return the first result as it's likely the best match
return recordings[0];
} catch (error) {
console.error('Failed to find matching recording:', error);
return null;
}
}
// Type definitions for MusicBrainz API responses
export interface MusicBrainzRelease {
id: string; // MBID
title: string;
'artist-credit': Array<{
artist: {
id: string;
name: string;
};
name: string;
}>;
date?: string;
country?: string;
'track-count': number;
status?: string;
disambiguation?: string;
}
export interface MusicBrainzReleaseDetails extends MusicBrainzRelease {
media: Array<{
position: number;
format?: string;
tracks: Array<{
position: number;
number: string;
title: string;
length?: number;
recording: {
id: string;
title: string;
length?: number;
};
}>;
}>;
'cover-art-archive'?: {
artwork: boolean;
count: number;
front: boolean;
back: boolean;
};
barcode?: string;
'release-group'?: {
id: string;
'primary-type'?: string;
};
}
export interface MusicBrainzRecording {
id: string; // MBID
title: string;
length?: number; // in milliseconds
'artist-credit': Array<{
artist: {
id: string;
name: string;
};
name: string;
}>;
releases?: Array<{
id: string;
title: string;
}>;
isrcs?: string[];
}
export interface MusicBrainzRecordingDetails extends MusicBrainzRecording {
disambiguation?: string;
'first-release-date'?: string;
genres?: Array<{
id: string;
name: string;
}>;
tags?: Array<{
count: number;
name: string;
}>;
}
// Cover art functions
// MusicBrainz has a separate API for cover art: Cover Art Archive
export function getCoverArtUrl(releaseId: string, size: 'small' | 'large' | '500' | 'full' = 'large'): string {
return `https://coverartarchive.org/release/${releaseId}/front-${size}`;
}
// Utility function to normalize strings for comparison
export function normalizeString(input: string): string {
return input
.toLowerCase()
.replace(/[^\w\s]/g, '') // Remove special characters
.replace(/\s+/g, ' ') // Replace multiple spaces with a single space
.trim();
}
// Export the MusicBrainz client as a singleton
const MusicBrainzClient = {
searchReleases,
searchRecordings,
getRelease,
getRecording,
findBestMatchingRelease,
findBestMatchingRecording,
getCoverArtUrl,
normalizeString
};
export default MusicBrainzClient;

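A usage sketch of the client (album and artist names are placeholders). Every call goes through the internal queue, so back-to-back requests are spaced roughly 1.1 seconds apart:

// Hypothetical usage (not part of this commit).
import MusicBrainzClient from '@/lib/musicbrainz-api';

async function lookupAlbum(albumName: string, artistName: string) {
  const release = await MusicBrainzClient.findBestMatchingRelease(albumName, artistName);
  if (!release) return null;

  // Queued behind the search, so it runs roughly 1.1s later
  const details = await MusicBrainzClient.getRelease(release.id);
  const coverUrl = details?.['cover-art-archive']?.front
    ? MusicBrainzClient.getCoverArtUrl(release.id)
    : undefined;

  return { release, details, coverUrl };
}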

@@ -215,12 +215,21 @@ class NavidromeAPI {
}
async getArtist(artistId: string): Promise<{ artist: Artist; albums: Album[] }> {
const response = await this.makeRequest('getArtist', { id: artistId });
const artistData = response.artist as Artist & { album?: Album[] };
return {
artist: artistData,
albums: artistData.album || []
};
try {
const response = await this.makeRequest('getArtist', { id: artistId });
// Check if artist data exists
if (!response.artist) {
throw new Error('Artist not found in response');
}
const artistData = response.artist as Artist & { album?: Album[] };
return {
artist: artistData,
albums: artistData.album || []
};
} catch (error) {
console.error('Navidrome API request failed:', error);
throw new Error('Artist not found');
}
}
async getAlbums(type?: 'newest' | 'recent' | 'frequent' | 'random' | 'alphabeticalByName' | 'alphabeticalByArtist' | 'starred' | 'highest', size: number = 500, offset: number = 0): Promise<Album[]> {


@@ -12,6 +12,8 @@ const nextConfig = {
hostname: "**",
}
],
minimumCacheTTL: 60,
// unoptimized: true,
},
async headers() {
return [
@@ -69,6 +71,7 @@ const nextConfig = {
},
// This is required to support PostHog trailing slash API requests
skipTrailingSlashRedirect: true,
};
export default nextConfig;