update JSMpegPlayer

This commit is contained in:
NlightN22 2025-02-22 16:18:14 +07:00
parent affe29fa10
commit 7ed8afc670
9 changed files with 546 additions and 84 deletions

View File

@ -25,6 +25,7 @@
"@types/validator": "^13.7.17",
"axios": "^1.4.0",
"bson-objectid": "^2.0.4",
"clsx": "^2.1.1",
"cookies-next": "^4.1.1",
"cpr": "^3.0.1",
"date-fns": "^3.3.1",
@ -55,6 +56,7 @@
"react-scripts": "5.0.1",
"react-use-websocket": "^4.7.0",
"strftime": "0.10.1",
"tailwind-merge": "^3.0.1",
"typescript": "^4.4.2",
"validator": "^13.9.0",
"video.js": "^8.10.0",

View File

@ -27,7 +27,11 @@ const LiveCameraPage = () => {
return (
<Flex w='100%' h='100%' justify='center' align='center' direction='column'>
<CameraPageHeader camera={camera} editButton />
<Player camera={camera} />
<Player
camera={camera}
useWebGL={true}
preferredLiveMode='jsmpeg'
/>
</Flex>
);
}

View File

@ -131,7 +131,7 @@ export type GetFrigateHost = z.infer<typeof getFrigateHostSchema>
export type GetFrigateHostWConfig = GetFrigateHost & { config: FrigateConfig }
export type GetCamera = z.infer<typeof getCameraSchema>
export type GetCameraWHost = z.infer<typeof getCameraWithHostSchema>
export type GetCameraWHostWConfig = GetCameraWHost & { config?: CameraConfig }
export type GetCameraWHostWConfig = GetCameraWHost & { config: CameraConfig }
export type PutFrigateHost = z.infer<typeof putFrigateHostSchema>
export type DeleteFrigateHost = z.infer<typeof deleteFrigateHostSchema>
export type GetRole = z.infer<typeof getRoleSchema>

View File

@ -1,74 +1,249 @@
// @ts-ignore we know this doesn't have types
import JSMpeg from "@cycjimmy/jsmpeg-player";
import { useViewportSize } from "@mantine/hooks";
import { useEffect, useRef, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import { cn } from "../../utils/class.merge";
import { PlayerStatsType } from "../../../types/live";
type JSMpegPlayerProps = {
wsUrl: string;
cameraHeight?: number,
cameraWidth?: number,
url: string;
camera: string
className?: string;
width: number;
height: number;
containerRef: React.MutableRefObject<HTMLDivElement | null>;
playbackEnabled: boolean;
useWebGL: boolean;
setStats?: (stats: PlayerStatsType) => void;
onPlaying?: () => void;
};
const JSMpegPlayer = (
{
wsUrl,
cameraWidth = 1200,
cameraHeight = 800,
url,
camera,
width,
height,
className,
containerRef,
playbackEnabled,
useWebGL = false,
setStats,
onPlaying,
}: JSMpegPlayerProps
) => {
const { t } = useTranslation()
const playerRef = useRef<HTMLDivElement>(null);
const [playerInitialized, setPlayerInitialized] = useState(false)
const videoRef = useRef<HTMLDivElement>(null);
const canvasRef = useRef<HTMLCanvasElement>(null);
const internalContainerRef = useRef<HTMLDivElement | null>(null);
const onPlayingRef = useRef(onPlaying);
const [showCanvas, setShowCanvas] = useState(false);
const [hasData, setHasData] = useState(false);
const hasDataRef = useRef(hasData);
const [dimensionsReady, setDimensionsReady] = useState(false);
const bytesReceivedRef = useRef(0);
const lastTimestampRef = useRef(Date.now());
const statsIntervalRef = useRef<NodeJS.Timeout | null>(null);
const { height: maxHeight, width: maxWidth } = useViewportSize()
useEffect(() => {
const video = new JSMpeg.VideoElement(
playerRef.current,
wsUrl,
{},
{ protocols: [], audio: false, videoBufferSize: 1024 * 1024 * 4 }
const selectedContainerRef = useMemo(
() => (containerRef.current ? containerRef : internalContainerRef),
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
[containerRef, containerRef.current, internalContainerRef],
);
const toggleFullscreen = () => {
const canvas = video.els.canvas;
if (!document.fullscreenElement && !(document as any).webkitFullscreenElement) { // Use bracket notation for webkit
// Enter fullscreen
if (canvas.requestFullscreen) {
canvas.requestFullscreen();
} else if ((canvas as any).webkitRequestFullScreen) { // Use bracket notation for webkit
(canvas as any).webkitRequestFullScreen();
} else if (canvas.mozRequestFullScreen) {
canvas.mozRequestFullScreen();
}
} else {
// Exit fullscreen
if (document.exitFullscreen) {
document.exitFullscreen();
} else if ((document as any).webkitExitFullscreen) { // Use bracket notation for webkit
(document as any).webkitExitFullscreen();
} else if ((document as any).mozCancelFullScreen) {
(document as any).mozCancelFullScreen();
}
}
};
const { height: containerHeight, width: containerWidth } = useViewportSize()
video.els.canvas.addEventListener('dblclick', toggleFullscreen);
const stretch = true;
const aspectRatio = width / height;
const fitAspect = useMemo(
() => containerWidth / containerHeight,
[containerWidth, containerHeight],
);
const scaledHeight = useMemo(() => {
if (selectedContainerRef?.current && width && height) {
const scaledHeight =
aspectRatio < (fitAspect ?? 0)
? Math.floor(
Math.min(
containerHeight,
selectedContainerRef.current?.clientHeight,
),
)
: aspectRatio >= fitAspect
? Math.floor(containerWidth / aspectRatio)
: Math.floor(containerWidth / aspectRatio) / 1.5;
const finalHeight = stretch
? scaledHeight
: Math.min(scaledHeight, height);
if (finalHeight > 0) {
return finalHeight;
}
}
return undefined;
}, [
aspectRatio,
containerWidth,
containerHeight,
fitAspect,
height,
width,
stretch,
selectedContainerRef,
]);
const scaledWidth = useMemo(() => {
if (aspectRatio && scaledHeight) {
return Math.ceil(scaledHeight * aspectRatio);
}
return undefined;
}, [scaledHeight, aspectRatio]);
useEffect(() => {
if (scaledWidth && scaledHeight) {
setDimensionsReady(true);
}
}, [scaledWidth, scaledHeight]);
useEffect(() => {
onPlayingRef.current = onPlaying;
}, [onPlaying]);
useEffect(() => {
if (!selectedContainerRef?.current || !url) {
return;
}
const videoWrapper = videoRef.current;
const canvas = canvasRef.current;
let videoElement: JSMpeg.VideoElement | null = null;
let frameCount = 0;
setHasData(false);
if (videoWrapper && playbackEnabled) {
// Delayed init to avoid issues with react strict mode
const initPlayer = setTimeout(() => {
videoElement = new JSMpeg.VideoElement(
videoWrapper,
url,
{ canvas: canvas },
{
protocols: [],
audio: false,
disableGl: !useWebGL,
disableWebAssembly: !useWebGL,
videoBufferSize: 1024 * 1024 * 4,
onVideoDecode: () => {
if (!hasDataRef.current) {
setHasData(true);
onPlayingRef.current?.();
}
frameCount++;
},
},
);
// Set up WebSocket message handler
if (
videoElement.player &&
videoElement.player.source &&
videoElement.player.source.socket
) {
const socket = videoElement.player.source.socket;
socket.addEventListener("message", (event: MessageEvent) => {
if (event.data instanceof ArrayBuffer) {
bytesReceivedRef.current += event.data.byteLength;
}
});
}
// Update stats every second
statsIntervalRef.current = setInterval(() => {
const currentTimestamp = Date.now();
const timeDiff = (currentTimestamp - lastTimestampRef.current) / 1000; // in seconds
const bitrate = (bytesReceivedRef.current * 8) / timeDiff / 1000; // in kbps
setStats?.({
streamType: "jsmpeg",
bandwidth: Math.round(bitrate),
totalFrames: frameCount,
latency: undefined,
droppedFrames: undefined,
decodedFrames: undefined,
droppedFrameRate: undefined,
});
bytesReceivedRef.current = 0;
lastTimestampRef.current = currentTimestamp;
}, 1000);
return () => {
video.destroy();
video.els.canvas.removeEventListener('dblclick', toggleFullscreen);
if (statsIntervalRef.current) {
clearInterval(statsIntervalRef.current);
frameCount = 0;
statsIntervalRef.current = null;
}
};
}, [wsUrl]);
}, 0);
return () => {
clearTimeout(initPlayer);
if (statsIntervalRef.current) {
clearInterval(statsIntervalRef.current);
statsIntervalRef.current = null;
}
if (videoElement) {
try {
// this causes issues in react strict mode
// https://stackoverflow.com/questions/76822128/issue-with-cycjimmy-jsmpeg-player-in-react-18-cannot-read-properties-of-null-o
videoElement.destroy();
// eslint-disable-next-line no-empty
} catch (e) {}
}
};
}
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [playbackEnabled, url]);
useEffect(() => {
setShowCanvas(hasData && dimensionsReady);
}, [hasData, dimensionsReady]);
useEffect(() => {
hasDataRef.current = hasData;
}, [hasData]);
return (
<div className={cn(className, !containerRef.current && "size-full")}>
<div
ref={playerRef}
key={wsUrl}
title={t('player.doubleClickToFullHint')}
style={{ width: cameraWidth, height: cameraHeight, maxWidth: maxWidth, maxHeight: maxHeight - 100, }} />
)
};
className="internal-jsmpeg-container size-full"
ref={internalContainerRef}
>
<div
ref={videoRef}
className={cn(
"jsmpeg flex h-full w-auto items-center justify-center",
!showCanvas && "hidden",
)}
>
<canvas
ref={canvasRef}
className="rounded-lg md:rounded-2xl"
style={{
width: scaledWidth,
height: scaledHeight,
}}
></canvas>
</div>
</div>
</div>
);
}
export default JSMpegPlayer

View File

@ -1,25 +1,46 @@
import { useCallback, useEffect, useRef } from "react";
import { useCallback, useEffect, useRef, useState } from "react";
import { LivePlayerError, PlayerStatsType } from "../../../types/live";
type WebRtcPlayerProps = {
className?: string;
camera: string;
wsURI: string;
className?: string;
playbackEnabled?: boolean;
onPlaying?: () => void,
wsUrl: string
audioEnabled?: boolean;
volume?: number;
microphoneEnabled?: boolean;
iOSCompatFullScreen?: boolean; // ios doesn't support fullscreen divs so we must support the video element
pip?: boolean;
getStats?: boolean;
setStats?: (stats: PlayerStatsType) => void;
onPlaying?: () => void;
onError?: (error: LivePlayerError) => void;
};
export default function WebRtcPlayer({
className,
camera,
wsURI,
className,
playbackEnabled = true,
audioEnabled = false,
volume,
microphoneEnabled = false,
iOSCompatFullScreen = false,
pip = false,
getStats = false,
setStats,
onPlaying,
wsUrl
onError,
}: WebRtcPlayerProps) {
// camera states
const pcRef = useRef<RTCPeerConnection | undefined>();
const videoRef = useRef<HTMLVideoElement | null>(null);
const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
const videoLoadTimeoutRef = useRef<NodeJS.Timeout>();
const PeerConnection = useCallback(
async (media: string) => {
if (!videoRef.current) {
@ -27,6 +48,7 @@ export default function WebRtcPlayer({
}
const pc = new RTCPeerConnection({
bundlePolicy: "max-bundle",
iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
});
@ -59,7 +81,7 @@ export default function WebRtcPlayer({
.filter((kind) => media.indexOf(kind) >= 0)
.map(
(kind) =>
pc.addTransceiver(kind, { direction: "recvonly" }).receiver.track
pc.addTransceiver(kind, { direction: "recvonly" }).receiver.track,
);
localTracks.push(...tracks);
}
@ -67,12 +89,12 @@ export default function WebRtcPlayer({
videoRef.current.srcObject = new MediaStream(localTracks);
return pc;
},
[videoRef]
[videoRef],
);
async function getMediaTracks(
media: string,
constraints: MediaStreamConstraints
constraints: MediaStreamConstraints,
) {
try {
const stream =
@ -86,12 +108,13 @@ export default function WebRtcPlayer({
}
const connect = useCallback(
async (ws: WebSocket, aPc: Promise<RTCPeerConnection | undefined>) => {
async (aPc: Promise<RTCPeerConnection | undefined>) => {
if (!aPc) {
return;
}
pcRef.current = await aPc;
const ws = new WebSocket(wsURI);
ws.addEventListener("open", () => {
pcRef.current?.addEventListener("icecandidate", (ev) => {
@ -127,7 +150,7 @@ export default function WebRtcPlayer({
}
});
},
[]
[wsURI],
);
useEffect(() => {
@ -139,13 +162,10 @@ export default function WebRtcPlayer({
return;
}
// const url = `${baseUrl.replace(
// /^http/,
// "ws"
// )}live/webrtc/api/ws?src=${camera}`;
const ws = new WebSocket(wsUrl);
const aPc = PeerConnection("video+audio");
connect(ws, aPc);
const aPc = PeerConnection(
microphoneEnabled ? "video+audio+microphone" : "video+audio",
);
connect(aPc);
return () => {
if (pcRef.current) {
@ -153,16 +173,174 @@ export default function WebRtcPlayer({
pcRef.current = undefined;
}
};
}, [camera, connect, PeerConnection, pcRef, videoRef, playbackEnabled, wsUrl]);
}, [
camera,
wsURI,
connect,
PeerConnection,
pcRef,
videoRef,
playbackEnabled,
microphoneEnabled,
]);
// ios compat
const [iOSCompatControls, setiOSCompatControls] = useState(false);
useEffect(() => {
if (!videoRef.current || !pip) {
return;
}
videoRef.current.requestPictureInPicture();
}, [pip, videoRef]);
// control volume
useEffect(() => {
if (!videoRef.current || volume == undefined) {
return;
}
videoRef.current.volume = volume;
}, [volume, videoRef]);
useEffect(() => {
videoLoadTimeoutRef.current = setTimeout(() => {
onError?.("stalled");
}, 5000);
return () => {
if (videoLoadTimeoutRef.current) {
clearTimeout(videoLoadTimeoutRef.current);
}
};
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const handleLoadedData = () => {
if (videoLoadTimeoutRef.current) {
clearTimeout(videoLoadTimeoutRef.current);
}
onPlaying?.();
};
useEffect(() => {
if (!pcRef.current || !getStats) return;
let lastBytesReceived = 0;
let lastTimestamp = 0;
const interval = setInterval(async () => {
if (pcRef.current && videoRef.current && !videoRef.current.paused) {
const report = await pcRef.current.getStats();
let bytesReceived = 0;
let timestamp = 0;
let roundTripTime = 0;
let framesReceived = 0;
let framesDropped = 0;
let framesDecoded = 0;
report.forEach((stat) => {
if (stat.type === "inbound-rtp" && stat.kind === "video") {
bytesReceived = stat.bytesReceived;
timestamp = stat.timestamp;
framesReceived = stat.framesReceived;
framesDropped = stat.framesDropped;
framesDecoded = stat.framesDecoded;
}
if (stat.type === "candidate-pair" && stat.state === "succeeded") {
roundTripTime = stat.currentRoundTripTime;
}
});
const timeDiff = (timestamp - lastTimestamp) / 1000; // in seconds
const bitrate =
timeDiff > 0
? (bytesReceived - lastBytesReceived) / timeDiff / 1000
: 0; // in kbps
setStats?.({
streamType: "WebRTC",
bandwidth: Math.round(bitrate),
latency: roundTripTime,
totalFrames: framesReceived,
droppedFrames: framesDropped,
decodedFrames: framesDecoded,
droppedFrameRate:
framesReceived > 0 ? (framesDropped / framesReceived) * 100 : 0,
});
lastBytesReceived = bytesReceived;
lastTimestamp = timestamp;
}
}, 1000);
return () => {
clearInterval(interval);
setStats?.({
streamType: "-",
bandwidth: 0,
latency: undefined,
totalFrames: 0,
droppedFrames: undefined,
decodedFrames: 0,
droppedFrameRate: 0,
});
};
// we need to listen on the value of the ref
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pcRef, pcRef.current, getStats]);
return (
<video
ref={videoRef}
className={className}
controls={iOSCompatControls}
autoPlay
playsInline
muted
onLoadedData={onPlaying}
muted={!audioEnabled}
onLoadedData={handleLoadedData}
onProgress={
onError != undefined
? () => {
if (videoRef.current?.paused) {
return;
}
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
setBufferTimeout(
setTimeout(() => {
if (
document.visibilityState === "visible" &&
pcRef.current != undefined
) {
onError("stalled");
}
}, 3000),
);
}
: undefined
}
onClick={
iOSCompatFullScreen
? () => setiOSCompatControls(!iOSCompatControls)
: undefined
}
onError={(e) => {
if (
// @ts-expect-error code does exist
e.target.error.code == MediaError.MEDIA_ERR_NETWORK
) {
onError?.("startup");
}
}}
/>
);
}

View File

@ -0,0 +1,6 @@
import { type ClassValue, clsx } from "clsx";
import { twMerge } from "tailwind-merge";
/**
 * Combine arbitrary class-name inputs (strings, arrays, conditional
 * objects) via clsx, then pass the result through tailwind-merge so
 * that conflicting Tailwind utility classes are deduplicated with
 * later classes winning.
 */
export function cn(...inputs: ClassValue[]) {
  const combined = clsx(inputs);
  return twMerge(combined);
}

View File

@ -1 +1,48 @@
// Transport used to deliver the live camera stream to the player.
export type LivePlayerMode = "webrtc" | "mse" | "jsmpeg" | "debug";
// Pixel dimensions of a video stream.
export type VideoResolutionType = {
width: number;
height: number;
};
// Metadata for one stream producer (source side of a relay).
// NOTE(review): field names (sdp, remote_addr, user_agent) suggest this
// mirrors a go2rtc/WebRTC streams API response — confirm against the backend.
type LiveProducerMetadata = {
type: string;
url: string;
remote_addr: string;
user_agent: string;
sdp: string;
medias?: string[];
receivers?: string[];
recv: number;
};
// Metadata for one stream consumer (viewer side of a relay).
type LiveConsumerMetadata = {
type: string;
url: string;
remote_addr: string;
user_agent: string;
sdp: string;
medias?: string[];
senders?: string[];
send: number;
};
// Aggregate producer/consumer metadata for a live stream.
export type LiveStreamMetadata = {
producers: LiveProducerMetadata[];
consumers: LiveConsumerMetadata[];
};
// Error categories a live player can report to its parent component.
export type LivePlayerError = "stalled" | "startup" | "mse-decode";
// Per-camera UI state maps, keyed by camera name.
export type AudioState = Record<string, boolean>;
export type StatsState = Record<string, boolean>;
export type VolumeState = Record<string, number>;
// Runtime statistics surfaced by a player implementation.
// Fields that a given transport cannot measure are left undefined
// (e.g. jsmpeg reports no latency or dropped-frame data).
export type PlayerStatsType = {
streamType: string;
bandwidth: number; // kbps
latency: number | undefined; // seconds
totalFrames: number;
droppedFrames: number | undefined;
decodedFrames: number | undefined;
droppedFrameRate: number | undefined; // percentage
};

View File

@ -1,4 +1,4 @@
import { useEffect, useMemo, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import useCameraActivity from '../hooks/use-camera-activity';
import useCameraLiveMode from '../hooks/use-camera-live-mode';
import { proxyApi } from '../services/frigate.proxy/frigate.api';
@ -6,11 +6,14 @@ import { GetCameraWHostWConfig } from '../services/frigate.proxy/frigate.schema'
import JSMpegPlayer from '../shared/components/players/JSMpegPlayer';
import MSEPlayer from '../shared/components/players/MsePlayer';
import WebRtcPlayer from '../shared/components/players/WebRTCPlayer';
import { LivePlayerMode } from '../types/live';
import { LivePlayerMode, PlayerStatsType } from '../types/live';
import { isProduction } from '../shared/env.const';
type LivePlayerProps = {
camera: GetCameraWHostWConfig;
cameraRef?: (ref: HTMLDivElement | null) => void;
useWebGL: boolean;
containerRef?: React.MutableRefObject<HTMLDivElement | null>;
preferredLiveMode?: LivePlayerMode;
showStillWithoutActivity?: boolean;
windowVisible?: boolean;
@ -18,20 +21,42 @@ type LivePlayerProps = {
const Player = ({
camera,
cameraRef = undefined,
useWebGL = false,
showStillWithoutActivity = true,
containerRef,
preferredLiveMode,
windowVisible = true,
}: LivePlayerProps) => {
const internalContainerRef = useRef<HTMLDivElement | null>(null);
// stats
const [stats, setStats] = useState<PlayerStatsType>({
streamType: "-",
bandwidth: 0, // in kbps
latency: undefined, // in seconds
totalFrames: 0,
droppedFrames: undefined,
decodedFrames: 0,
droppedFrameRate: 0, // percentage
});
const hostNameWPort = camera.frigateHost ? new URL(camera.frigateHost.host).host : ''
const wsUrl = proxyApi.cameraWsURL(hostNameWPort, camera.name)
const cameraConfig = camera.config!
const [key, setKey] = useState(0);
const { activeMotion, activeTracking } =
useCameraActivity(cameraConfig);
const cameraActive = useMemo(
() => windowVisible && (activeMotion || activeTracking),
[activeMotion, activeTracking, windowVisible]
() =>
!showStillWithoutActivity ||
(windowVisible && (activeMotion || activeTracking)),
[activeMotion, activeTracking, showStillWithoutActivity, windowVisible],
);
// camera live state
@ -52,7 +77,12 @@ const Player = ({
}
}, [cameraActive, liveReady, liveMode]);
if (!isProduction) console.log(`liveMode: `, liveMode)
const playerIsPlaying = useCallback(() => {
setLiveReady(true);
}, []);
let player;
if (liveMode === "webrtc") {
player = (
@ -61,7 +91,7 @@ const Player = ({
camera={cameraConfig.live.stream_name}
playbackEnabled={cameraActive}
onPlaying={() => setLiveReady(true)}
wsUrl={wsUrl}
wsURI={wsUrl}
/>
);
} else if (liveMode === "mse") {
@ -69,7 +99,7 @@ const Player = ({
player = (
<MSEPlayer
className={`rounded-2xl h-full ${liveReady ? "" : "hidden"}`}
camera='Not yet implemented' // TODO implement player
camera='Not yet implemented' // TODO implement MSE player with audio
playbackEnabled={cameraActive}
onPlaying={() => setLiveReady(true)}
wsUrl={wsUrl}
@ -86,9 +116,19 @@ const Player = ({
} else if (liveMode === "jsmpeg") {
player = (
<JSMpegPlayer
wsUrl={wsUrl}
cameraWidth={camera.config?.detect.width}
cameraHeight={camera.config?.detect.height}
key={"jsmpeg_" + key}
url={wsUrl}
camera={camera.config.name}
className="flex justify-center overflow-hidden rounded-lg md:rounded-2xl"
width={camera.config.detect.width}
height={camera.config.detect.height}
playbackEnabled={
showStillWithoutActivity
}
containerRef={containerRef ?? internalContainerRef}
useWebGL={useWebGL}
setStats={setStats}
onPlaying={playerIsPlaying}
/>
);
}

View File

@ -3768,6 +3768,11 @@ clsx@1.1.1:
resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188"
integrity sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA==
clsx@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==
co@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
@ -9861,6 +9866,11 @@ tabbable@^6.0.1:
resolved "https://registry.yarnpkg.com/tabbable/-/tabbable-6.2.0.tgz#732fb62bc0175cfcec257330be187dcfba1f3b97"
integrity sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==
tailwind-merge@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-3.0.1.tgz#0f0189966511ebcd63ef98d9eaf5607beb0d59d3"
integrity sha512-AvzE8FmSoXC7nC+oU5GlQJbip2UO7tmOhOfQyOmPhrStOGXHU08j8mZEHZ4BmCqY5dWTCo4ClWkNyRNx1wpT0g==
tailwindcss@^3.0.2:
version "3.4.1"
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.4.1.tgz#f512ca5d1dd4c9503c7d3d28a968f1ad8f5c839d"