Merge branch 'main' of https://github.com/GamerBoss101/HooHacks-12
@@ -8,7 +8,6 @@ import logging
import numpy as np
import torch
import torchaudio
import whisperx
from io import BytesIO
from typing import List, Dict, Any, Optional
from flask import Flask, request, send_from_directory, Response
@@ -17,6 +16,8 @@ from flask_socketio import SocketIO, emit, disconnect
from generator import load_csm_1b, Segment
from collections import deque
from threading import Lock
from transformers import pipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Configure logging
logging.basicConfig(
@@ -25,68 +26,24 @@ logging.basicConfig(
)
logger = logging.getLogger("sesame-server")

# CUDA Environment Setup
def setup_cuda_environment():
    """Set up CUDA environment with proper error handling"""
    # Search for CUDA libraries in common locations
    cuda_lib_dirs = [
        "/usr/local/cuda/lib64",
        "/usr/lib/x86_64-linux-gnu",
        "/usr/local/cuda/extras/CUPTI/lib64"
    ]

    # Add directories to LD_LIBRARY_PATH if they exist
    current_ld_path = os.environ.get('LD_LIBRARY_PATH', '')
    for cuda_dir in cuda_lib_dirs:
        if os.path.exists(cuda_dir) and cuda_dir not in current_ld_path:
            if current_ld_path:
                os.environ['LD_LIBRARY_PATH'] = f"{current_ld_path}:{cuda_dir}"
            else:
                os.environ['LD_LIBRARY_PATH'] = cuda_dir
            current_ld_path = os.environ['LD_LIBRARY_PATH']

    logger.info(f"LD_LIBRARY_PATH set to: {os.environ.get('LD_LIBRARY_PATH', 'not set')}")

    # Determine best compute device
    device = "cpu"
    compute_type = "int8"

# Determine best compute device
if torch.backends.mps.is_available():
    device = "mps"
elif torch.cuda.is_available():
    try:
        # Set CUDA preferences
        os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # Limit to first GPU only

        # Try enabling TF32 precision if available
        try:
            torch.backends.cuda.matmul.allow_tf32 = True
            torch.backends.cudnn.allow_tf32 = True
            torch.backends.cudnn.enabled = True
            torch.backends.cudnn.benchmark = True
        except Exception as e:
            logger.warning(f"Could not set advanced CUDA options: {e}")

        # Test if CUDA is functional
        if torch.cuda.is_available():
            try:
                # Test basic CUDA operations
                x = torch.rand(10, device="cuda")
                y = x + x
                del x, y
                torch.cuda.empty_cache()
                device = "cuda"
                compute_type = "float16"
                logger.info("CUDA is fully functional")
            except Exception as e:
                logger.warning(f"CUDA available but not working correctly: {e}")
                device = "cpu"
        else:
            logger.info("CUDA is not available, using CPU")
        # Test CUDA functionality
        torch.rand(10, device="cuda")
        torch.backends.cuda.matmul.allow_tf32 = True
        torch.backends.cudnn.allow_tf32 = True
        torch.backends.cudnn.benchmark = True
        device = "cuda"
        logger.info("CUDA is fully functional")
    except Exception as e:
        logger.error(f"Error setting up computing environment: {e}")

    return device, compute_type

# Set up the compute environment
device, compute_type = setup_cuda_environment()
        logger.warning(f"CUDA available but not working correctly: {e}")
        device = "cpu"
else:
    device = "cpu"
    logger.info("Using CPU")

# Constants and Configuration
SILENCE_THRESHOLD = 0.01
@@ -99,73 +56,106 @@ base_dir = os.path.dirname(os.path.abspath(__file__))
static_dir = os.path.join(base_dir, "static")
os.makedirs(static_dir, exist_ok=True)

# Define a simple energy-based speech detector
class SpeechDetector:
    def __init__(self):
        self.min_speech_energy = 0.01
        self.speech_window = 0.2  # seconds

    def detect_speech(self, audio_tensor, sample_rate):
        # Calculate frame size based on window size
        frame_size = int(sample_rate * self.speech_window)

        # If audio is shorter than frame size, use the entire audio
        if audio_tensor.shape[0] < frame_size:
            frames = [audio_tensor]
        else:
            # Split audio into frames
            frames = [audio_tensor[i:i+frame_size] for i in range(0, len(audio_tensor), frame_size)]

        # Calculate energy per frame
        energies = [torch.mean(frame**2).item() for frame in frames]

        # Determine if there's speech based on energy threshold
        has_speech = any(e > self.min_speech_energy for e in energies)

        return has_speech

speech_detector = SpeechDetector()
logger.info("Initialized simple speech detector")
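
# Sketch for illustration only (not taken from this file): a quick smoke test for the
# energy detector above. The 16 kHz rate and the synthetic signals are assumptions.
_sr = 16000
_silence = 0.001 * torch.randn(_sr)  # ~1 s of near-silence, mean energy around 1e-6
_tone = 0.2 * torch.sin(2 * torch.pi * 220.0 * torch.arange(_sr) / _sr)  # ~1 s tone, mean energy ~0.02
print(speech_detector.detect_speech(_silence, _sr))  # expected: False (below 0.01 threshold)
print(speech_detector.detect_speech(_tone, _sr))     # expected: True  (above 0.01 threshold)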

# Model Loading Functions
def load_speech_models():
    """Load all required speech models with fallbacks"""
    # Load speech generation model (Sesame CSM)
    try:
        logger.info(f"Loading Sesame CSM model on {device}...")
        generator = load_csm_1b(device=device)
        logger.info("Sesame CSM model loaded successfully")
    except Exception as e:
        logger.error(f"Error loading Sesame CSM on {device}: {e}")
        if device == "cuda":
            try:
                logger.info("Trying to load Sesame CSM on CPU instead...")
                generator = load_csm_1b(device="cpu")
                logger.info("Sesame CSM model loaded on CPU successfully")
            except Exception as cpu_error:
                logger.critical(f"Failed to load speech synthesis model: {cpu_error}")
                raise RuntimeError("Failed to load speech synthesis model")
        else:
            raise RuntimeError("Failed to load speech synthesis model on any device")
    """Load speech generation and recognition models"""
    # Load CSM (existing code)
    generator = load_csm_1b(device=device)

    # Load ASR model (WhisperX)
    # Load Whisper model for speech recognition
    try:
        logger.info("Loading WhisperX model...")
        # Start with the tiny model on CPU for reliable initialization
        asr_model = whisperx.load_model("tiny", "cpu", compute_type="int8")
        logger.info("WhisperX 'tiny' model loaded on CPU successfully")

        # Try upgrading to GPU if available
        if device == "cuda":
            try:
                logger.info("Trying to load WhisperX on CUDA...")
                # Test with a tiny model first
                test_audio = torch.zeros(16000)  # 1 second of silence

                cuda_model = whisperx.load_model("tiny", "cuda", compute_type="float16")
                # Test the model with real inference
                _ = cuda_model.transcribe(test_audio.numpy(), batch_size=1)
                asr_model = cuda_model
                logger.info("WhisperX model running on CUDA successfully")

                # Try to upgrade to small model
                try:
                    small_model = whisperx.load_model("small", "cuda", compute_type="float16")
                    _ = small_model.transcribe(test_audio.numpy(), batch_size=1)
                    asr_model = small_model
                    logger.info("WhisperX 'small' model loaded on CUDA successfully")
                except Exception as e:
                    logger.warning(f"Staying with 'tiny' model on CUDA: {e}")
            except Exception as e:
                logger.warning(f"CUDA loading failed, staying with CPU model: {e}")
        logger.info(f"Loading speech recognition model on {device}...")
        speech_recognizer = pipeline("automatic-speech-recognition",
                                     model="openai/whisper-small",
                                     device=device)
        logger.info("Speech recognition model loaded successfully")
    except Exception as e:
        logger.error(f"Error loading WhisperX model: {e}")
        # Create a minimal dummy model as last resort
        class DummyModel:
            def __init__(self):
                self.device = "cpu"
            def transcribe(self, *args, **kwargs):
                return {"segments": [{"text": "Speech recognition currently unavailable."}]}

        asr_model = DummyModel()
        logger.warning("Using dummy transcription model - ASR functionality limited")
        logger.error(f"Error loading speech recognition model: {e}")
        speech_recognizer = None

    return generator, asr_model
    return generator, speech_recognizer

# Load speech models
generator, asr_model = load_speech_models()
# Unpack both models
generator, speech_recognizer = load_speech_models()
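
# Sketch for illustration only (not taken from this file): the transformers ASR
# pipeline created above is typically called with a path to an audio file (ffmpeg
# required) or a raw array; "sample.wav" is a placeholder name.
if speech_recognizer is not None:
    example = speech_recognizer("sample.wav")   # returns a dict like {"text": "..."}
    print("Heard:", example["text"])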

# Initialize Llama 3.2 model for conversation responses
def load_llm_model():
    """Load Llama 3.2 model for generating text responses"""
    try:
        logger.info("Loading Llama 3.2 model for conversational responses...")
        model_id = "meta-llama/Llama-3.2-1B-Instruct"
        tokenizer = AutoTokenizer.from_pretrained(model_id)

        # Determine compute device for LLM
        llm_device = "cpu"  # Default to CPU for LLM

        # Use CUDA if available and there's enough VRAM
        if device == "cuda" and torch.cuda.is_available():
            try:
                free_mem = torch.cuda.get_device_properties(0).total_memory - torch.cuda.memory_allocated(0)
                # If we have at least 2GB free, use CUDA for LLM
                if free_mem > 2 * 1024 * 1024 * 1024:
                    llm_device = "cuda"
            except:
                pass

        logger.info(f"Using {llm_device} for Llama 3.2 model")

        # Load the model with lower precision for efficiency
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.float16 if llm_device == "cuda" else torch.float32,
            device_map=llm_device
        )

        # Create a pipeline for easier inference
        llm = pipeline(
            "text-generation",
            model=model,
            tokenizer=tokenizer,
            max_length=512,
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
            repetition_penalty=1.1
        )

        logger.info("Llama 3.2 model loaded successfully")
        return llm
    except Exception as e:
        logger.error(f"Error loading Llama 3.2 model: {e}")
        return None

# Load the LLM model
llm = load_llm_model()
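
# Sketch for illustration only (not taken from this file): a minimal call to the
# text-generation pipeline returned above. The output includes the prompt by default,
# so the caller trims everything up to "Assistant:". Assumes access to the gated
# meta-llama checkpoint was available when load_llm_model() ran.
if llm is not None:
    demo = llm("User: Hello, who are you?\nAssistant:", max_new_tokens=40)
    print(demo[0]["generated_text"].split("Assistant:")[-1].strip())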

# Set up Flask and Socket.IO
app = Flask(__name__)
@@ -307,67 +297,117 @@ def encode_audio_data(audio_tensor: torch.Tensor) -> str:
    buf.seek(0)
    return f"data:audio/wav;base64,{base64.b64encode(buf.read()).decode('utf-8')}"

def transcribe_audio(audio_tensor: torch.Tensor) -> str:
    """Transcribe audio using WhisperX with robust error handling"""
    global asr_model
def process_speech(audio_tensor: torch.Tensor, client_id: str) -> str:
    """Process speech with speech recognition"""
    if not speech_recognizer:
        # Fallback to basic detection if model failed to load
        return detect_speech_energy(audio_tensor)

    try:
        # Save the tensor to a temporary file
        temp_path = os.path.join(base_dir, f"temp_audio_{time.time()}.wav")
        # Save audio to temp file for Whisper
        temp_path = os.path.join(base_dir, f"temp_{time.time()}.wav")
        torchaudio.save(temp_path, audio_tensor.unsqueeze(0).cpu(), generator.sample_rate)

        logger.info(f"Transcribing audio file: {os.path.getsize(temp_path)} bytes")
        # Perform speech recognition
        result = speech_recognizer(temp_path)
        transcription = result["text"]

        # Load the audio for WhisperX
        try:
            audio = whisperx.load_audio(temp_path)
        except Exception as e:
            logger.warning(f"WhisperX load_audio failed: {e}")
            # Fall back to manual loading
            import soundfile as sf
            audio, sr = sf.read(temp_path)
            if sr != 16000:  # WhisperX expects 16kHz audio
                from scipy import signal
                audio = signal.resample(audio, int(len(audio) * 16000 / sr))

        # Transcribe with error handling
        try:
            result = asr_model.transcribe(audio, batch_size=4)
        except RuntimeError as e:
            if "CUDA" in str(e) or "libcudnn" in str(e):
                logger.warning(f"CUDA error in transcription, falling back to CPU: {e}")
                try:
                    # Try CPU model
                    cpu_model = whisperx.load_model("tiny", "cpu", compute_type="int8")
                    result = cpu_model.transcribe(audio, batch_size=1)
                    # Update the global model if the original one is broken
                    asr_model = cpu_model
                except Exception as cpu_e:
                    logger.error(f"CPU fallback failed: {cpu_e}")
                    return "I'm having trouble processing audio right now."
            else:
                raise
        finally:
            # Clean up
            if os.path.exists(temp_path):
                os.remove(temp_path)

        # Extract text from segments
        if result["segments"] and len(result["segments"]) > 0:
            transcription = " ".join([segment["text"] for segment in result["segments"]])
            logger.info(f"Transcription: '{transcription.strip()}'")
            return transcription.strip()

        return ""
    except Exception as e:
        logger.error(f"Error in transcription: {e}")
        # Clean up temp file
        if os.path.exists(temp_path):
            os.remove(temp_path)
        return "I heard something but couldn't understand it."

        # Return empty string if no speech detected
        if not transcription or transcription.isspace():
            return "I didn't detect any speech. Could you please try again?"

        return transcription

    except Exception as e:
        logger.error(f"Speech recognition error: {e}")
        return "Sorry, I couldn't understand what you said. Could you try again?"
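
# Sketch for illustration only (not taken from this file): the temp WAV above is
# written at generator.sample_rate (assumed to be 24 kHz for CSM-1B), while
# Whisper-family models work at 16 kHz. Passing a file path lets the pipeline
# resample via ffmpeg; when feeding a raw tensor instead, resample explicitly.
import torchaudio.functional as _AF

def _transcribe_tensor(audio_tensor: torch.Tensor) -> str:
    # Downsample to 16 kHz and hand the raw array straight to the ASR pipeline.
    wav16 = _AF.resample(audio_tensor.cpu(), orig_freq=generator.sample_rate, new_freq=16000)
    result = speech_recognizer({"raw": wav16.numpy(), "sampling_rate": 16000})
    return result["text"]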

def detect_speech_energy(audio_tensor: torch.Tensor) -> str:
    """Basic speech detection based on audio energy levels"""
    # Calculate audio energy
    energy = torch.mean(torch.abs(audio_tensor)).item()

    logger.debug(f"Audio energy detected: {energy:.6f}")

    # Generate response based on energy level
    if energy > 0.1:  # Louder speech
        return "I heard you speaking clearly. How can I help you today?"
    elif energy > 0.05:  # Moderate speech
        return "I heard you say something. Could you please repeat that?"
    elif energy > 0.02:  # Soft speech
        return "I detected some speech, but it was quite soft. Could you speak up a bit?"
    else:  # Very soft or no speech
        return "I didn't detect any speech. Could you please try again?"

def generate_response(text: str, conversation_history: List[Segment]) -> str:
    """Generate a contextual response based on the transcribed text"""
    # Simple response logic - can be replaced with a more sophisticated LLM
    """Generate a contextual response based on the transcribed text using Llama 3.2"""
    # If LLM is not available, use simple responses
    if llm is None:
        return generate_simple_response(text)

    try:
        # Create a conversational prompt based on history
        # Format: recent conversation turns (up to 4) + current user input
        history_str = ""

        # Add up to 4 recent conversation turns (excluding the current one)
        recent_segments = [
            seg for seg in conversation_history[-8:]
            if seg.text and not seg.text.isspace()
        ]

        for i, segment in enumerate(recent_segments):
            speaker_name = "User" if segment.speaker == 0 else "Assistant"
            history_str += f"{speaker_name}: {segment.text}\n"

        # Construct the prompt for Llama 3.2
        prompt = f"""<|system|>
You are Sesame, a helpful, friendly and concise voice assistant.
Keep your responses conversational, natural, and to the point.
Respond to the user's latest message in the context of the conversation.
<|end|>

{history_str}
User: {text}
Assistant:"""

        logger.debug(f"LLM Prompt: {prompt}")

        # Generate response with the LLM
        result = llm(
            prompt,
            max_new_tokens=150,
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
            repetition_penalty=1.1
        )

        # Extract the generated text
        response = result[0]["generated_text"]

        # Extract just the Assistant's response (after the prompt)
        response = response.split("Assistant:")[-1].strip()

        # Clean up and ensure it's not too long for TTS
        response = response.split("\n")[0].strip()
        if len(response) > 200:
            response = response[:197] + "..."

        logger.info(f"LLM response: {response}")
        return response

    except Exception as e:
        logger.error(f"Error generating LLM response: {e}")
        # Fall back to simple responses
        return generate_simple_response(text)
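
# Sketch for illustration only (not taken from this file): Llama 3.2 Instruct ships
# its own chat template, so the hand-rolled <|system|>/<|end|> prompt above could
# instead be built with the tokenizer attached to the pipeline (llm.tokenizer).
def _build_prompt_with_template(text, recent_segments):
    messages = [{"role": "system", "content": "You are Sesame, a helpful, friendly and concise voice assistant."}]
    for seg in recent_segments:
        messages.append({"role": "user" if seg.speaker == 0 else "assistant", "content": seg.text})
    messages.append({"role": "user", "content": text})
    return llm.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)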
def generate_simple_response(text: str) -> str:
    """Generate a simple rule-based response as fallback"""
    responses = {
        "hello": "Hello there! How can I help you today?",
        "hi": "Hi there! What can I do for you?",
@@ -394,7 +434,7 @@ def generate_response(text: str, conversation_history: List[Segment]) -> str:
    elif len(text) < 10:
        return "Thanks for your message. Could you elaborate a bit more?"
    else:
        return f"I understand you said '{text}'. That's interesting! Can you tell me more about that?"
        return f"I heard you say something about that. Can you tell me more?"

# Flask Routes
@app.route('/')
@@ -610,33 +650,32 @@ def process_complete_utterance(client_id, client, speaker_id, is_incomplete=Fals
    # Combine audio chunks
    full_audio = torch.cat(client['streaming_buffer'], dim=0)

    # Process with speech-to-text
    logger.info(f"[{client_id[:8]}] Starting transcription...")
    transcribed_text = transcribe_audio(full_audio)
    # Process audio to generate a response (no speech recognition)
    generated_text = process_speech(full_audio, client_id)

    # Add suffix for incomplete utterances
    if is_incomplete:
        transcribed_text += " (processing continued speech...)"
        generated_text += " (processing continued speech...)"

    # Log the transcription
    logger.info(f"[{client_id[:8]}] Transcribed: '{transcribed_text}'")
    # Log the generated text
    logger.info(f"[{client_id[:8]}] Generated text: '{generated_text}'")

    # Handle the transcription result
    if transcribed_text:
    # Handle the result
    if generated_text:
        # Add user message to context
        user_segment = Segment(text=transcribed_text, speaker=speaker_id, audio=full_audio)
        user_segment = Segment(text=generated_text, speaker=speaker_id, audio=full_audio)
        client['context_segments'].append(user_segment)

        # Send the transcribed text to client
        # Send the text to client
        emit('transcription', {
            'type': 'transcription',
            'text': transcribed_text
            'text': generated_text
        }, room=client_id)

        # Only generate a response if this is a complete utterance
        if not is_incomplete:
            # Generate a contextual response
            response_text = generate_response(transcribed_text, client['context_segments'])
            response_text = generate_response(generated_text, client['context_segments'])
            logger.info(f"[{client_id[:8]}] Generating response: '{response_text}'")

            # Let the client know we're processing
@@ -684,7 +723,7 @@ def process_complete_utterance(client_id, client, speaker_id, is_incomplete=Fals
            'message': "Sorry, there was an error generating the audio response."
        }, room=client_id)
    else:
        # If transcription failed, send a notification
        # If processing failed, send a notification
        emit('error', {
            'type': 'error',
            'message': "Sorry, I couldn't understand what you said. Could you try again?"
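
# Sketch for illustration only (not taken from this file): a minimal python-socketio
# client script, run separately from the server, that listens for the 'transcription'
# events emitted above. The URL is an assumption, and the events that stream audio to
# the server are outside this diff.
#
#   import socketio
#
#   sio = socketio.Client()
#
#   @sio.on("transcription")
#   def on_transcription(data):
#       print("Server heard:", data["text"])
#
#   sio.connect("http://localhost:5000")
#   sio.wait()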
@@ -791,7 +830,7 @@ if __name__ == "__main__":
    print(f" - Network URL: http://<your-ip-address>:5000")
    print(f"{'='*60}")
    print(f"🌐 Device: {device.upper()}")
    print(f"🧠 Models: Sesame CSM + WhisperX ASR")
    print(f"🧠 Models: Sesame CSM (TTS only)")
    print(f"🔧 Serving from: {os.path.join(base_dir, 'index.html')}")
    print(f"{'='*60}")
    print(f"Ready to receive connections! Press Ctrl+C to stop the server.\n")
808 React/package-lock.json (generated)
@@ -11,7 +11,8 @@
        "@auth0/nextjs-auth0": "^4.3.0",
        "next": "15.2.4",
        "react": "^19.1.0",
        "react-dom": "^19.1.0"
        "react-dom": "^19.1.0",
        "twilio": "^5.5.1"
      },
      "devDependencies": {
        "@skeletonlabs/skeleton": "^3.1.1",
(Remaining package-lock.json hunks omitted: generated lockfile entries. They add records
for twilio 5.5.1 and its transitive dependencies (agent-base, asynckit, axios,
buffer-equal-constant-time, call-bind-apply-helpers, call-bound, combined-stream, dayjs,
debug, delayed-stream, dunder-proto, ecdsa-sig-formatter, es-define-property, es-errors,
es-object-atoms, es-set-tostringtag, follow-redirects, form-data, function-bind,
get-intrinsic, get-proto, gopd, has-symbols, has-tostringtag, hasown, https-proxy-agent,
jsonwebtoken, jwa, jws, the lodash.* helpers, math-intrinsics, mime-db, mime-types, ms,
object-inspect, proxy-from-env, qs, safe-buffer, scmp, semver, side-channel and its
list/map/weakmap variants, xmlbuilder) and move the @next/swc-* platform binary entries
within the file.)
@@ -16,6 +16,24 @@ export default async function Home() {

  console.log("Session:", session?.user);

  const handleEmergency = async () => {
    // send texts
    const response = await fetch("/api/sendMessage", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify({
        message: `yo i need help`,
      }),
    });

    if (!response.ok) {
      console.error("Error sending message:", response.statusText);
      return;
    }
  }

  // If no session, show sign-up and login buttons
  if (!session) {

28 React/src/pages/api/sendMessage.ts Normal file
@@ -0,0 +1,28 @@
import { NextApiRequest, NextApiResponse } from 'next';
import twilio from 'twilio';

const accountSid = process.env.TWILIO_ACCOUNT_SID;
const authToken = process.env.TWILIO_AUTH_TOKEN;
const client = twilio(accountSid, authToken);

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method === 'POST') {
    const { message } = req.body;

    try {
      const response = await client.messages.create({
        body: message,
        from: process.env.TWILIO_PHONE_NUMBER,
        to: '+18777804236',
      });

      res.status(200).json({ success: true, sid: response.sid });
    } catch (error) {
      console.error('Error sending message:', error);
      res.status(500).json({ success: false, error: 'Failed to send message' });
    }
  } else {
    res.setHeader('Allow', ['POST']);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}