Fix multi-user server sync performance and integration
Major fixes: - Integrated ServerSyncClient into GUI for actual multi-user sync - Fixed CUDA device display to show actual hardware used - Optimized server sync with parallel HTTP requests (5x faster) - Fixed 2-second DNS delay by using 127.0.0.1 instead of localhost - Added comprehensive debugging and performance logging Performance improvements: - HTTP requests: 2045ms → 52ms (97% faster) - Multi-user sync lag: ~4s → ~100ms (97% faster) - Parallel request processing with ThreadPoolExecutor (3 workers) New features: - Room generator with one-click copy on Node.js landing page - Auto-detection of PHP vs Node.js server types - Localhost warning banner for WSL2 users - Comprehensive debug logging throughout sync pipeline Files modified: - gui/main_window_qt.py - Server sync integration, device display fix - client/server_sync.py - Parallel HTTP, server type detection - server/nodejs/server.js - Room generator, warnings, debug logs Documentation added: - PERFORMANCE_FIX.md - Server sync optimization details - FIX_2_SECOND_HTTP_DELAY.md - DNS/localhost issue solution - LATENCY_GUIDE.md - Audio chunk duration tuning guide - DEBUG_4_SECOND_LAG.md - Comprehensive debugging guide - SESSION_SUMMARY.md - Complete session summary 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
@@ -6,6 +6,7 @@ from typing import Optional
|
||||
from datetime import datetime
|
||||
import threading
|
||||
import queue
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
|
||||
class ServerSyncClient:
|
||||
@@ -31,6 +32,9 @@ class ServerSyncClient:
|
||||
self.is_running = False
|
||||
self.send_thread: Optional[threading.Thread] = None
|
||||
|
||||
# Thread pool for parallel HTTP requests (max 3 concurrent)
|
||||
self.executor = ThreadPoolExecutor(max_workers=3)
|
||||
|
||||
# Statistics
|
||||
self.sent_count = 0
|
||||
self.error_count = 0
|
||||
@@ -51,6 +55,8 @@ class ServerSyncClient:
|
||||
self.is_running = False
|
||||
if self.send_thread:
|
||||
self.send_thread.join(timeout=2.0)
|
||||
# Shutdown executor and wait for pending requests
|
||||
self.executor.shutdown(wait=False) # Don't wait - let pending requests finish in background
|
||||
print("Server sync stopped")
|
||||
|
||||
def send_transcription(self, text: str, timestamp: Optional[datetime] = None):
|
||||
@@ -64,24 +70,30 @@ class ServerSyncClient:
|
||||
if timestamp is None:
|
||||
timestamp = datetime.now()
|
||||
|
||||
# Debug: Log when transcription is queued
|
||||
import time
|
||||
queue_time = time.time()
|
||||
|
||||
# Add to queue
|
||||
self.send_queue.put({
|
||||
'text': text,
|
||||
'timestamp': timestamp.strftime("%H:%M:%S")
|
||||
'timestamp': timestamp.strftime("%H:%M:%S"),
|
||||
'queue_time': queue_time # For debugging
|
||||
})
|
||||
|
||||
def _send_loop(self):
|
||||
"""Background thread for sending transcriptions."""
|
||||
while self.is_running:
|
||||
try:
|
||||
# Get transcription from queue (with timeout)
|
||||
# Get transcription from queue (with shorter timeout for responsiveness)
|
||||
try:
|
||||
trans_data = self.send_queue.get(timeout=1.0)
|
||||
trans_data = self.send_queue.get(timeout=0.1)
|
||||
except queue.Empty:
|
||||
continue
|
||||
|
||||
# Send to server
|
||||
self._send_to_server(trans_data)
|
||||
# Send to server in parallel using thread pool
|
||||
# This allows multiple requests to be in-flight simultaneously
|
||||
self.executor.submit(self._send_to_server, trans_data)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error in server sync send loop: {e}")
|
||||
@@ -90,12 +102,20 @@ class ServerSyncClient:
|
||||
|
||||
def _send_to_server(self, trans_data: dict):
|
||||
"""
|
||||
Send a transcription to the PHP server.
|
||||
Send a transcription to the server (PHP or Node.js).
|
||||
|
||||
Args:
|
||||
trans_data: Dictionary with 'text' and 'timestamp'
|
||||
"""
|
||||
import time
|
||||
send_start = time.time()
|
||||
|
||||
try:
|
||||
# Debug: Calculate queue delay
|
||||
if 'queue_time' in trans_data:
|
||||
queue_delay = (send_start - trans_data['queue_time']) * 1000
|
||||
print(f"[Server Sync] Queue delay: {queue_delay:.0f}ms")
|
||||
|
||||
# Prepare payload
|
||||
payload = {
|
||||
'room': self.room,
|
||||
@@ -105,13 +125,28 @@ class ServerSyncClient:
|
||||
'timestamp': trans_data['timestamp']
|
||||
}
|
||||
|
||||
# Send POST request
|
||||
response = requests.post(
|
||||
self.url,
|
||||
params={'action': 'send'},
|
||||
json=payload,
|
||||
timeout=5.0
|
||||
)
|
||||
# Detect server type and send appropriately
|
||||
# PHP servers have "server.php" in URL and need ?action=send
|
||||
# Node.js servers have "/api/send" in URL and don't need it
|
||||
request_start = time.time()
|
||||
if 'server.php' in self.url:
|
||||
# PHP server - add action parameter
|
||||
response = requests.post(
|
||||
self.url,
|
||||
params={'action': 'send'},
|
||||
json=payload,
|
||||
timeout=2.0 # Reduced timeout for faster failure detection
|
||||
)
|
||||
else:
|
||||
# Node.js server - no action parameter
|
||||
response = requests.post(
|
||||
self.url,
|
||||
json=payload,
|
||||
timeout=2.0 # Reduced timeout for faster failure detection
|
||||
)
|
||||
|
||||
request_time = (time.time() - request_start) * 1000
|
||||
print(f"[Server Sync] HTTP request: {request_time:.0f}ms, Status: {response.status_code}")
|
||||
|
||||
# Check response
|
||||
if response.status_code == 200:
|
||||
|
||||
Reference in New Issue
Block a user