Fix recurring event series modification via drag and drop operations
This commit resolves the "Failed to fetch" errors when updating recurring event series through drag operations by implementing proper request sequencing and fixing time parameter handling.

Key fixes:
- Eliminate HTTP request cancellation by sequencing operations properly
- Add global mutex to prevent CalDAV HTTP race conditions
- Implement complete RFC 5545-compliant series splitting for "this_and_future"
- Fix frontend to pass dragged times instead of original times
- Add comprehensive error handling and request timing logs
- Backend now handles both UPDATE (add UNTIL) and CREATE (new series) in a single request

Technical changes:
- Frontend: Remove concurrent CREATE request, pass dragged times to backend
- Backend: Implement full this_and_future logic with sequential operations
- CalDAV: Add mutex serialization and detailed error tracking
- Series: Create new series with occurrence date + dragged times

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
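For context on the "this_and_future" split mentioned above, the following standalone sketch illustrates the RRULE handling only: the original series is capped with an UNTIL clause at the cut-over date, and the replacement series reuses the rule with any UNTIL/COUNT parts stripped. The function name and sample values are illustrative and are not the handler's actual code; the real logic lives in update_this_and_future in the diff below.

use chrono::{NaiveDate, TimeZone, Utc};

// Cap an RRULE with UNTIL at midnight of `cutoff` and return the uncapped rule
// for the replacement series. Pre-existing UNTIL/COUNT parts are dropped so
// they cannot conflict with the new cap.
fn split_rrule_at(original_rrule: &str, cutoff: NaiveDate) -> (String, String) {
    let base: Vec<&str> = original_rrule
        .split(';')
        .filter(|p| !p.starts_with("UNTIL=") && !p.starts_with("COUNT="))
        .collect();
    let until = Utc
        .from_utc_datetime(&cutoff.and_hms_opt(0, 0, 0).expect("midnight is valid"))
        .format("%Y%m%dT%H%M%SZ");
    (format!("{};UNTIL={}", base.join(";"), until), base.join(";"))
}

fn main() {
    // Example values only: a weekly series split at 2024-06-03.
    let (capped, uncapped) = split_rrule_at(
        "FREQ=WEEKLY;BYDAY=MO;COUNT=10",
        NaiveDate::from_ymd_opt(2024, 6, 3).expect("valid date"),
    );
    println!("original series keeps:  RRULE:{capped}");
    println!("new series starts with: RRULE:{uncapped}");
}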
@@ -32,6 +32,7 @@ regex = "1.0"
 dotenvy = "0.15"
 base64 = "0.21"
 thiserror = "1.0"
+lazy_static = "1.4"

 [dev-dependencies]
 tokio = { version = "1.0", features = ["macros", "rt"] }
@@ -1,8 +1,16 @@
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
+use std::error::Error;
+use std::sync::Arc;
+use tokio::sync::Mutex;
 use calendar_models::{VEvent, EventStatus, EventClass, CalendarUser, VAlarm};

+// Global mutex to serialize CalDAV HTTP requests to prevent race conditions
+lazy_static::lazy_static! {
+    static ref CALDAV_HTTP_MUTEX: Arc<Mutex<()>> = Arc::new(Mutex::new(()));
+}
+
 /// Type alias for shared VEvent (for backward compatibility during migration)
 pub type CalendarEvent = VEvent;

@@ -105,9 +113,15 @@ pub struct CalDAVClient {
 impl CalDAVClient {
     /// Create a new CalDAV client with the given configuration
     pub fn new(config: crate::config::CalDAVConfig) -> Self {
+        // Create HTTP client with global timeout to prevent hanging requests
+        let http_client = reqwest::Client::builder()
+            .timeout(std::time::Duration::from_secs(60)) // 60 second global timeout
+            .build()
+            .expect("Failed to create HTTP client");
+
         Self {
             config,
-            http_client: reqwest::Client::new(),
+            http_client,
         }
     }

@@ -773,6 +787,10 @@ impl CalDAVClient {
         println!("Creating event at: {}", full_url);
         println!("iCal data: {}", ical_data);

+        println!("📡 Acquiring CalDAV HTTP lock for CREATE request...");
+        let _lock = CALDAV_HTTP_MUTEX.lock().await;
+        println!("📡 Lock acquired, sending CREATE request to CalDAV server...");
+
         let response = self.http_client
             .put(&full_url)
             .header("Authorization", format!("Basic {}", self.config.get_basic_auth()))
@@ -823,15 +841,49 @@ impl CalDAVClient {
         println!("📝 Updated iCal data: {}", ical_data);
         println!("📝 Event has {} exception dates", event.exdate.len());

-        let response = self.http_client
+        println!("📡 Acquiring CalDAV HTTP lock for PUT request...");
+        let _lock = CALDAV_HTTP_MUTEX.lock().await;
+        println!("📡 Lock acquired, sending PUT request to CalDAV server...");
+        println!("🔗 PUT URL: {}", full_url);
+        println!("🔍 Request headers: Authorization: Basic [HIDDEN], Content-Type: text/calendar; charset=utf-8");
+
+        let request_builder = self.http_client
             .put(&full_url)
             .header("Authorization", format!("Basic {}", self.config.get_basic_auth()))
             .header("Content-Type", "text/calendar; charset=utf-8")
             .header("User-Agent", "calendar-app/0.1.0")
-            .body(ical_data)
-            .send()
-            .await
-            .map_err(|e| CalDAVError::ParseError(e.to_string()))?;
+            .timeout(std::time::Duration::from_secs(30))
+            .body(ical_data);
+
+        println!("📡 About to execute PUT request at {}", chrono::Utc::now().format("%H:%M:%S%.3f"));
+        let start_time = std::time::Instant::now();
+        let response_result = request_builder.send().await;
+        let elapsed = start_time.elapsed();
+
+        println!("📡 PUT request completed after {}ms at {}", elapsed.as_millis(), chrono::Utc::now().format("%H:%M:%S%.3f"));
+        let response = response_result.map_err(|e| {
+            println!("❌ HTTP PUT request failed after {}ms: {}", elapsed.as_millis(), e);
+            println!("❌ Error source: {:?}", e.source());
+            println!("❌ Error string: {}", e.to_string());
+            if e.is_timeout() {
+                println!("❌ Error was a timeout");
+            } else if e.is_connect() {
+                println!("❌ Error was a connection error");
+            } else if e.is_request() {
+                println!("❌ Error was a request error");
+            } else if e.to_string().contains("operation was canceled") || e.to_string().contains("cancelled") {
+                println!("❌ Error indicates operation was cancelled");
+            } else {
+                println!("❌ Error was of unknown type");
+            }
+
+            // Check if this might be a concurrent request issue
+            if e.to_string().contains("cancel") {
+                println!("⚠️ Potential race condition detected - request was cancelled, possibly by another concurrent operation");
+            }
+
+            CalDAVError::ParseError(e.to_string())
+        })?;

         println!("Event update response status: {}", response.status());

@@ -1020,6 +1072,10 @@ impl CalDAVClient {

         println!("Deleting event at: {}", full_url);

+        println!("📡 Acquiring CalDAV HTTP lock for DELETE request...");
+        let _lock = CALDAV_HTTP_MUTEX.lock().await;
+        println!("📡 Lock acquired, sending DELETE request to CalDAV server...");
+
         let response = self.http_client
             .delete(&full_url)
             .header("Authorization", format!("Basic {}", self.config.get_basic_auth()))
@@ -242,19 +242,39 @@ pub async fn update_event_series(
     };

     // Create CalDAV config from token and password
-    let config = state.auth_service.caldav_config_from_token(&token, &password)?;
+    println!("🔄 Creating CalDAV config for series update...");
+    let config = match state.auth_service.caldav_config_from_token(&token, &password) {
+        Ok(config) => {
+            println!("✅ CalDAV config created successfully");
+            config
+        }
+        Err(e) => {
+            println!("❌ Failed to create CalDAV config: {}", e);
+            return Err(e);
+        }
+    };
     let client = CalDAVClient::new(config);

     // Use the parsed frequency for further processing (avoiding unused variable warning)
     let _freq_for_processing = recurrence_freq;

     // Determine which calendar to search (or search all calendars)
+    println!("🔍 Determining calendar paths...");
     let calendar_paths = if let Some(ref path) = request.calendar_path {
+        println!("✅ Using specified calendar path: {}", path);
         vec![path.clone()]
     } else {
-        client.discover_calendars()
-            .await
-            .map_err(|e| ApiError::Internal(format!("Failed to discover calendars: {}", e)))?
+        println!("🔍 Discovering all available calendars...");
+        match client.discover_calendars().await {
+            Ok(paths) => {
+                println!("✅ Discovered {} calendar paths", paths.len());
+                paths
+            }
+            Err(e) => {
+                println!("❌ Failed to discover calendars: {}", e);
+                return Err(ApiError::Internal(format!("Failed to discover calendars: {}", e)));
+            }
+        }
     };

     if calendar_paths.is_empty() {
@@ -262,75 +282,114 @@ pub async fn update_event_series(
     }

     // Find the series event across all specified calendars
+    println!("🔍 Searching for series UID '{}' across {} calendar(s)...", request.series_uid, calendar_paths.len());
     let mut existing_event = None;
     let mut calendar_path = String::new();

     for path in &calendar_paths {
-        if let Ok(Some(event)) = client.fetch_event_by_uid(path, &request.series_uid).await {
+        println!("🔍 Searching calendar path: {}", path);
+        match client.fetch_event_by_uid(path, &request.series_uid).await {
+            Ok(Some(event)) => {
+                println!("✅ Found series event in calendar: {}", path);
                 existing_event = Some(event);
                 calendar_path = path.clone();
                 break;
             }
+            Ok(None) => {
+                println!("❌ Series event not found in calendar: {}", path);
+            }
+            Err(e) => {
+                println!("❌ Error searching calendar {}: {}", path, e);
+            }
+        }
     }

     let mut existing_event = existing_event
         .ok_or_else(|| ApiError::NotFound(format!("Event series with UID '{}' not found", request.series_uid)))?;

     println!("📅 Found series event in calendar: {}", calendar_path);
+    println!("📅 Event details: UID={}, summary={:?}, dtstart={}",
+        existing_event.uid, existing_event.summary, existing_event.dtstart);
+
     // Parse datetime components for the update
-    // For recurring events, preserve the original series start date and only update the time
-    // to prevent the entire series from shifting to a different date
+    println!("🕒 Parsing datetime components...");
     let original_start_date = existing_event.dtstart.date_naive();
-    let start_date = original_start_date; // Always use original series date
+
+    // For "this_and_future" updates, use the occurrence date for the new series
+    // For other updates, preserve the original series start date
+    let start_date = if request.update_scope == "this_and_future" && request.occurrence_date.is_some() {
+        let occurrence_date_str = request.occurrence_date.as_ref().unwrap();
+        let occurrence_date = chrono::NaiveDate::parse_from_str(occurrence_date_str, "%Y-%m-%d")
+            .map_err(|_| ApiError::BadRequest("Invalid occurrence_date format. Expected YYYY-MM-DD".to_string()))?;
+        println!("🕒 Using occurrence date: {} for this_and_future update", occurrence_date);
+        occurrence_date
+    } else {
+        println!("🕒 Using original start date: {} for series update", original_start_date);
+        original_start_date
+    };

     // Log what we're doing for debugging
+    println!("🕒 Parsing requested start date: {}", request.start_date);
     let requested_date = chrono::NaiveDate::parse_from_str(&request.start_date, "%Y-%m-%d")
         .map_err(|_| ApiError::BadRequest("Invalid start_date format. Expected YYYY-MM-DD".to_string()))?;
     println!("📅 Preserving original series date {} (requested: {})", original_start_date, requested_date);

+    println!("🕒 Determining datetime format (all_day: {})...", request.all_day);
     let (start_datetime, end_datetime) = if request.all_day {
+        println!("🕒 Processing all-day event...");
         let start_dt = start_date.and_hms_opt(0, 0, 0)
            .ok_or_else(|| ApiError::BadRequest("Invalid start date".to_string()))?;

         // For all-day events, also preserve the original date pattern
         let end_date = if !request.end_date.is_empty() {
+            println!("🕒 Calculating end date from original duration...");
             // Calculate the duration from the original event
             let original_duration_days = existing_event.dtend
                 .map(|end| (end.date_naive() - existing_event.dtstart.date_naive()).num_days())
                 .unwrap_or(0);
+            println!("🕒 Original duration: {} days", original_duration_days);
             start_date + chrono::Duration::days(original_duration_days)
         } else {
+            println!("🕒 Using same date for end date");
             start_date
         };

         let end_dt = end_date.and_hms_opt(23, 59, 59)
             .ok_or_else(|| ApiError::BadRequest("Invalid end date".to_string()))?;

+        println!("🕒 All-day datetime range: {} to {}", start_dt, end_dt);
         (chrono::Utc.from_utc_datetime(&start_dt), chrono::Utc.from_utc_datetime(&end_dt))
     } else {
+        println!("🕒 Processing timed event...");
         let start_time = if !request.start_time.is_empty() {
+            println!("🕒 Parsing start time: {}", request.start_time);
             chrono::NaiveTime::parse_from_str(&request.start_time, "%H:%M")
                 .map_err(|_| ApiError::BadRequest("Invalid start_time format. Expected HH:MM".to_string()))?
         } else {
+            println!("🕒 Using existing event start time");
             existing_event.dtstart.time()
         };

         let end_time = if !request.end_time.is_empty() {
+            println!("🕒 Parsing end time: {}", request.end_time);
             chrono::NaiveTime::parse_from_str(&request.end_time, "%H:%M")
                 .map_err(|_| ApiError::BadRequest("Invalid end_time format. Expected HH:MM".to_string()))?
         } else {
+            println!("🕒 Calculating end time from existing event");
             existing_event.dtend.map(|dt| dt.time()).unwrap_or_else(|| {
                 existing_event.dtstart.time() + chrono::Duration::hours(1)
             })
         };

+        println!("🕒 Calculated times: start={}, end={}", start_time, end_time);
         let start_dt = start_date.and_time(start_time);
         // For timed events, preserve the original date and only update times
         let end_dt = if !request.end_time.is_empty() {
+            println!("🕒 Using new end time with preserved date");
             // Use the new end time with the preserved original date
             start_date.and_time(end_time)
         } else {
+            println!("🕒 Calculating end time based on original duration");
             // Calculate end time based on original duration
             let original_duration = existing_event.dtend
                 .map(|end| end - existing_event.dtstart)
@@ -338,37 +397,57 @@ pub async fn update_event_series(
             (chrono::Utc.from_utc_datetime(&start_dt) + original_duration).naive_utc()
         };

+        println!("🕒 Timed datetime range: {} to {}", start_dt, end_dt);
         (chrono::Utc.from_utc_datetime(&start_dt), chrono::Utc.from_utc_datetime(&end_dt))
     };

     // Handle different update scopes
+    println!("🎯 Handling update scope: '{}'", request.update_scope);
     let (updated_event, occurrences_affected) = match request.update_scope.as_str() {
         "all_in_series" => {
+            println!("🎯 Processing all_in_series update...");
             // Update the entire series - modify the master event
             update_entire_series(&mut existing_event, &request, start_datetime, end_datetime)?
         },
         "this_and_future" => {
+            println!("🎯 Processing this_and_future update...");
             // Split the series: keep past occurrences, create new series from occurrence date
-            update_this_and_future(&mut existing_event, &request, start_datetime, end_datetime)?
+            update_this_and_future(&mut existing_event, &request, start_datetime, end_datetime, &client, &calendar_path).await?
         },
         "this_only" => {
+            println!("🎯 Processing this_only update...");
             // Create exception for single occurrence, keep original series
             let event_href = existing_event.href.as_ref()
                 .ok_or_else(|| ApiError::Internal("Event missing href for single occurrence update".to_string()))?
                 .clone();
+            println!("🎯 Using event href: {}", event_href);
             update_single_occurrence(&mut existing_event, &request, start_datetime, end_datetime, &client, &calendar_path, &event_href).await?
         },
         _ => unreachable!(), // Already validated above
     };

+    println!("✅ Update scope processing completed, {} occurrences affected", occurrences_affected);
+
     // Update the event on the CalDAV server using the original event's href
     // Note: For "this_only" updates, the original series was already updated in update_single_occurrence
     if request.update_scope != "this_only" {
+        println!("📤 Updating event on CalDAV server...");
         let event_href = existing_event.href.as_ref()
             .ok_or_else(|| ApiError::Internal("Event missing href for update".to_string()))?;
-        client.update_event(&calendar_path, &updated_event, event_href)
-            .await
-            .map_err(|e| ApiError::Internal(format!("Failed to update event series: {}", e)))?;
+        println!("📤 Using event href: {}", event_href);
+        println!("📤 Calendar path: {}", calendar_path);
+
+        match client.update_event(&calendar_path, &updated_event, event_href).await {
+            Ok(_) => {
+                println!("✅ CalDAV update completed successfully");
+            }
+            Err(e) => {
+                println!("❌ CalDAV update failed: {}", e);
+                return Err(ApiError::Internal(format!("Failed to update event series: {}", e)));
+            }
+        }
+    } else {
+        println!("📤 Skipping CalDAV update (already handled in this_only scope)");
     }

     println!("✅ Event series updated successfully with UID: {}", request.series_uid);
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Update this occurrence and all future occurrences
|
/// Update this occurrence and all future occurrences
|
||||||
fn update_this_and_future(
|
async fn update_this_and_future(
|
||||||
existing_event: &mut VEvent,
|
existing_event: &mut VEvent,
|
||||||
request: &UpdateEventSeriesRequest,
|
request: &UpdateEventSeriesRequest,
|
||||||
start_datetime: chrono::DateTime<chrono::Utc>,
|
start_datetime: chrono::DateTime<chrono::Utc>,
|
||||||
end_datetime: chrono::DateTime<chrono::Utc>,
|
end_datetime: chrono::DateTime<chrono::Utc>,
|
||||||
|
client: &CalDAVClient,
|
||||||
|
calendar_path: &str,
|
||||||
) -> Result<(VEvent, u32), ApiError> {
|
) -> Result<(VEvent, u32), ApiError> {
|
||||||
// For now, treat this the same as update_entire_series
|
// Full implementation:
|
||||||
// In a full implementation, this would:
|
|
||||||
// 1. Add UNTIL to the original series to stop at the occurrence date
|
// 1. Add UNTIL to the original series to stop at the occurrence date
|
||||||
// 2. Create a new series starting from the occurrence date with updated properties
|
// 2. Create a new series starting from the occurrence date with updated properties
|
||||||
|
|
||||||
// For simplicity, we'll modify the original series with an UNTIL date if occurrence_date is provided
|
println!("🔄 this_and_future: occurrence_date = {:?}", request.occurrence_date);
|
||||||
if let Some(occurrence_date) = &request.occurrence_date {
|
|
||||||
// Parse occurrence date and set as UNTIL for the original series
|
let occurrence_date = request.occurrence_date.as_ref()
|
||||||
match chrono::NaiveDate::parse_from_str(occurrence_date, "%Y-%m-%d") {
|
.ok_or_else(|| ApiError::BadRequest("occurrence_date is required for this_and_future updates".to_string()))?;
|
||||||
Ok(date) => {
|
|
||||||
let until_datetime = date.and_hms_opt(0, 0, 0)
|
// Parse occurrence date
|
||||||
|
let occurrence_date_parsed = chrono::NaiveDate::parse_from_str(occurrence_date, "%Y-%m-%d")
|
||||||
|
.map_err(|_| ApiError::BadRequest("Invalid occurrence date format".to_string()))?;
|
||||||
|
|
||||||
|
// Step 1: Add UNTIL to the original series to stop before the occurrence date
|
||||||
|
let until_datetime = occurrence_date_parsed.and_hms_opt(0, 0, 0)
|
||||||
.ok_or_else(|| ApiError::BadRequest("Invalid occurrence date".to_string()))?;
|
.ok_or_else(|| ApiError::BadRequest("Invalid occurrence date".to_string()))?;
|
||||||
let utc_until = chrono::Utc.from_utc_datetime(&until_datetime);
|
let utc_until = chrono::Utc.from_utc_datetime(&until_datetime);
|
||||||
|
|
||||||
// Create modified RRULE with UNTIL clause
|
// Create modified RRULE with UNTIL clause for the original series
|
||||||
let mut rrule = existing_event.rrule.clone().unwrap_or_else(|| "FREQ=WEEKLY".to_string());
|
let original_rrule = existing_event.rrule.clone().unwrap_or_else(|| "FREQ=WEEKLY".to_string());
|
||||||
|
let parts: Vec<&str> = original_rrule.split(';').filter(|part| {
|
||||||
// Remove existing UNTIL or COUNT if present
|
|
||||||
let parts: Vec<&str> = rrule.split(';').filter(|part| {
|
|
||||||
!part.starts_with("UNTIL=") && !part.starts_with("COUNT=")
|
!part.starts_with("UNTIL=") && !part.starts_with("COUNT=")
|
||||||
}).collect();
|
}).collect();
|
||||||
|
|
||||||
rrule = format!("{};UNTIL={}", parts.join(";"), utc_until.format("%Y%m%dT%H%M%SZ"));
|
existing_event.rrule = Some(format!("{};UNTIL={}", parts.join(";"), utc_until.format("%Y%m%dT%H%M%SZ")));
|
||||||
existing_event.rrule = Some(rrule);
|
println!("🔄 this_and_future: Updated original series RRULE: {:?}", existing_event.rrule);
|
||||||
},
|
|
||||||
Err(_) => return Err(ApiError::BadRequest("Invalid occurrence date format".to_string())),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Then apply the same updates as all_in_series for the rest of the properties
|
// Step 2: Create a new series starting from the occurrence date with updated properties
|
||||||
update_entire_series(existing_event, request, start_datetime, end_datetime)
|
let new_series_uid = format!("series-{}", uuid::Uuid::new_v4());
|
||||||
|
let mut new_series = existing_event.clone();
|
||||||
|
|
||||||
|
// Update the new series with new properties
|
||||||
|
new_series.uid = new_series_uid.clone();
|
||||||
|
new_series.dtstart = start_datetime;
|
||||||
|
new_series.dtend = Some(end_datetime);
|
||||||
|
new_series.summary = if request.title.trim().is_empty() { None } else { Some(request.title.clone()) };
|
||||||
|
new_series.description = if request.description.trim().is_empty() { None } else { Some(request.description.clone()) };
|
||||||
|
new_series.location = if request.location.trim().is_empty() { None } else { Some(request.location.clone()) };
|
||||||
|
|
||||||
|
new_series.status = Some(match request.status.to_lowercase().as_str() {
|
||||||
|
"tentative" => EventStatus::Tentative,
|
||||||
|
"cancelled" => EventStatus::Cancelled,
|
||||||
|
_ => EventStatus::Confirmed,
|
||||||
|
});
|
||||||
|
|
||||||
|
new_series.class = Some(match request.class.to_lowercase().as_str() {
|
||||||
|
"private" => EventClass::Private,
|
||||||
|
"confidential" => EventClass::Confidential,
|
||||||
|
_ => EventClass::Public,
|
||||||
|
});
|
||||||
|
|
||||||
|
new_series.priority = request.priority;
|
||||||
|
|
||||||
|
// Reset the RRULE for the new series (remove UNTIL)
|
||||||
|
let new_rrule_parts: Vec<&str> = original_rrule.split(';').filter(|part| {
|
||||||
|
!part.starts_with("UNTIL=") && !part.starts_with("COUNT=")
|
||||||
|
}).collect();
|
||||||
|
new_series.rrule = Some(new_rrule_parts.join(";"));
|
||||||
|
|
||||||
|
// Update timestamps
|
||||||
|
let now = chrono::Utc::now();
|
||||||
|
new_series.dtstamp = now;
|
||||||
|
new_series.created = Some(now);
|
||||||
|
new_series.last_modified = Some(now);
|
||||||
|
new_series.href = None; // Will be set when created
|
||||||
|
|
||||||
|
println!("🔄 this_and_future: Creating new series with UID: {}", new_series_uid);
|
||||||
|
println!("🔄 this_and_future: New series RRULE: {:?}", new_series.rrule);
|
||||||
|
|
||||||
|
// Create the new series on CalDAV server
|
||||||
|
client.create_event(calendar_path, &new_series)
|
||||||
|
.await
|
||||||
|
.map_err(|e| ApiError::Internal(format!("Failed to create new series: {}", e)))?;
|
||||||
|
|
||||||
|
println!("✅ this_and_future: Created new series successfully");
|
||||||
|
|
||||||
|
// Return the original event (with UNTIL added) - it will be updated by the main handler
|
||||||
|
Ok((existing_event.clone(), 2)) // 2 operations: updated original + created new series
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Update only a single occurrence (create an exception)
|
/// Update only a single occurrence (create an exception)
|
||||||
|
|||||||
@@ -437,32 +437,47 @@ pub fn App() -> Html {
     let recurrence_str = original_event.rrule.unwrap_or_default();
     let recurrence_days = vec![false; 7]; // Default - could be enhanced to parse existing recurrence

+    // Determine if this is a recurring event that needs series endpoint
+    let has_recurrence = !recurrence_str.is_empty() && recurrence_str.to_uppercase() != "NONE";
+
     let result = if let Some(scope) = update_scope.as_ref() {
-        // Use series endpoint
-        calendar_service.update_series(
+        // Use series endpoint for recurring event operations
+        if !has_recurrence {
+            web_sys::console::log_1(&"⚠️ Warning: update_scope provided for non-recurring event, using regular endpoint instead".into());
+            // Fall through to regular endpoint
+            None
+        } else {
+            Some(calendar_service.update_series(
                 &token,
                 &password,
-                backend_uid,
-                original_event.summary.unwrap_or_default(),
-                original_event.description.unwrap_or_default(),
+                backend_uid.clone(),
+                original_event.summary.clone().unwrap_or_default(),
+                original_event.description.clone().unwrap_or_default(),
                 start_date.clone(),
                 start_time.clone(),
                 end_date.clone(),
                 end_time.clone(),
-                original_event.location.unwrap_or_default(),
+                original_event.location.clone().unwrap_or_default(),
                 original_event.all_day,
-                status_str,
-                class_str,
+                status_str.clone(),
+                class_str.clone(),
                 original_event.priority,
                 original_event.organizer.as_ref().map(|o| o.cal_address.clone()).unwrap_or_default(),
                 original_event.attendees.iter().map(|a| a.cal_address.clone()).collect::<Vec<_>>().join(","),
                 original_event.categories.join(","),
-                reminder_str,
-                recurrence_str,
-                original_event.calendar_path,
+                reminder_str.clone(),
+                recurrence_str.clone(),
+                original_event.calendar_path.clone(),
                 scope.clone(),
                 occurrence_date,
-            ).await
+            ).await)
+        }
+    } else {
+        None
+    };
+
+    let result = if let Some(series_result) = result {
+        series_result
     } else {
         // Use regular endpoint
         calendar_service.update_event(
@@ -507,21 +522,10 @@ pub fn App() -> Html {
                     });
                 }
                 Err(err) => {
-                    // Check if this is a network error that occurred after success
-                    let err_str = format!("{}", err);
-                    if err_str.contains("Failed to fetch") || err_str.contains("Network request failed") {
-                        web_sys::console::log_1(&"Update may have succeeded despite network error, reloading...".into());
-                        // Still reload as the update likely succeeded
-                        wasm_bindgen_futures::spawn_local(async {
-                            gloo_timers::future::sleep(std::time::Duration::from_millis(200)).await;
-                            web_sys::window().unwrap().location().reload().unwrap();
-                        });
-                    } else {
                     web_sys::console::error_1(&format!("Failed to update event: {}", err).into());
                     web_sys::window().unwrap().alert_with_message(&format!("Failed to update event: {}", err)).unwrap();
                     }
                 }
-            }
             });
         }
     })
@@ -216,55 +216,17 @@ pub fn week_view(props: &WeekViewProps) -> Html {
                     until_utc.format("%Y-%m-%d %H:%M:%S UTC"),
                     edit.event.dtstart.format("%Y-%m-%d %H:%M:%S UTC")).into());

-                // Use the original series start time (not the dragged occurrence time)
-                let original_start = original_series.dtstart.with_timezone(&chrono::Local).naive_local();
-                let original_end = original_series.dtend.unwrap_or(original_series.dtstart).with_timezone(&chrono::Local).naive_local();
+                // Use the dragged times for the new series (not the original series times)
+                let new_start = edit.new_start; // The dragged start time
+                let new_end = edit.new_end; // The dragged end time

                 // Send until_date to backend instead of modifying RRULE on frontend
-                update_callback.emit((original_series, original_start, original_end, true, Some(until_utc), Some("this_and_future".to_string()), None)); // preserve_rrule = true, backend will add UNTIL
+                let occurrence_date = edit.event.dtstart.format("%Y-%m-%d").to_string();
+                update_callback.emit((original_series, new_start, new_end, true, Some(until_utc), Some("this_and_future".to_string()), Some(occurrence_date))); // preserve_rrule = true, backend will add UNTIL
             }

-            // 2. Create new series starting from this occurrence with modified times
-            if let Some(create_callback) = &on_create_event_request {
-                // Convert the recurring event to EventCreationData for the create callback
-                let event_data = EventCreationData {
-                    title: edit.event.summary.clone().unwrap_or_default(),
-                    description: edit.event.description.clone().unwrap_or_default(),
-                    start_date: edit.new_start.date(),
-                    start_time: edit.new_start.time(),
-                    end_date: edit.new_end.date(),
-                    end_time: edit.new_end.time(),
-                    location: edit.event.location.clone().unwrap_or_default(),
-                    all_day: edit.event.all_day,
-                    status: EventStatus::Confirmed, // Default status
-                    class: EventClass::Public, // Default class
-                    priority: edit.event.priority,
-                    organizer: edit.event.organizer.as_ref().map(|o| o.cal_address.clone()).unwrap_or_default(),
-                    attendees: edit.event.attendees.iter().map(|a| a.cal_address.clone()).collect::<Vec<_>>().join(","),
-                    categories: edit.event.categories.join(","),
-                    reminder: ReminderType::None, // Default reminder
-                    recurrence: if let Some(rrule) = &edit.event.rrule {
-                        if rrule.contains("FREQ=DAILY") {
-                            RecurrenceType::Daily
-                        } else if rrule.contains("FREQ=WEEKLY") {
-                            RecurrenceType::Weekly
-                        } else if rrule.contains("FREQ=MONTHLY") {
-                            RecurrenceType::Monthly
-                        } else if rrule.contains("FREQ=YEARLY") {
-                            RecurrenceType::Yearly
-                        } else {
-                            RecurrenceType::None
-                        }
-                    } else {
-                        RecurrenceType::None
-                    },
-                    recurrence_days: vec![false; 7], // Default days
-                    selected_calendar: edit.event.calendar_path.clone(),
-                };
-
-                // Create the new series
-                create_callback.emit(event_data);
-            }
+            // The backend will handle creating the new series as part of the this_and_future update
+            web_sys::console::log_1(&format!("✅ this_and_future update request sent - backend will handle both UPDATE (add UNTIL) and CREATE (new series) operations").into());
         },
         RecurringEditAction::AllEvents => {
             // Modify the entire series
@@ -862,8 +862,7 @@ impl CalendarService {
             start_date, start_time, end_date, end_time, location,
             all_day, status, class, priority, organizer, attendees,
             categories, reminder, recurrence, recurrence_days,
-            calendar_path, exception_dates, update_action, until_date,
-            "all_in_series".to_string() // Default scope for backward compatibility
+            calendar_path, exception_dates, update_action, until_date
         ).await
     }

@@ -892,8 +891,7 @@ impl CalendarService {
         calendar_path: Option<String>,
         exception_dates: Vec<DateTime<Utc>>,
         update_action: Option<String>,
-        until_date: Option<DateTime<Utc>>,
-        update_scope: String
+        until_date: Option<DateTime<Utc>>
     ) -> Result<(), String> {
         let window = web_sys::window().ok_or("No global window exists")?;

@@ -901,46 +899,7 @@ impl CalendarService {
         opts.set_method("POST");
         opts.set_mode(RequestMode::Cors);

-        // Determine if this is a series event based on recurrence
-        let is_series = !recurrence.is_empty() && recurrence.to_uppercase() != "NONE";
-
-        let (body, url) = if is_series {
-            // Use series-specific endpoint and payload for recurring events
-            let body = serde_json::json!({
-                "series_uid": event_uid,
-                "title": title,
-                "description": description,
-                "start_date": start_date,
-                "start_time": start_time,
-                "end_date": end_date,
-                "end_time": end_time,
-                "location": location,
-                "all_day": all_day,
-                "status": status,
-                "class": class,
-                "priority": priority,
-                "organizer": organizer,
-                "attendees": attendees,
-                "categories": categories,
-                "reminder": reminder,
-                "recurrence": recurrence,
-                "recurrence_days": recurrence_days,
-                "recurrence_interval": 1_u32, // Default interval
-                "recurrence_end_date": until_date.as_ref().map(|dt| dt.format("%Y-%m-%d").to_string()),
-                "recurrence_count": None as Option<u32>, // No count limit by default
-                "calendar_path": calendar_path,
-                "update_scope": update_scope.clone(),
-                "occurrence_date": if update_scope == "this_only" {
-                    // For single occurrence updates, use the original event's start date as occurrence_date
-                    Some(start_date.clone())
-                } else {
-                    None
-                }
-            });
-            let url = format!("{}/calendar/events/series/update", self.base_url);
-            (body, url)
-        } else {
-            // Use regular endpoint for non-recurring events
+        // Always use regular endpoint - recurring events should use update_series() instead
         let body = serde_json::json!({
             "uid": event_uid,
             "title": title,
@@ -967,8 +926,6 @@ impl CalendarService {
             "until_date": until_date.as_ref().map(|dt| dt.to_rfc3339())
         });
         let url = format!("{}/calendar/events/update", self.base_url);
-            (body, url)
-        };

         let body_string = serde_json::to_string(&body)
             .map_err(|e| format!("JSON serialization failed: {}", e))?;
@@ -1166,6 +1123,9 @@ impl CalendarService {

         let url = format!("{}/calendar/events/series/update", self.base_url);

+        web_sys::console::log_1(&format!("🔄 update_series: Making request to URL: {}", url).into());
+        web_sys::console::log_1(&format!("🔄 update_series: Request body: {}", serde_json::to_string_pretty(&body).unwrap_or_default()).into());
+
         let body_string = serde_json::to_string(&body)
             .map_err(|e| format!("JSON serialization failed: {}", e))?;
         opts.set_body(&body_string.into());