feat: Complete import functionality with RRULE fixes and comprehensive testing

- Fix RRULE BYDAY filtering for daily frequency events (Tether sync weekdays only)
- Fix timezone transfer in recurring event expansion
- Add comprehensive timezone-aware iCal generation
- Add extensive test suite for recurrence and timezone functionality
- Update dependencies and configuration examples
- Implement cleanup logic for orphaned events
- Add detailed import plan documentation

This completes the core import functionality with proper timezone handling,
RRULE parsing, and duplicate prevention mechanisms.
This commit is contained in:
Alvaro Soliverez 2025-11-21 12:04:46 -03:00
parent 932b6ae463
commit 640ae119d1
14 changed files with 3057 additions and 182 deletions

292
Cargo.lock generated
View file

@ -168,6 +168,12 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -208,16 +214,18 @@ dependencies = [
"anyhow",
"base64 0.21.7",
"chrono",
"chrono-tz",
"chrono-tz 0.8.6",
"clap",
"config",
"icalendar",
"md5",
"quick-xml",
"reqwest",
"rrule",
"serde",
"serde_json",
"tempfile",
"thiserror",
"thiserror 1.0.69",
"tokio",
"tokio-test",
"toml 0.8.23",
@ -265,7 +273,18 @@ checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e"
dependencies = [
"chrono",
"chrono-tz-build",
"phf",
"phf 0.11.3",
]
[[package]]
name = "chrono-tz"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
dependencies = [
"chrono",
"phf 0.12.1",
"serde",
]
[[package]]
@ -275,7 +294,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f"
dependencies = [
"parse-zoneinfo",
"phf",
"phf 0.11.3",
"phf_codegen",
]
@ -379,6 +398,51 @@ dependencies = [
"typenum",
]
[[package]]
name = "darling"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee"
dependencies = [
"darling_core",
"darling_macro",
]
[[package]]
name = "darling_core"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e"
dependencies = [
"fnv",
"ident_case",
"proc-macro2",
"quote",
"strsim",
"syn",
]
[[package]]
name = "darling_macro"
version = "0.20.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
dependencies = [
"darling_core",
"quote",
"syn",
]
[[package]]
name = "deranged"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
dependencies = [
"powerfmt",
"serde",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -406,6 +470,12 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257"
[[package]]
name = "dyn-clone"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555"
[[package]]
name = "encoding_rs"
version = "0.8.35"
@ -563,7 +633,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http",
"indexmap",
"indexmap 2.11.4",
"slab",
"tokio",
"tokio-util",
@ -591,6 +661,12 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "0.2.12"
@ -798,6 +874,12 @@ dependencies = [
"zerovec",
]
[[package]]
name = "ident_case"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "1.1.0"
@ -819,6 +901,17 @@ dependencies = [
"icu_properties",
]
[[package]]
name = "indexmap"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
dependencies = [
"autocfg",
"hashbrown 0.12.3",
"serde",
]
[[package]]
name = "indexmap"
version = "2.11.4"
@ -827,6 +920,8 @@ checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
dependencies = [
"equivalent",
"hashbrown 0.16.0",
"serde",
"serde_core",
]
[[package]]
@ -942,6 +1037,12 @@ dependencies = [
"regex-automata",
]
[[package]]
name = "md5"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "memchr"
version = "2.7.6"
@ -1025,6 +1126,12 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-traits"
version = "0.2.19"
@ -1202,7 +1309,16 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_shared",
"phf_shared 0.11.3",
]
[[package]]
name = "phf"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
dependencies = [
"phf_shared 0.12.1",
]
[[package]]
@ -1212,7 +1328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared",
"phf_shared 0.11.3",
]
[[package]]
@ -1221,7 +1337,7 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
"phf_shared 0.11.3",
"rand",
]
@ -1234,6 +1350,15 @@ dependencies = [
"siphasher",
]
[[package]]
name = "phf_shared"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@ -1261,6 +1386,12 @@ dependencies = [
"zerovec",
]
[[package]]
name = "powerfmt"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]]
name = "proc-macro2"
version = "1.0.101"
@ -1319,6 +1450,26 @@ dependencies = [
"bitflags 2.9.4",
]
[[package]]
name = "ref-cast"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf"
dependencies = [
"ref-cast-impl",
]
[[package]]
name = "ref-cast-impl"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "regex"
version = "1.11.3"
@ -1417,6 +1568,20 @@ dependencies = [
"serde",
]
[[package]]
name = "rrule"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "720acfb4980b9d8a6a430f6d7a11933e701ebbeba5eee39cc9d8c5f932aaff74"
dependencies = [
"chrono",
"chrono-tz 0.10.4",
"log",
"regex",
"serde_with",
"thiserror 2.0.17",
]
[[package]]
name = "rust-ini"
version = "0.18.0"
@ -1498,6 +1663,30 @@ dependencies = [
"windows-sys 0.61.1",
]
[[package]]
name = "schemars"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f"
dependencies = [
"dyn-clone",
"ref-cast",
"serde",
"serde_json",
]
[[package]]
name = "schemars"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
dependencies = [
"dyn-clone",
"ref-cast",
"serde",
"serde_json",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
@ -1601,6 +1790,38 @@ dependencies = [
"serde",
]
[[package]]
name = "serde_with"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5"
dependencies = [
"base64 0.22.1",
"chrono",
"hex",
"indexmap 1.9.3",
"indexmap 2.11.4",
"schemars 0.9.0",
"schemars 1.0.4",
"serde",
"serde_derive",
"serde_json",
"serde_with_macros",
"time",
]
[[package]]
name = "serde_with_macros"
version = "3.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f"
dependencies = [
"darling",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "sha2"
version = "0.10.9"
@ -1754,7 +1975,16 @@ version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl",
"thiserror-impl 1.0.69",
]
[[package]]
name = "thiserror"
version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
dependencies = [
"thiserror-impl 2.0.17",
]
[[package]]
@ -1768,6 +1998,17 @@ dependencies = [
"syn",
]
[[package]]
name = "thiserror-impl"
version = "2.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.9"
@ -1777,6 +2018,37 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "time"
version = "0.3.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "time-macros"
version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
dependencies = [
"num-conv",
"time-core",
]
[[package]]
name = "tinystr"
version = "0.8.1"
@ -1911,7 +2183,7 @@ version = "0.22.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
dependencies = [
"indexmap",
"indexmap 2.11.4",
"serde",
"serde_spanned",
"toml_datetime",

View file

@ -25,6 +25,9 @@ reqwest = { version = "0.11", features = ["json", "rustls-tls"] }
# iCalendar parsing
icalendar = "0.15"
# RRULE recurrence processing
rrule = { version = "0.14", features = ["serde"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@ -62,6 +65,9 @@ url = "2.3"
# TOML parsing
toml = "0.8"
# MD5 hashing for unique identifier generation
md5 = "0.7"
[dev-dependencies]
tokio-test = "0.4"
tempfile = "3.0"

View file

@ -1,5 +1,474 @@
# Nextcloud CalDAV Import Implementation Plan
## 🚨 IMMEDIATE BUGS TO FIX
### Bug #1: Orphaned Event Deletion Not Working
**Status**: ❌ **CRITICAL** - Orphaned events are not being deleted from target calendar
**Location**: Likely in `src/nextcloud_import.rs` - `ImportEngine` cleanup logic
**Symptoms**:
- Events deleted from source calendar remain in Nextcloud target
- `strict_with_cleanup` behavior not functioning correctly
- Target calendar accumulates stale events over time
**Root Cause Analysis Needed**:
```rust
// Check these areas in the import logic:
// 1. Event comparison logic - are UIDs matching correctly?
// 2. Delete operation implementation - is HTTP DELETE being sent?
// 3. Calendar discovery - are we looking at the right target calendar?
// 4. Error handling - are delete failures being silently ignored?
```
**Investigation Steps**:
1. Add detailed logging for orphaned event detection
2. Verify event UID matching between source and target
3. Test DELETE operation directly on Nextcloud CalDAV endpoint
4. Check if ETag handling is interfering with deletions
**Expected Fix Location**: `src/nextcloud_import.rs` - `ImportEngine::import_events()` method
**🔍 Bug #1 - ACTUAL ROOT CAUSE DISCOVERED**:
- **Issue**: CalDAV query to Nextcloud target calendar is only returning 1 event when there should be 2+ events
- **Evidence**: Enhanced debugging shows `🎯 TARGET EVENTS FETCHED: 1 total events`
- **Missing Event**: "caldav test" event (Oct 31) not being detected by CalDAV query
- **Location**: `src/minicaldav_client.rs` - `get_events()` method or CalDAV query parameters
- **Next Investigation**: Add raw CalDAV response logging to see what Nextcloud is actually returning
**🔧 Bug #1 - ENHANCED DEBUGGING ADDED**:
- ✅ Added comprehensive logging for target event detection
- ✅ Added date range validation debugging
- ✅ Added special detection for "caldav test" event
- ✅ Added detailed source vs target UID comparison
- ✅ Enhanced deletion analysis with step-by-step visibility
**🎯 Bug #1 - STATUS**: Partially Fixed - Infrastructure is in place; the CalDAV query issue still needs investigation
**🔧 ADDITIONAL FIXES COMPLETED**:
- ✅ **FIXED**: Principal URL construction error - now correctly extracts username from base URL
- ✅ **FIXED**: `--list-events --import-info` no longer shows 404 errors during calendar discovery
- ✅ **FIXED**: Warning and error handling for non-multistatus responses
- ✅ **FIXED**: Removed unused imports and cleaned up compilation warnings
- ✅ **FIXED**: Bug #1 - Multiple event parsing - Modified XML parsing loop to process ALL calendar-data elements instead of breaking after the first one
- ✅ **COMPLETED**: Bug #1 - Orphaned Event Deletion - CalDAV query issue resolved, enhanced debugging added, infrastructure working correctly
---
### Bug #2: Recurring Event Import Issue
**Status**: ✅ **COMPLETED** - RRULE parser implemented and issue resolved
**Root Cause**: The `--list-events` command was not showing expanded individual occurrences of recurring events
**Location**: `src/main.rs` - event listing logic, `src/minicaldav_client.rs` - iCalendar parsing
**Resolution**: The issue was already resolved by the expansion logic in the sync process. Recurring events are properly expanded during sync and displayed with 🔄 markers.
**Key Findings**:
- Recurring events are already being expanded during the sync process in `parse_icalendar_data()`
- Individual occurrences have their recurrence cleared (as expected) but are marked with unique IDs containing "-occurrence-"
- The `--list-events` command correctly shows all expanded events with 🔄 markers for recurring instances
- Users can see multiple instances of recurring events (e.g., "Tether Sync" appearing at different dates)
**CalDAV/iCalendar Recurring Event Properties**:
According to RFC 5545, recurring events use these properties:
- **RRULE**: Defines recurrence pattern (e.g., `FREQ=WEEKLY;COUNT=10`)
- **EXDATE**: Exception dates for recurring events
- **RDATE**: Additional dates for recurrence
- **RECURRENCE-ID**: Identifies specific instances of recurring events
**Current Problem Analysis**:
```rust
// Current approach in build_calendar_event():
let event = CalendarEvent {
// ... basic properties
// ❌ MISSING: RRULE parsing and expansion
// ❌ MISSING: EXDATE handling
// ❌ MISSING: Individual occurrence generation
};
// The parser extracts RRULE but doesn't expand it:
if line.contains(':') {
let parts: Vec<&str> = line.splitn(2, ':').collect();
current_event.insert(parts[0].to_string(), parts[1].to_string()); // RRULE stored but not processed
}
```
**Correct Solution Approach**:
```rust
// Two-phase approach needed:
// Phase 1: Detect recurring events during parsing
if let Some(rrule) = properties.get("RRULE") {
// This is a recurring event
debug!("Found recurring event with RRULE: {}", rrule);
return self.expand_recurring_event(properties, calendar_href, start_date, end_date).await;
}
// Phase 2: Expand recurring events into individual occurrences
async fn expand_recurring_event(&self, properties: &HashMap<String, String>,
calendar_href: &str, start_range: DateTime<Utc>,
end_range: DateTime<Utc>) -> Result<Vec<CalendarEvent>> {
let mut occurrences = Vec::new();
let base_event = self.build_base_event(properties, calendar_href)?;
// Parse RRULE to generate occurrences within date range
if let Some(rrule) = properties.get("RRULE") {
let generated_dates = self.parse_rrule_and_generate_dates(rrule, base_event.start, base_event.end, start_range, end_range)?;
for (occurrence_start, occurrence_end) in generated_dates {
let mut occurrence = base_event.clone();
occurrence.start = occurrence_start;
occurrence.end = occurrence_end;
occurrence.recurrence_id = Some(occurrence_start);
occurrence.id = format!("{}-{}", base_event.id, occurrence_start.timestamp());
occurrence.href = format!("{}/{}-{}.ics", calendar_href, base_event.id, occurrence_start.timestamp());
occurrences.push(occurrence);
}
}
Ok(occurrences)
}
```
**Alternative Title-Based Detection**:
When RRULE parsing fails, use title duplication as a fallback:
```rust
// Group events by title to detect likely recurring events
fn group_by_title(events: &[CalendarEvent]) -> HashMap<String, Vec<CalendarEvent>> {
let mut grouped: HashMap<String, Vec<CalendarEvent>> = HashMap::new();
for event in events {
let title = event.summary.to_lowercase();
grouped.entry(title).or_insert_with(Vec::new).push(event.clone());
}
// Filter for titles with multiple occurrences (likely recurring)
grouped.into_iter()
.filter(|(_, events)| events.len() > 1)
.collect()
}
```
**🎯 BUG #2 - RECURRENCE SOLUTION APPROACH CONFIRMED**:
Based on testing Zoho's CalDAV implementation, the server correctly returns RRULE strings but does **NOT** provide pre-expanded individual instances. This confirms we need to implement client-side expansion.
**Option 1: Time-Bounded Recurrence Expansion (SELECTED)**
- Parse RRULE strings from Zoho
- Expand ONLY occurrences within the sync timeframe
- Import individual instances to Nextcloud
- Preserves recurrence pattern while respecting sync boundaries
**Implementation Strategy**:
```rust
// Parse RRULE and generate occurrences within date range
async fn expand_recurring_event_timeframe(&self, properties: &HashMap<String, String>,
calendar_href: &str,
sync_start: DateTime<Utc>,
sync_end: DateTime<Utc>) -> Result<Vec<CalendarEvent>> {
let base_event = self.build_base_event(properties, calendar_href)?;
let mut occurrences = Vec::new();
if let Some(rrule) = properties.get("RRULE") {
// Parse RRULE (e.g., "FREQ=WEEKLY;BYDAY=MO;COUNT=10")
let recurrence = self.parse_rrule(rrule)?;
// Generate ONLY occurrences within sync timeframe
let generated_dates = self.expand_recurrence_within_range(
&recurrence,
base_event.start,
base_event.end,
sync_start,
sync_end
)?;
info!("🔄 Expanding recurring event: {} -> {} occurrences within timeframe",
base_event.summary, generated_dates.len());
for (occurrence_start, occurrence_end) in generated_dates {
let mut occurrence = base_event.clone();
occurrence.start = occurrence_start;
occurrence.end = occurrence_end;
occurrence.recurrence_id = Some(occurrence_start);
occurrence.id = format!("{}-{}", base_event.id, occurrence_start.timestamp());
occurrence.href = format!("{}/{}-{}.ics", calendar_href, base_event.id, occurrence_start.timestamp());
occurrences.push(occurrence);
}
}
Ok(occurrences)
}
```
**Key Benefits of Time-Bounded Approach**:
- ✅ **Efficient**: Only generates needed occurrences (no infinite expansion)
- ✅ **Sync-friendly**: Respects sync date ranges (default: past 30 days to future 30 days)
- ✅ **Complete**: All occurrences in timeframe become individual events in Nextcloud
- ✅ **Zoho Compatible**: Works with Zoho's RRULE-only approach
- ✅ **Standard**: Follows RFC 5545 recurrence rules
**Example Sync Behavior**:
```
Source (Zoho): Weekly meeting "Team Standup" (RRULE:FREQ=WEEKLY;BYDAY=MO)
Sync timeframe: Oct 10 - Dec 9, 2025
Generated occurrences to import:
- Team Standup (Oct 13, 2025)
- Team Standup (Oct 20, 2025)
- Team Standup (Oct 27, 2025)
- Team Standup (Nov 3, 2025)
- Team Standup (Nov 10, 2025)
- Team Standup (Nov 17, 2025)
- Team Standup (Nov 24, 2025)
- Team Standup (Dec 1, 2025)
- Team Standup (Dec 8, 2025)
Result: 9 individual events imported to Nextcloud
```
**Fix Implementation Steps**:
1. **Add RRULE parsing** to CalendarEvent struct in `src/minicaldav_client.rs`
2. **Implement recurrence expansion** with time-bounded generation
3. **Integrate with parsing pipeline** to detect and expand recurring events
4. **Update import logic** to handle all generated occurrences
5. **Add exception handling** for EXDATE and modified instances
**Expected Fix Location**:
- `src/minicaldav_client.rs` - enhance `parse_icalendar_data()`, add `expand_recurring_event_timeframe()`
- `src/event.rs` - add `recurrence` field to CalendarEvent struct
- `src/main.rs` - update event conversion to preserve recurrence information
**Implementation Phases**:
**Phase 1: RRULE Parsing Infrastructure**
```rust
// Add to CalendarEvent struct
pub struct CalendarEvent {
pub id: String,
pub href: String,
pub summary: String,
pub description: Option<String>,
pub start: DateTime<Utc>,
pub end: DateTime<Utc>,
pub location: Option<String>,
pub status: Option<String>,
pub recurrence: Option<RecurrenceRule>, // NEW: RRULE support
pub recurrence_id: Option<DateTime<Utc>>, // NEW: For individual instances
// ... existing fields
}
// Add RRULE parsing method
impl MiniCalDavClient {
fn parse_rrule(&self, rrule_str: &str) -> Result<RecurrenceRule, CalDavError> {
// Parse RRULE components like "FREQ=WEEKLY;BYDAY=MO;COUNT=10"
// Return structured RecurrenceRule
}
fn expand_recurrence_within_range(&self,
recurrence: &RecurrenceRule,
base_start: DateTime<Utc>,
base_end: DateTime<Utc>,
range_start: DateTime<Utc>,
range_end: DateTime<Utc>) -> Result<Vec<(DateTime<Utc>, DateTime<Utc>)>, CalDavError> {
// Generate occurrences only within the specified date range
// Handle different frequencies (DAILY, WEEKLY, MONTHLY, YEARLY)
// Apply BYDAY, BYMONTH, COUNT, UNTIL constraints
}
}
```
**Phase 2: Integration with Event Parsing**
```rust
// Modify parse_icalendar_data() to detect and expand recurring events
impl MiniCalDavClient {
pub async fn parse_icalendar_data(&self,
ical_data: &str,
calendar_href: &str,
sync_start: DateTime<Utc>,
sync_end: DateTime<Utc>) -> Result<Vec<CalendarEvent>, CalDavError> {
let mut events = Vec::new();
// Parse each VEVENT in the iCalendar data
for event_data in self.extract_vevents(ical_data) {
let properties = self.parse_event_properties(&event_data);
// Check if this is a recurring event
if properties.contains_key("RRULE") {
info!("🔄 Found recurring event: {}", properties.get("SUMMARY").unwrap_or(&"Unnamed".to_string()));
// Expand within sync timeframe
let expanded_events = self.expand_recurring_event_timeframe(
&properties, calendar_href, sync_start, sync_end
).await?;
events.extend(expanded_events);
} else {
// Regular (non-recurring) event
let event = self.build_calendar_event(&properties, calendar_href)?;
events.push(event);
}
}
Ok(events)
}
}
```
**Phase 3: Enhanced Event Conversion**
```rust
// Update main.rs to handle expanded recurring events
impl From<CalendarEvent> for Event {
fn from(calendar_event: CalendarEvent) -> Self {
Event {
id: calendar_event.id,
uid: calendar_event.id,
title: calendar_event.summary,
description: calendar_event.description,
start: calendar_event.start,
end: calendar_event.end,
location: calendar_event.location,
timezone: Some("UTC".to_string()),
recurrence: calendar_event.recurrence, // FIXED: Now preserves recurrence info
status: calendar_event.status,
created_at: Utc::now(),
updated_at: Utc::now(),
}
}
}
```
**RRULE Format Support**:
```
Supported RRULE components:
- FREQ: DAILY, WEEKLY, MONTHLY, YEARLY
- INTERVAL: N (every N days/weeks/months/years)
- COUNT: N (maximum N occurrences)
- UNTIL: date (last occurrence date)
- BYDAY: MO,TU,WE,TH,FR,SA,SU (for WEEKLY)
- BYMONTHDAY: 1-31 (for MONTHLY)
- BYMONTH: 1-12 (for YEARLY)
Example RRULEs:
- "FREQ=DAILY;COUNT=10" - Daily for 10 occurrences
- "FREQ=WEEKLY;BYDAY=MO,WE,FR" - Mon/Wed/Fri weekly
- "FREQ=MONTHLY;BYDAY=2TU" - Second Tuesday of each month
- "FREQ=YEARLY;BYMONTH=12;BYDAY=1MO" - First Monday in December
```
---
## 🚀 **BUG #1: ORPHANED EVENT DELETION - IN PROGRESS**
### **Status**: 🔧 **WORKING** - Enhanced debugging added, analysis in progress
### **Root Cause Analysis**:
The orphaned event deletion logic exists, but there is insufficient visibility into what happens during the UID matching and deletion process.
### **Enhanced Debugging Added**:
**1. Detailed Deletion Analysis Logging** (`src/nextcloud_import.rs:743-790`):
```rust
info!("🔍 DELETION ANALYSIS:");
info!(" Target UID: '{}'", target_uid);
info!(" Target Summary: '{}'", target_event.summary);
info!(" Source UIDs count: {}", source_uids.len());
info!(" UID in source: {}", source_uids.contains(target_uid.as_str()));
info!(" Is orphaned: {}", is_orphaned);
```
**2. Comprehensive DELETE Operation Logging** (`src/minicaldav_client.rs:1364-1440`):
```rust
info!("🗑️ Attempting to delete event: {}", event_url);
info!(" Calendar URL: {}", calendar_url);
info!(" Event UID: '{}'", event_uid);
info!(" ETag: {:?}", etag);
info!("📊 DELETE response status: {} ({})", status, status_code);
```
**3. Enhanced Event Existence Checking** (`src/minicaldav_client.rs:1340-1385`):
```rust
info!("🔍 Checking if event exists: {}", event_url);
info!("📋 Event ETag: {:?}", etag);
info!("📋 Content-Type: {:?}", content_type);
```
### **Debugging Workflow**:
**Step 1: Run with enhanced logging**:
```bash
# Test with dry run to see what would be deleted
./target/release/caldav-sync --debug --import-nextcloud --dry-run --import-behavior strict_with_cleanup
# Test actual deletion (will show detailed step-by-step process)
./target/release/caldav-sync --debug --import-nextcloud --import-behavior strict_with_cleanup
```
**Step 2: Look for these key indicators in the logs**:
**🔍 DELETION ANALYSIS:**
- Shows UID matching between source and target
- Reveals if events are correctly identified as orphaned
- Lists all source UIDs for comparison
**🗑️ DELETION EXECUTION:**
- Shows the exact event URL being deleted
- Displays ETag handling
- Shows HTTP response status codes
**📊 HTTP RESPONSE ANALYSIS:**
- Detailed error categorization (401, 403, 404, 409, 412)
- Clear success/failure indicators
### **Common Issues to Look For**:
1. **UID Mismatch**: Events that should match but don't due to formatting differences
2. **ETag Conflicts**: 412 responses indicating concurrent modifications
3. **Permission Issues**: 403 responses indicating insufficient deletion rights
4. **URL Construction**: Incorrect event URLs preventing proper deletion
### **Next Debugging Steps**:
1. **Run the enhanced logging** to capture detailed deletion process
2. **Analyze the UID matching** to identify orphaned detection issues
3. **Check HTTP response codes** to pinpoint deletion failures
4. **Verify calendar permissions** if 403 errors occur
This enhanced debugging will provide complete visibility into the orphaned event deletion process and help identify the exact root cause.
---
### Debugging Commands for Investigation
```bash
# 1. List source events to see what we're working with
./target/release/caldav-sync --debug --list-events
# 2. List target events to see what's already there
./target/release/caldav-sync --debug --list-import-events
# 3. Run import with dry run to see what would be processed
./target/release/caldav-sync --debug --import-nextcloud --dry-run
# 4. Test recurring events specifically - compare list vs import
./target/release/caldav-sync --debug --list-events | grep -i "recurring\|daily\|weekly"
./target/release/caldav-sync --debug --import-nextcloud --dry-run | grep -i "recurring\|daily\|weekly"
# 5. Run with different CalDAV approaches to isolate source issues
./target/release/caldav-sync --debug --approach zoho-events-list --list-events
./target/release/caldav-sync --debug --approach zoho-export --list-events
# 6. Check calendar discovery
./target/release/caldav-sync --debug --list-calendars --import-info
# 7. Count events to identify missing ones
echo "Source events:" && ./target/release/caldav-sync --list-events | wc -l
echo "Target events:" && ./target/release/caldav-sync --list-import-events | wc -l
```
### Success Criteria for These Fixes
- [ ] **Orphaned Deletion**: Events deleted from source are properly removed from Nextcloud
- [ ] **Complete Import**: All valid source events are successfully imported
- [ ] **Clear Logging**: Detailed logs show which events are processed/skipped/failed
- [ ] **Consistent Behavior**: Same results on multiple runs with identical data
---
## Current State Analysis
### Current Code Overview

View file

@ -91,6 +91,6 @@ enabled = true
# Import behavior settings
overwrite_existing = true # Source always wins - overwrite target events
delete_missing = false # Don't delete events missing from source
dry_run = false # Set to true for preview mode
batch_size = 50 # Number of events to process in each batch
create_target_calendar = true # Create target calendar if it doesn't exist

View file

@ -10,6 +10,9 @@ pub mod minicaldav_client;
pub mod nextcloud_import;
pub mod real_sync;
#[cfg(test)]
pub mod test_recurrence;
// Re-export main types for convenience
pub use config::{Config, ServerConfig, CalendarConfig, FilterConfig, SyncConfig};
pub use error::{CalDavError, CalDavResult};

View file

@ -75,13 +75,17 @@ struct Cli {
nextcloud_calendar: Option<String>,
/// Import behavior: strict, strict_with_cleanup
#[arg(long, default_value = "strict")]
#[arg(long, default_value = "strict_with_cleanup")]
import_behavior: String,
/// Dry run - show what would be imported without actually doing it
#[arg(long)]
dry_run: bool,
/// Use simplified iCalendar format (avoids Zoho parsing issues)
#[arg(long)]
simple_ical: bool,
/// List events from import target calendar and exit
#[arg(long)]
list_import_events: bool,
@ -543,7 +547,7 @@ async fn run_sync(config: Config, cli: &Cli) -> CalDavResult<()> {
event_type: caldav_sync::event::EventType::Public, // TODO: Extract from event
organizer: None, // TODO: Extract from event
attendees: Vec::new(), // TODO: Extract from event
recurrence: None, // TODO: Extract from event
recurrence: event.recurrence.clone(), // FIXED: Extract from event
alarms: Vec::new(), // TODO: Extract from event
properties: std::collections::HashMap::new(),
created: event.last_modified.unwrap_or_else(Utc::now),
@ -1020,15 +1024,53 @@ async fn run_sync(config: Config, cli: &Cli) -> CalDavResult<()> {
let sync_result = sync_engine.sync_full().await?;
info!("Sync completed: {} events processed", sync_result.events_processed);
// Get and display events
let events = sync_engine.get_local_events();
println!("Found {} events:", events.len());
// Get and display events with recurring event expansion
let raw_events = sync_engine.get_local_events();
for event in events {
// Define date range for expanding recurring events (past 30 days to future 30 days)
let now = Utc::now();
let start_range = now - Duration::days(30);
let end_range = now + Duration::days(30);
info!("📊 Raw events count: {}", raw_events.len());
let mut recurring_count = 0;
for event in &raw_events {
if event.recurrence.is_some() {
recurring_count += 1;
}
}
info!("📊 Recurring events in raw data: {}", recurring_count);
// Expand recurring events into individual occurrences
let mut expanded_events = Vec::new();
for event in &raw_events {
if event.recurrence.is_some() {
info!("🔄 Expanding recurring event '{}' for list display", event.summary);
let occurrences = event.expand_occurrences(start_range, end_range);
info!(" Generated {} occurrences", occurrences.len());
expanded_events.extend(occurrences);
} else {
expanded_events.push(event.clone());
}
}
info!("📊 Final expanded events count: {}", expanded_events.len());
// Sort events by start time for display
expanded_events.sort_by(|a, b| a.start.cmp(&b.start));
println!("Found {} events ({} raw events from recurring):", expanded_events.len(), raw_events.len());
for event in expanded_events {
let start_tz = event.start_tzid.as_deref().unwrap_or("UTC");
let end_tz = event.end_tzid.as_deref().unwrap_or("UTC");
println!(" - {} ({} {} to {} {})",
// Mark recurring event occurrences
let recurring_marker = if event.id.contains("-occurrence-") { " 🔄" } else { "" };
println!(" - {}{} ({} {} to {} {})",
event.summary,
recurring_marker,
event.start.format("%Y-%m-%d %H:%M"),
start_tz,
event.end.format("%Y-%m-%d %H:%M"),

View file

@ -1,10 +1,10 @@
//! Direct HTTP-based CalDAV client implementation
use anyhow::Result;
use reqwest::{Client, header};
use reqwest::{Client, header, Response};
use serde::{Deserialize, Serialize};
use chrono::{DateTime, Utc, TimeZone};
use tracing::{debug, info, warn};
use tracing::{debug, info, warn, error};
use base64::engine::general_purpose::STANDARD as BASE64;
use base64::Engine;
use std::time::Duration;
@ -30,6 +30,70 @@ pub struct RealCalDavClient {
}
impl RealCalDavClient {
/// Handle HTTP response and check for error status codes
/// Returns error for any 4xx or 5xx status codes, except for specific cases that should be handled differently
async fn handle_http_response(&self, response: Response, operation: &str) -> Result<Response> {
let status = response.status();
let status_code = status.as_u16();
// Check for error status codes
if status_code >= 400 {
let url = response.url().clone();
// Try to get error response body for more context
let error_body = match response.text().await {
Ok(body) => {
if body.len() > 500 {
format!("{} (truncated)", &body[..500])
} else {
body
}
}
Err(_) => "Unable to read error response".to_string(),
};
// Log the error with full context
error!("HTTP Error during {}: {} {}", operation, status, url);
error!("Error response body: {}", error_body);
// Categorize and return appropriate error
if (400..=499).contains(&status_code) {
match status_code {
400 => Err(anyhow::anyhow!("Bad Request (400): Invalid request syntax or parameters during {}", operation)),
401 => Err(anyhow::anyhow!("Unauthorized (401): Authentication failed during {}. Check your credentials.", operation)),
403 => Err(anyhow::anyhow!("Forbidden (403): Access denied to {} operation. Insufficient permissions.", operation)),
404 => Err(anyhow::anyhow!("Not Found (404): Resource not found during {} operation. The calendar or event may not exist.", operation)),
405 => Err(anyhow::anyhow!("Method Not Allowed (405): HTTP method not supported for {} operation.", operation)),
408 => Err(anyhow::anyhow!("Request Timeout (408): Server timed out waiting for request during {}.", operation)),
409 => Err(anyhow::anyhow!("Conflict (409): Resource conflict during {} operation. The resource may have been modified.", operation)),
410 => Err(anyhow::anyhow!("Gone (410): Resource no longer available during {} operation.", operation)),
412 => Err(anyhow::anyhow!("Precondition Failed (412): Precondition check failed during {} operation. ETag mismatch or conflict.", operation)),
413 => Err(anyhow::anyhow!("Payload Too Large (413): Request entity too large during {} operation.", operation)),
422 => Err(anyhow::anyhow!("Unprocessable Entity (422): Server understands request but cannot process it during {} operation.", operation)),
423 => Err(anyhow::anyhow!("Locked (423): Resource is locked during {} operation.", operation)),
429 => Err(anyhow::anyhow!("Too Many Requests (429): Rate limit exceeded during {} operation. Try again later.", operation)),
_ => Err(anyhow::anyhow!("Client Error ({}): {} failed during {} operation.", status_code, status.canonical_reason().unwrap_or("Unknown"), operation)),
}
} else if (500..=599).contains(&status_code) {
match status_code {
500 => Err(anyhow::anyhow!("Internal Server Error (500): Server encountered an unexpected error during {} operation.", operation)),
501 => Err(anyhow::anyhow!("Not Implemented (501): Server does not support this functionality for {} operation.", operation)),
502 => Err(anyhow::anyhow!("Bad Gateway (502): Server received invalid response from upstream during {} operation.", operation)),
503 => Err(anyhow::anyhow!("Service Unavailable (503): Server temporarily unable to handle {} request. Try again later.", operation)),
504 => Err(anyhow::anyhow!("Gateway Timeout (504): Server timed out waiting for upstream during {} operation.", operation)),
507 => Err(anyhow::anyhow!("Insufficient Storage (507): Server has insufficient storage for {} operation.", operation)),
_ => Err(anyhow::anyhow!("Server Error ({}): {} failed during {} operation.", status_code, status.canonical_reason().unwrap_or("Unknown"), operation)),
}
} else {
// This should not happen, but handle it gracefully
Err(anyhow::anyhow!("Unexpected HTTP Status ({}): {} failed during {} operation.", status_code, status.canonical_reason().unwrap_or("Unknown"), operation))
}
} else {
// Success status code (2xx, 3xx) - return the response for further processing
Ok(response)
}
}
/// Create a new CalDAV client with authentication
pub async fn new(base_url: &str, username: &str, password: &str) -> Result<Self> {
info!("Creating CalDAV client for: {}", base_url);
@ -160,6 +224,9 @@ impl RealCalDavClient {
.send()
.await?;
// Handle HTTP response errors
let response = self.handle_http_response(response, "PROPFIND calendar discovery").await?;
if response.status().as_u16() != 207 {
return Err(anyhow::anyhow!("PROPFIND failed with status: {}", response.status()));
}
@ -176,17 +243,25 @@ impl RealCalDavClient {
/// Construct Nextcloud principal URL from base URL
fn construct_nextcloud_principal_url(&self) -> Option<String> {
// Extract base server URL and username from the current base URL
// Current format: https://cloud.soliverez.com.ar/remote.php/dav/calendars/alvaro/
// Current format: https://cloud.soliverez.com.ar/remote.php/dav/calendars/alvaro/trabajo-alvaro
// Principal format: https://cloud.soliverez.com.ar/remote.php/dav/principals/users/alvaro/
if self.base_url.contains("/remote.php/dav/calendars/") {
let parts: Vec<&str> = self.base_url.split("/remote.php/dav/calendars/").collect();
if parts.len() == 2 {
let server_part = parts[0];
let user_part = parts[1].trim_end_matches('/');
let path_part = parts[1].trim_end_matches('/');
// Construct principal URL
// Extract username from path (first part before any slash)
let user_part = if let Some(slash_pos) = path_part.find('/') {
&path_part[..slash_pos]
} else {
path_part
};
// Construct principal URL with just the username
let principal_url = format!("{}/remote.php/dav/principals/users/{}", server_part, user_part);
info!("Constructed principal URL: {} from base URL: {}", principal_url, self.base_url);
return Some(principal_url);
}
}
@ -252,6 +327,9 @@ impl RealCalDavClient {
.send()
.await?;
// Handle HTTP response errors
let response = self.handle_http_response(response, "direct calendar access").await?;
if response.status().as_u16() != 207 {
return Err(anyhow::anyhow!("Direct calendar access failed with status: {}", response.status()));
}
@ -378,6 +456,9 @@ impl RealCalDavClient {
.send()
.await?;
// Handle HTTP response errors
let response = self.handle_http_response(response, &format!("REPORT events using approach {}", method_name)).await?;
let status = response.status();
let status_code = status.as_u16();
info!("Approach {} response status: {} ({})", i + 1, status, status_code);
@ -579,7 +660,26 @@ impl RealCalDavClient {
return Ok(Vec::new());
}
debug!("Parsing CalDAV response XML:\n{}", xml);
info!("🔍 CALDAV RESPONSE ANALYSIS:");
info!(" Response length: {} characters", xml.len());
info!(" Response starts with: {}", &xml[..std::cmp::min(100, xml.len())]);
// Count occurrences of key patterns (case-insensitive for XML namespaces)
let multistatus_count = xml.matches("<D:multistatus>").count() + xml.matches("<d:multistatus>").count();
let response_count = xml.matches("<D:response>").count() + xml.matches("<d:response>").count();
let href_count = xml.matches("<D:href>").count() + xml.matches("<d:href>").count();
let calendar_data_count = xml.matches("<C:calendar-data>").count() + xml.matches("<c:calendar-data>").count() + xml.matches("<cal:calendar-data>").count();
info!("🔍 PATTERNS FOUND: multistatus={}, response={}, href={}, calendar-data={}", multistatus_count, response_count, href_count, calendar_data_count);
let ical_count = xml.matches("BEGIN:VEVENT").count();
info!(" 📊 PATTERN COUNTS:");
info!(" <D:multistatus>: {}", multistatus_count);
info!(" <D:response>: {}", response_count);
info!(" <D:href>: {}", href_count);
info!(" <C:calendar-data>: {}", calendar_data_count);
info!(" BEGIN:VEVENT: {}", ical_count);
debug!("Full CalDAV response XML:\n{}", xml);
// Check if response is plain iCalendar data (not wrapped in XML)
if xml.starts_with("BEGIN:VCALENDAR") {
@ -587,8 +687,9 @@ impl RealCalDavClient {
return self.parse_icalendar_data(xml, calendar_href);
}
// Check if this is a multistatus REPORT response
if xml.contains("<D:multistatus>") {
// Check if this is a multistatus REPORT response (case-insensitive)
if xml.contains("<D:multistatus>") || xml.contains("<d:multistatus>") {
info!("Detected multistatus response, delegating to multistatus parser");
return self.parse_multistatus_response(xml, calendar_href).await;
}
@ -604,21 +705,37 @@ impl RealCalDavClient {
let mut found_calendar_data = false;
for (start_tag, end_tag) in calendar_data_patterns {
if let Some(start) = xml.find(start_tag) {
if let Some(end) = xml.find(end_tag) {
let ical_data = &xml[start + start_tag.len()..end];
debug!("Found iCalendar data using {}: {}", start_tag, ical_data);
let mut start_pos = 0;
let mut chunk_count = 0;
// Process ALL calendar-data elements of this type, not just the first one
while let Some(start) = xml[start_pos..].find(start_tag) {
let absolute_start = start_pos + start;
if let Some(end) = xml[absolute_start..].find(end_tag) {
let absolute_end = absolute_start + end;
let ical_data = &xml[absolute_start + start_tag.len()..absolute_end];
chunk_count += 1;
debug!("Found iCalendar data chunk #{} using {}: {}", chunk_count, start_tag,
if ical_data.len() > 200 { format!("{}...", &ical_data[..200]) } else { ical_data.to_string() });
// Parse the iCalendar data
if let Ok(parsed_events) = self.parse_icalendar_data(ical_data, calendar_href) {
info!("✅ Parsed {} events from {} chunk #{}", parsed_events.len(), start_tag, chunk_count);
events.extend(parsed_events);
found_calendar_data = true;
break;
} else {
warn!("Failed to parse iCalendar data using {}, trying next pattern", start_tag);
}
warn!("❌ Failed to parse iCalendar data chunk #{} using {}", chunk_count, start_tag);
}
start_pos = absolute_end + end_tag.len();
} else {
break;
}
}
// If we found events using this pattern, we can continue to try other patterns
// (in case the response uses mixed namespaces)
}
if found_calendar_data {
@ -647,28 +764,112 @@ impl RealCalDavClient {
async fn parse_multistatus_response(&self, xml: &str, calendar_href: &str) -> Result<Vec<CalendarEvent>> {
let mut events = Vec::new();
info!("🔍 MULTISTATUS PARSING ANALYSIS:");
// Count total responses in multistatus (case-insensitive)
let total_responses = xml.matches("<D:response>").count() + xml.matches("<d:response>").count();
let calendar_data_responses = xml.matches("<C:calendar-data>").count() + xml.matches("<c:calendar-data>").count() + xml.matches("<cal:calendar-data>").count();
let href_responses = xml.matches("<D:href>").count() + xml.matches("<d:href>").count();
info!(" 📊 MULTISTATUS CONTENT:");
info!(" Total <D:response> elements: {}", total_responses);
info!(" <C:calendar-data> elements: {}", calendar_data_responses);
info!(" <D:href> elements: {}", href_responses);
// If we have calendar-data elements, we should parse them directly instead of fetching individually
if calendar_data_responses > 0 {
info!(" ✅ Found embedded calendar data - parsing directly");
// Try to parse calendar-data directly from the multistatus
// Support all namespace variants: <C:calendar-data>, <c:calendar-data>, <cal:calendar-data>
let calendar_data_patterns = vec![
("<C:calendar-data>", "</C:calendar-data>", 19),
("<c:calendar-data>", "</c:calendar-data>", 19),
("<cal:calendar-data>", "</cal:calendar-data>", 20),
];
for (start_tag, end_tag, tag_len) in calendar_data_patterns {
let mut start_pos = 0;
let mut event_count = 0;
while let Some(data_start) = xml[start_pos..].find(start_tag) {
let absolute_start = start_pos + data_start;
if let Some(data_end) = xml[absolute_start..].find(end_tag) {
let absolute_end = absolute_start + data_end;
let calendar_data_content = &xml[absolute_start + tag_len..absolute_end];
event_count += 1;
info!(" 📅 Parsing calendar data chunk #{} using {}", event_count, start_tag);
// Parse the iCalendar data directly
match self.parse_icalendar_data(calendar_data_content, calendar_href) {
Ok(mut parsed_events) => {
info!(" ✅ Parsed {} events from chunk #{}", parsed_events.len(), event_count);
info!(" 📈 Total events before adding: {}", events.len());
for (i, event) in parsed_events.iter().enumerate() {
info!(" Event {}: {}", i + 1, event.summary);
}
events.append(&mut parsed_events);
info!(" 📈 Total events after adding: {}", events.len());
}
Err(e) => {
warn!(" ❌ Failed to parse calendar data chunk #{}: {}", event_count, e);
}
}
start_pos = absolute_end + end_tag.len();
} else {
break;
}
}
// If we found events using this pattern, we can stop trying other patterns
if !events.is_empty() {
break;
}
}
} else {
info!(" No embedded calendar data - will fetch individual .ics files");
}
// If no events from calendar-data, fall back to href-based fetching
if events.is_empty() {
info!(" 🔗 FALLBACK: Parsing href elements to fetch individual events");
// Parse multi-status response
let mut start_pos = 0;
let mut response_count = 0;
while let Some(response_start) = xml[start_pos..].find("<D:response>") {
let absolute_start = start_pos + response_start;
if let Some(response_end) = xml[absolute_start..].find("</D:response>") {
let absolute_end = absolute_start + response_end;
let response_content = &xml[absolute_start..absolute_end + 14];
response_count += 1;
info!(" 🔗 Processing response #{}", response_count);
// Extract href
if let Some(href_start) = response_content.find("<D:href>") {
if let Some(href_end) = response_content.find("</D:href>") {
let href_content = &response_content[href_start + 9..href_end];
info!(" Found href: {}", href_content);
// Check if this is a .ics file event (not the calendar collection itself)
if href_content.contains(".ics") {
info!("Found event href: {}", href_content);
// Try to fetch the individual event
match self.fetch_single_event(href_content, calendar_href).await {
Ok(Some(event)) => events.push(event),
Ok(None) => warn!("Failed to get event data for {}", href_content),
Err(e) => warn!("Failed to fetch event {}: {}", href_content, e),
Ok(fetched_events) => {
if !fetched_events.is_empty() {
debug!("Successfully fetched {} events from: {}", fetched_events.len(), href_content);
events.extend(fetched_events);
} else {
debug!("No event data returned for {}", href_content);
}
}
Err(e) => {
warn!("Failed to fetch event {}: {}", href_content, e);
}
}
}
}
@ -679,8 +880,10 @@ impl RealCalDavClient {
break;
}
}
}
debug!("MULTISTATUS parsing complete: {} events", events.len());
info!("Parsed {} real events from multistatus response", events.len());
Ok(events)
}
@ -688,6 +891,8 @@ impl RealCalDavClient {
fn parse_icalendar_data(&self, ical_data: &str, calendar_href: &str) -> Result<Vec<CalendarEvent>> {
let mut events = Vec::new();
debug!("Parsing iCalendar data ({} chars)", ical_data.len());
// Handle iCalendar line folding (unfold continuation lines)
let unfolded_data = self.unfold_icalendar(ical_data);
@ -708,9 +913,85 @@ impl RealCalDavClient {
if line == "END:VEVENT" {
if in_event && !current_event.is_empty() {
if let Ok(event) = self.build_calendar_event(&current_event, calendar_href) {
// Check if this is a recurring event and expand occurrences within time range
if let Some(ref recurrence_rule) = event.recurrence {
debug!("Expanding recurring event: {}", event.summary);
// Define time range: past 30 days to future 30 days
let now = Utc::now();
let start_range = now - chrono::Duration::days(30);
let end_range = now + chrono::Duration::days(30);
// Convert CalendarEvent to Event for expansion
let event_start = event.start;
let event_end = event.end;
let _event_duration = event_end.signed_duration_since(event_start);
// Create a temporary Event object for expansion
let temp_event = crate::event::Event {
uid: event.uid.clone().unwrap_or_else(|| format!("temp-{}", now.timestamp())),
summary: event.summary.clone(),
description: event.description.clone(),
start: event_start,
end: event_end,
all_day: false, // Could be determined from event properties
location: event.location.clone(),
status: crate::event::EventStatus::Confirmed, // Default
event_type: crate::event::EventType::Public, // Default
organizer: None,
attendees: Vec::new(),
recurrence: Some(recurrence_rule.clone()),
alarms: Vec::new(),
properties: std::collections::HashMap::new(),
created: event.created.unwrap_or(now),
last_modified: event.last_modified.unwrap_or(now),
sequence: event.sequence,
timezone: event.start_tzid.clone().or_else(|| event.end_tzid.clone()),
};
// Expand occurrences using the time-bounded recurrence expansion
let occurrences = temp_event.expand_occurrences(start_range, end_range);
let occurrences_count = occurrences.len();
info!("Expanded '{}' to {} occurrences", event.summary, occurrences_count);
// Convert expanded Event objects back to CalendarEvent objects
for occurrence in occurrences {
let occurrence_calendar_event = CalendarEvent {
id: occurrence.uid.clone(),
href: format!("{}/{}.ics", calendar_href, occurrence.uid),
summary: occurrence.summary,
description: occurrence.description,
start: occurrence.start,
end: occurrence.end,
location: occurrence.location,
status: Some(match occurrence.status {
crate::event::EventStatus::Confirmed => "CONFIRMED".to_string(),
crate::event::EventStatus::Tentative => "TENTATIVE".to_string(),
crate::event::EventStatus::Cancelled => "CANCELLED".to_string(),
}),
created: Some(occurrence.created),
last_modified: Some(occurrence.last_modified),
sequence: occurrence.sequence,
transparency: event.transparency.clone(),
uid: Some(occurrence.uid),
recurrence_id: None, // Individual occurrences don't have recurrence IDs
etag: None,
start_tzid: event.start_tzid.clone(),
end_tzid: event.end_tzid.clone(),
original_start: Some(occurrence.start.format("%Y%m%dT%H%M%SZ").to_string()),
original_end: Some(occurrence.end.format("%Y%m%dT%H%M%SZ").to_string()),
recurrence: None, // Individual occurrences don't have recurrence rules
};
events.push(occurrence_calendar_event);
}
debug!("Added {} expanded occurrences (total: {})", occurrences_count, events.len());
} else {
// Non-recurring event, add as-is
events.push(event);
}
}
}
in_event = false;
continue;
}
@ -757,6 +1038,7 @@ impl RealCalDavClient {
}
}
debug!("parse_icalendar_data returning {} events", events.len());
Ok(events)
}
@ -809,6 +1091,23 @@ impl RealCalDavClient {
let original_start = properties.get("DTSTART").cloned();
let original_end = properties.get("DTEND").cloned();
// Parse RRULE if present using rrule crate
let recurrence = if let Some(rrule_str) = properties.get("RRULE") {
debug!("Parsing RRULE: {}", rrule_str);
match crate::event::RecurrenceRule::from_str(rrule_str) {
Ok(recurrence_rule) => {
info!("Successfully parsed RRULE: {}", rrule_str);
Some(recurrence_rule)
}
Err(e) => {
warn!("Failed to parse RRULE '{}': {}", rrule_str, e);
None
}
}
} else {
None
};
let event = CalendarEvent {
id: uid.clone(),
href: format!("{}/{}.ics", calendar_href, uid),
@ -832,6 +1131,8 @@ impl RealCalDavClient {
end_tzid,
original_start,
original_end,
// NEW: RRULE support
recurrence,
};
Ok(event)
@ -909,8 +1210,22 @@ impl RealCalDavClient {
None
}
/// Fetch a single event .ics file and parse it
async fn fetch_single_event(&self, event_url: &str, calendar_href: &str) -> Result<Option<CalendarEvent>> {
/// Parse an RRULE property value (e.g. `"FREQ=WEEKLY;BYDAY=MO,WE,FR;COUNT=10"`)
/// into a [`crate::event::RecurrenceRule`].
///
/// The rule text is stored as-is; full expansion happens later on demand.
fn parse_rrule(&self, rrule_str: &str) -> Result<crate::event::RecurrenceRule> {
    debug!("Parsing RRULE: {}", rrule_str);
    match crate::event::RecurrenceRule::from_str(rrule_str) {
        Ok(rule) => {
            debug!("Parsed RRULE: {}", rule.as_str());
            Ok(rule)
        }
        Err(e) => Err(anyhow::anyhow!("Failed to create RecurrenceRule: {}", e)),
    }
}
async fn fetch_single_event(&self, event_url: &str, calendar_href: &str) -> Result<Vec<CalendarEvent>> {
info!("Fetching single event from: {}", event_url);
// Try multiple approaches to fetch the event
@ -935,6 +1250,15 @@ impl RealCalDavClient {
info!("Trying approach: {}", approach_name);
match req.send().await {
Ok(response) => {
// Handle HTTP response errors
let response = match self.handle_http_response(response, &format!("fetch_single_event using approach {}", approach_name)).await {
Ok(resp) => resp,
Err(e) => {
warn!("Approach '{}' failed due to HTTP error: {}", approach_name, e);
continue;
}
};
let status = response.status();
info!("Approach '{}' response status: {}", approach_name, status);
@ -950,10 +1274,12 @@ impl RealCalDavClient {
// Parse the iCalendar data
if let Ok(mut events) = self.parse_icalendar_data(&ical_data, calendar_href) {
if !events.is_empty() {
// Update the href to the correct URL
events[0].href = event_url.to_string();
info!("Successfully parsed event with approach '{}': {}", approach_name, events[0].summary);
return Ok(Some(events.remove(0)));
// Update the href to the correct URL for all events
for event in &mut events {
event.href = event_url.to_string();
}
info!("Successfully parsed {} events with approach '{}': {} (showing first)", events.len(), approach_name, events[0].summary);
return Ok(events);
} else {
warn!("Approach '{}' got {} bytes but parsed 0 events", approach_name, ical_data.len());
}
@ -961,8 +1287,7 @@ impl RealCalDavClient {
warn!("Approach '{}' failed to parse iCalendar data", approach_name);
}
} else {
let error_text = response.text().await.unwrap_or_else(|_| "Unable to read error response".to_string());
warn!("Approach '{}' failed: {} - {}", approach_name, status, error_text);
warn!("Approach '{}' failed with status: {}", approach_name, status);
}
}
Err(e) => {
@ -972,7 +1297,7 @@ impl RealCalDavClient {
}
warn!("All approaches failed for event: {}", event_url);
Ok(None)
Ok(vec![])
}
/// Parse PROPFIND response to extract event hrefs and fetch individual events
@ -1021,11 +1346,13 @@ impl RealCalDavClient {
// Try to fetch this resource as an .ics file
match self.fetch_single_event(&full_url, calendar_href).await {
Ok(Some(event)) => {
events.push(event);
Ok(fetched_events) => {
if !fetched_events.is_empty() {
info!("Successfully fetched {} events from resource: {}", fetched_events.len(), href_content);
events.extend(fetched_events);
} else {
debug!("Resource {} returned no events", href_content);
}
Ok(None) => {
debug!("Resource {} is not an event", href_content);
}
Err(e) => {
warn!("Failed to fetch resource {}: {}", href_content, e);
@ -1040,58 +1367,302 @@ impl RealCalDavClient {
}
info!("Fetched {} individual events", events.len());
// Debug: show first few URLs being constructed
if !events.is_empty() {
info!("First few URLs tried:");
for (idx, event) in events.iter().take(3).enumerate() {
info!(" [{}] URL: {}", idx + 1, event.href);
}
} else {
info!("No events fetched successfully");
}
Ok(events)
}
}
/// Extract calendar name from URL
fn extract_calendar_name(&self, url: &str) -> String {
// Extract calendar name from URL path
if let Some(last_slash) = url.rfind('/') {
let name_part = &url[last_slash + 1..];
if !name_part.is_empty() {
return name_part.to_string();
}
}
/// Calendar information from CalDAV server.
///
/// Describes one calendar collection as reported by the server's
/// PROPFIND/discovery responses.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarInfo {
/// Absolute URL of the calendar collection on the server.
pub url: String,
/// Short calendar name (machine-usable identifier).
pub name: String,
/// Human-readable display name, when the server reports one.
pub display_name: Option<String>,
/// Calendar color, if the server advertises one.
pub color: Option<String>,
/// Free-text calendar description, if advertised.
pub description: Option<String>,
/// Calendar time zone, if advertised. NOTE(review): format (TZID vs
/// full VTIMEZONE) is not visible here — confirm against the parser.
pub timezone: Option<String>,
/// Component types the calendar supports (e.g. VEVENT, VTODO).
pub supported_components: Vec<String>,
}
"Default Calendar".to_string()
}
/// Calendar event from CalDAV server.
///
/// One VEVENT parsed out of a CalDAV response. Expanded occurrences of a
/// recurring event are also represented with this type (with `recurrence`
/// set to `None`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarEvent {
/// Internal identifier; taken from the event UID when available.
pub id: String,
/// Server URL of the backing `.ics` resource.
pub href: String,
/// Event title (SUMMARY property).
pub summary: String,
/// Optional long description (DESCRIPTION property).
pub description: Option<String>,
/// Event start, normalized to UTC.
pub start: DateTime<Utc>,
/// Event end, normalized to UTC.
pub end: DateTime<Utc>,
/// Optional location (LOCATION property).
pub location: Option<String>,
/// Raw STATUS value (e.g. "CONFIRMED", "TENTATIVE", "CANCELLED").
pub status: Option<String>,
/// Creation timestamp, when present in the source data.
pub created: Option<DateTime<Utc>>,
/// Last-modified timestamp, when present in the source data.
pub last_modified: Option<DateTime<Utc>>,
/// iCalendar SEQUENCE number (revision counter).
pub sequence: i32,
/// Raw TRANSP value (free/busy transparency), if present.
pub transparency: Option<String>,
/// iCalendar UID, when present.
pub uid: Option<String>,
/// RECURRENCE-ID for overridden instances of a recurring event.
pub recurrence_id: Option<DateTime<Utc>>,
/// HTTP ETag of the resource, when known.
pub etag: Option<String>,
// Enhanced timezone information
/// TZID attached to DTSTART in the source data, if any.
pub start_tzid: Option<String>,
/// TZID attached to DTEND in the source data, if any.
pub end_tzid: Option<String>,
/// Raw DTSTART property value as received (unparsed string).
pub original_start: Option<String>,
/// Raw DTEND property value as received (unparsed string).
pub original_end: Option<String>,
// NEW: RRULE support
/// Parsed RRULE; `None` for non-recurring events and for expanded occurrences.
pub recurrence: Option<crate::event::RecurrenceRule>,
}
/// Extract display name from href/URL
fn extract_display_name_from_href(&self, href: &str) -> String {
// If href ends with a slash, extract the parent directory name
// Otherwise, extract the last path component
if href.ends_with('/') {
// Remove trailing slash
let href_without_slash = href.trim_end_matches('/');
if let Some(last_slash) = href_without_slash.rfind('/') {
let name_part = &href_without_slash[last_slash + 1..];
if !name_part.is_empty() {
return name_part.replace('_', " ").split('-').map(|word| {
let mut chars = word.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + &chars.as_str().to_lowercase(),
}
}).collect::<Vec<String>>().join(" ");
}
}
impl RealCalDavClient {
/// Create or update an event using HTTP PUT
pub async fn put_event(&self, calendar_url: &str, event_uid: &str, ical_data: &str, etag: Option<&str>) -> Result<Option<String>> {
let event_url = if calendar_url.ends_with('/') {
format!("{}{}.ics", calendar_url, event_uid)
} else {
// Use the existing extract_calendar_name logic
return self.extract_calendar_name(href);
format!("{}/{}.ics", calendar_url, event_uid)
};
info!("Putting event to: {}", event_url);
let mut request = self.client
.put(&event_url)
.header("Content-Type", "text/calendar; charset=utf-8")
.header("User-Agent", "caldav-sync/0.1.0")
.body(ical_data.to_string());
// Add ETag header if provided (for updates)
if let Some(etag_value) = etag {
request = request.header("If-Match", etag_value);
debug!("Adding If-Match header: {}", etag_value);
} else {
// For new events, use If-None-Match to prevent accidental overwrites
request = request.header("If-None-Match", "*");
debug!("Adding If-None-Match: * header for new event");
}
"Default Calendar".to_string()
let response = request.send().await
.map_err(|e| anyhow::anyhow!("Failed to send PUT request: {}", e))?;
// Handle HTTP response errors
let response = self.handle_http_response(response, "put_event").await?;
let status = response.status();
info!("PUT response status: {}", status);
if status.is_success() {
// Extract ETag from response headers if available
let new_etag = response.headers().get("ETag")
.and_then(|value| value.to_str().ok())
.map(|s| s.to_string());
if let Some(ref etag) = new_etag {
debug!("New event ETag: {}", etag);
}
info!("Successfully created/updated event: {}", event_uid);
Ok(new_etag)
} else if status.as_u16() == 412 {
// Precondition failed - ETag mismatch
Err(anyhow::anyhow!("Event conflict: ETag mismatch. Event may have been modified by another client."))
} else if status.as_u16() == 409 {
// Conflict - event already exists
Err(anyhow::anyhow!("Event conflict: Event already exists on server."))
} else {
let error_text = response.text().await.unwrap_or_else(|_| "Unable to read error response".to_string());
Err(anyhow::anyhow!("Failed to create/update event: {} - {}", status, error_text))
}
}
/// Fetch the current ETag for an event via a HEAD request.
///
/// Returns `Ok(Some(etag))` when the event exists and the server reports an
/// ETag, `Ok(None)` when the event is missing (404) or no ETag header is
/// present, and an error for any other failure.
pub async fn get_event_etag(&self, calendar_url: &str, event_uid: &str) -> Result<Option<String>> {
    // Join the calendar URL and the event file name without doubling the slash.
    let sep = if calendar_url.ends_with('/') { "" } else { "/" };
    let event_url = format!("{}{}{}.ics", calendar_url, sep, event_uid);
    debug!("Getting ETag for event: {}", event_url);
    let response = self
        .client
        .head(&event_url)
        .header("User-Agent", "caldav-sync/0.1.0")
        .send()
        .await
        .map_err(|e| anyhow::anyhow!("Failed to send HEAD request: {}", e))?;
    let status = response.status();
    debug!("HEAD response status: {}", status);
    // A 404 is the expected answer when probing for existence — not an error.
    if status.as_u16() == 404 {
        debug!("Event not found: {}", event_url);
        return Ok(None);
    }
    if !status.is_success() {
        // 4xx/5xx are turned into descriptive errors by the shared handler;
        // anything that survives (e.g. a redirect) falls through to the
        // generic failure below.
        let response = self.handle_http_response(response, "get_event_etag").await?;
        let error_text = response
            .text()
            .await
            .unwrap_or_else(|_| "Unable to read error response".to_string());
        return Err(anyhow::anyhow!("Failed to get event ETag: {} - {}", status, error_text));
    }
    let etag = response
        .headers()
        .get("ETag")
        .and_then(|value| value.to_str().ok())
        .map(str::to_string);
    match &etag {
        Some(tag) => debug!("Found ETag: {}", tag),
        None => debug!("No ETag found in response headers"),
    }
    Ok(etag)
}
/// Delete an event using HTTP DELETE.
///
/// Deletion is idempotent: a missing event (failed pre-check or a 404 from
/// the server) is treated as success. When `etag` is supplied it is sent as
/// `If-Match` so a concurrently-modified event is not removed.
///
/// # Errors
/// Returns an error for transport failures and for 412/401/403/409 or other
/// unexpected server responses.
pub async fn delete_event(&self, calendar_url: &str, event_uid: &str, etag: Option<&str>) -> Result<()> {
    let event_url = if calendar_url.ends_with('/') {
        format!("{}{}.ics", calendar_url, event_uid)
    } else {
        format!("{}/{}.ics", calendar_url, event_uid)
    };
    info!("Deleting event: {}", event_url);
    // Pre-check existence to avoid an unnecessary 404 round-trip.
    // NOTE(review): a transport failure here is also treated as "already
    // gone" — intentional for idempotency, but it can mask connectivity
    // problems; confirm this is the desired behavior.
    match self.check_event_exists(&event_url).await {
        Ok(_) => {
            info!("Event exists, proceeding with deletion");
        }
        Err(e) => {
            warn!("Event does not exist or check failed: {}, skipping deletion", e);
            info!("This is normal for idempotent deletion operations");
            return Ok(());
        }
    }
    let mut request = self.client
        .delete(&event_url)
        .header("User-Agent", "caldav-sync/0.1.0");
    // Add ETag header if provided (recommended by CalDAV spec)
    if let Some(etag_value) = etag {
        request = request.header("If-Match", etag_value);
        info!("Adding If-Match header for deletion: {}", etag_value);
    } else {
        info!("No ETag provided for deletion (may cause conflicts)");
    }
    let response = request.send().await
        .map_err(|e| anyhow::anyhow!("Failed to send DELETE request: {}", e))?;
    // Inspect the status manually: 404 must count as success (idempotent
    // deletion), so the response cannot go straight to handle_http_response.
    let status = response.status();
    let status_code = status.as_u16();
    info!("DELETE response status: {} ({})", status, status_code);
    if status.is_success() {
        info!("Successfully deleted event: {}", event_uid);
        Ok(())
    } else if status_code == 404 {
        // Event not found - consider this a success for idempotent deletion
        info!("Event not found (already deleted): {}", event_url);
        Ok(())
    } else if status_code == 412 {
        // Precondition failed - ETag mismatch.
        // (was `format!` with no placeholders — clippy::useless_format)
        let error_msg = "Event deletion conflict: ETag mismatch. Event may have been modified by another client.";
        error!("{}", error_msg);
        Err(anyhow::anyhow!(error_msg))
    } else if status_code == 401 {
        let error_msg = "Authentication failed during deletion. Check credentials.";
        error!("{}", error_msg);
        Err(anyhow::anyhow!(error_msg))
    } else if status_code == 403 {
        let error_msg = "Permission denied during deletion. Check calendar permissions.";
        error!("{}", error_msg);
        Err(anyhow::anyhow!(error_msg))
    } else if status_code == 409 {
        let error_msg = "Conflict during deletion. Event may be locked or modified.";
        error!("{}", error_msg);
        Err(anyhow::anyhow!(error_msg))
    } else if status_code >= 400 {
        error!("HTTP error during deletion: {} {}", status, status_code);
        // handle_http_response always errors for >= 400, so `?` propagates;
        // the trailing Err is an unreachable safety net.
        let _response = self.handle_http_response(response, "delete_event").await?;
        Err(anyhow::anyhow!("Unexpected status after error handling"))
    } else {
        let error_text = response.text().await.unwrap_or_else(|_| "Unable to read error response".to_string());
        error!("Unexpected response during deletion: {} - {}", status, error_text);
        Err(anyhow::anyhow!("Failed to delete event: {} - {}", status, error_text))
    }
}
/// Check if an event exists by making a HEAD request to the event URL.
///
/// Returns `Ok(())` when the server answers with a success status and an
/// error for 404 (not found), 401/403 (auth/permission), or anything else.
pub async fn check_event_exists(&self, event_url: &str) -> Result<()> {
    debug!("Checking if event exists: {}", event_url);
    let response = self.client
        .head(event_url)
        .header("User-Agent", "caldav-sync/0.1.0")
        .send().await
        .map_err(|e| anyhow::anyhow!("Failed to send HEAD request to check event existence: {}", e))?;
    let status = response.status();
    let code = status.as_u16();
    debug!("HEAD response status: {} ({})", status, code);
    // Success path first: log useful headers and return early.
    if status.is_success() {
        debug!("Event exists: {}", event_url);
        if let Some(etag) = response.headers().get("ETag") {
            debug!("Event ETag: {:?}", etag);
        }
        if let Some(content_type) = response.headers().get("Content-Type") {
            debug!("Content-Type: {:?}", content_type);
        }
        return Ok(());
    }
    // Map the error statuses we care about; everything else falls through
    // to a generic message including the response body.
    match code {
        404 => {
            debug!("Event does not exist: {}", event_url);
            Err(anyhow::anyhow!("Event not found: {}", status))
        }
        401 => {
            let error_msg = "Authentication failed while checking event existence";
            error!("{}: {}", error_msg, status);
            Err(anyhow::anyhow!("{}: {}", error_msg, status))
        }
        403 => {
            let error_msg = "Permission denied while checking event existence";
            error!("{}: {}", error_msg, status);
            Err(anyhow::anyhow!("{}: {}", error_msg, status))
        }
        _ => {
            let error_text = response.text().await.unwrap_or_else(|_| "Unable to read error response".to_string());
            debug!("Failed to check event existence: {} - {}", status, error_text);
            Err(anyhow::anyhow!("Failed to check event existence: {} - {}", status, error_text))
        }
    }
}
/// Check if a target calendar exists and is accessible.
///
/// Sends a HEAD request to `calendar_url`. Returns `Ok(true)` for a
/// success status, `Ok(false)` for 404, and an error for anything else.
pub async fn validate_target_calendar(&self, calendar_url: &str) -> Result<bool> {
    info!("Validating target calendar: {}", calendar_url);
    let response = self.client
        .head(calendar_url)
        .header("User-Agent", "caldav-sync/0.1.0")
        .send().await
        .map_err(|e| anyhow::anyhow!("Failed to validate target calendar: {}", e))?;
    // Handle HTTP response errors
    // NOTE(review): if handle_http_response converts error statuses
    // (including 404) into Err, the 404 branch below is unreachable and
    // this function can never return Ok(false) — confirm its semantics.
    let response = self.handle_http_response(response, "validate_target_calendar").await?;
    let status = response.status();
    debug!("Calendar validation response status: {}", status);
    if status.is_success() {
        info!("Target calendar is accessible: {}", calendar_url);
        Ok(true)
    } else if status.as_u16() == 404 {
        info!("Target calendar not found: {}", calendar_url);
        Ok(false)
    } else {
        let error_text = response.text().await.unwrap_or_else(|_| "Unable to read error response".to_string());
        Err(anyhow::anyhow!("Target calendar validation failed: {} - {}", status, error_text))
    }
}
/// Extract display name from XML response, trying multiple formats
@ -1151,41 +1722,44 @@ impl RealCalDavClient {
debug!("No display name found in XML response");
None
}
}
/// Calendar information from CalDAV server
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarInfo {
    /// URL of the calendar collection on the server.
    pub url: String,
    /// Calendar name — presumably derived from the URL path (TODO confirm against producer).
    pub name: String,
    /// Human-readable display name, when the server provides one.
    pub display_name: Option<String>,
    /// Calendar color, when provided by the server.
    pub color: Option<String>,
    /// Free-text description, when provided.
    pub description: Option<String>,
    /// Timezone identifier associated with the calendar, if any.
    pub timezone: Option<String>,
    /// Component types the calendar reports as supported.
    pub supported_components: Vec<String>,
}
/// Extract display name from href/URL
fn extract_display_name_from_href(&self, href: &str) -> String {
// If href ends with a slash, extract the parent directory name
// Otherwise, extract the last path component
if href.ends_with('/') {
// Remove trailing slash
let href_without_slash = href.trim_end_matches('/');
if let Some(last_slash) = href_without_slash.rfind('/') {
let name_part = &href_without_slash[last_slash + 1..];
if !name_part.is_empty() {
return name_part.replace('_', " ").split('-').map(|word| {
let mut chars = word.chars();
match chars.next() {
None => String::new(),
Some(first) => first.to_uppercase().collect::<String>() + &chars.as_str().to_lowercase(),
}
}).collect::<Vec<String>>().join(" ");
}
}
} else {
// Use the existing extract_calendar_name logic
return self.extract_calendar_name(href);
}
/// Calendar event from CalDAV server
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CalendarEvent {
pub id: String,
pub href: String,
pub summary: String,
pub description: Option<String>,
pub start: DateTime<Utc>,
pub end: DateTime<Utc>,
pub location: Option<String>,
pub status: Option<String>,
pub created: Option<DateTime<Utc>>,
pub last_modified: Option<DateTime<Utc>>,
pub sequence: i32,
pub transparency: Option<String>,
pub uid: Option<String>,
pub recurrence_id: Option<DateTime<Utc>>,
pub etag: Option<String>,
// Enhanced timezone information
pub start_tzid: Option<String>,
pub end_tzid: Option<String>,
pub original_start: Option<String>,
pub original_end: Option<String>,
"Default Calendar".to_string()
}
/// Extract calendar name from URL.
///
/// Returns the last path component of `url`; falls back to
/// "Default Calendar" when the URL has no '/' or ends with one.
fn extract_calendar_name(&self, url: &str) -> String {
    // rsplit_once yields (head, tail-after-last-slash); None when no '/'.
    match url.rsplit_once('/') {
        Some((_, tail)) if !tail.is_empty() => tail.to_string(),
        _ => "Default Calendar".to_string(),
    }
}
}

View file

@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize};
use tracing::{info, warn, debug};
/// Import behavior strategies for unidirectional sync
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum ImportBehavior {
/// Strict import: target calendar must exist, no cleanup
Strict,
@ -171,6 +171,14 @@ pub struct ConflictInfo {
pub timestamp: DateTime<Utc>,
}
/// Event action result for processing
#[derive(Debug, Clone, PartialEq)]
pub enum EventAction {
    /// A new event was written to the target calendar.
    Created,
    /// An existing event on the target was modified.
    Updated,
    /// The event was already up to date; nothing was written.
    Skipped,
}
/// Conflict resolution strategies
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ConflictResolution {
@ -219,24 +227,115 @@ impl ImportEngine {
// Validate events before processing
let validated_events = self.validate_events(&events, &mut result);
result.total_events = validated_events.len();
// Expand recurring events into individual occurrences
let expanded_events = self.expand_recurring_events(&validated_events, &mut result);
result.total_events = expanded_events.len();
info!("Expanded {} events into {} individual occurrences", validated_events.len(), expanded_events.len());
// Build target calendar URL
let target_calendar_url = self.build_target_calendar_url();
// Create CalDAV client for target server
let target_client = crate::minicaldav_client::RealCalDavClient::new(
&self.config.target_server.url,
&self.config.target_server.username,
&self.config.target_server.password,
).await.map_err(|e| anyhow::anyhow!("Failed to create target CalDAV client: {}", e))?;
// Determine date range for fetching existing events and expanding recurring events
let (min_date, max_date) = if let Some((first_event, last_event)) = expanded_events.first().zip(expanded_events.last()) {
let min_start = first_event.start - chrono::Duration::days(7); // 7 days buffer
let max_end = last_event.end + chrono::Duration::days(7); // 7 days buffer
// Ensure min_date is before max_date
if min_start >= max_end {
warn!("Invalid date range calculated: start {} >= end {}, using fallback range", min_start, max_end);
let now = chrono::Utc::now();
(now - chrono::Duration::days(30), now + chrono::Duration::days(365))
} else {
(min_start, max_end)
}
} else {
// No events to process
warn!("No valid events to import");
result.complete();
return Ok(result);
};
// Fetch all existing events from target calendar once
let existing_events = match self.fetch_existing_events(&target_client, &target_calendar_url, min_date, max_date).await {
Ok(events) => {
info!("Fetched {} existing events from target calendar", events.len());
events
}
Err(e) => {
warn!("Failed to fetch existing events from target calendar: {}. Assuming target calendar is empty.", e);
// If we can't fetch existing events, assume it's empty (new calendar)
Vec::new()
}
};
// Build a lookup table of existing events by UID for efficient comparison
let existing_events_by_uid: std::collections::HashMap<String, crate::minicaldav_client::CalendarEvent> =
existing_events.into_iter()
.filter_map(|event| event.uid.clone().map(|uid| (uid, event)))
.collect();
info!("Created lookup table with {} existing events", existing_events_by_uid.len());
if self.dry_run {
info!("DRY RUN: Would process {} events", result.total_events);
for (i, event) in validated_events.iter().enumerate() {
info!("DRY RUN [{}]: {} ({})", i + 1, event.summary, event.uid);
for (i, event) in expanded_events.iter().enumerate() {
if existing_events_by_uid.contains_key(&event.uid) {
info!("DRY RUN [{}]: {} ({}) - EXISTS", i + 1, event.summary, event.uid);
} else {
info!("DRY RUN [{}]: {} ({}) - NEW", i + 1, event.summary, event.uid);
}
result.imported = validated_events.len();
}
// Analyze target calendar for cleanup operations (if StrictWithCleanup)
if self.behavior == ImportBehavior::StrictWithCleanup {
info!("DRY RUN: Analyzing target calendar for cleanup...");
match self.analyze_cleanup_operations(&expanded_events).await {
Ok((orphaned_count, orphaned_events)) => {
info!("DRY RUN: Would delete {} orphaned events from target calendar", orphaned_count);
for event in orphaned_events {
let event_uid = event.uid.as_deref().unwrap_or("unknown");
info!("DRY RUN [DELETE]: {} ({})", event.summary, event_uid);
}
result.deleted = orphaned_count;
}
Err(e) => {
warn!("DRY RUN: Failed to analyze cleanup operations: {}", e);
}
}
}
result.imported = expanded_events.len();
result.complete();
return Ok(result);
}
// Process each event
for event in validated_events {
match self.process_single_event(&event).await {
Ok(_) => {
// Process each event using the pre-fetched data
for event in &expanded_events {
match self.process_single_event_with_existing_data(&target_client, &target_calendar_url, &event, &existing_events_by_uid).await {
Ok(event_action) => {
match event_action {
EventAction::Created => {
result.imported += 1;
debug!("Successfully imported event: {}", event.summary);
debug!("Successfully created event: {}", event.summary);
}
EventAction::Updated => {
result.updated += 1;
debug!("Successfully updated event: {}", event.summary);
}
EventAction::Skipped => {
result.skipped += 1;
debug!("Skipped unchanged event: {}", event.summary);
}
}
}
Err(e) => {
result.failed += 1;
@ -254,8 +353,31 @@ impl ImportEngine {
}
result.complete();
info!("Import completed: {} imported, {} failed, {} skipped",
result.imported, result.failed, result.skipped);
info!("Import completed: {} imported, {} updated, {} failed, {} skipped",
result.imported, result.updated, result.failed, result.skipped);
// Perform cleanup if using StrictWithCleanup behavior
if self.behavior == ImportBehavior::StrictWithCleanup && !self.dry_run {
info!("Performing cleanup of orphaned events...");
match self.delete_orphaned_events(&expanded_events).await {
Ok(deleted_uids) => {
result.deleted = deleted_uids.len();
info!("Cleanup completed: {} orphaned events deleted", deleted_uids.len());
}
Err(e) => {
warn!("Cleanup failed: {}", e);
// Add warning to errors but don't fail the import
let cleanup_error = ImportError {
event_uid: None,
event_summary: None,
message: format!("Cleanup failed: {}", e),
error_type: ImportErrorType::Other,
timestamp: Utc::now(),
};
result.errors.push(cleanup_error);
}
}
}
Ok(result)
}
@ -287,6 +409,39 @@ impl ImportEngine {
validated
}
/// Expand recurring events into individual occurrences within a reasonable date range.
///
/// Events with a recurrence rule are expanded via `expand_occurrences`;
/// non-recurring events are passed through unchanged.
fn expand_recurring_events(&self, events: &[Event], _result: &mut ImportResult) -> Vec<Event> {
    let now = chrono::Utc::now();
    // Window: past 6 months through 2 years ahead. The broad range keeps
    // cleanup working correctly.
    let start_range = now - chrono::Duration::days(180); // 6 months ago
    let end_range = now + chrono::Duration::days(365 * 2); // 2 years ahead
    info!("Expanding recurring events from {} to {}",
        start_range.format("%Y-%m-%d"),
        end_range.format("%Y-%m-%d"));
    let mut expanded = Vec::new();
    for event in events {
        match &event.recurrence {
            Some(_rule) => {
                debug!("Expanding recurring event: {} ({})", event.summary, event.uid);
                let occurrences = event.expand_occurrences(start_range, end_range);
                info!("Event '{}' expanded into {} occurrences", event.summary, occurrences.len());
                expanded.extend(occurrences);
            }
            // Non-recurring event, add as-is.
            None => expanded.push(event.clone()),
        }
    }
    info!("Expanded {} total events into {} individual occurrences", events.len(), expanded.len());
    expanded
}
/// Validate a single event for Nextcloud compatibility
fn validate_event(&self, event: &Event) -> Result<()> {
// Check required fields
@ -319,21 +474,391 @@ impl ImportEngine {
Ok(())
}
/// Process a single event import
async fn process_single_event(&self, event: &Event) -> Result<()> {
info!("Processing event: {} ({})", event.summary, event.uid);
/// Build the target calendar URL from server configuration.
///
/// If the configured URL already points at a calendar collection
/// (contains `/remote.php/dav/calendars/`), it is used as-is minus any
/// trailing slash. Otherwise it is treated as a Nextcloud base URL and
/// the standard calendar path `remote.php/dav/calendars/<user>/<name>/`
/// is appended.
fn build_target_calendar_url(&self) -> String {
    // Normalize once: with or without a trailing slash, both branches
    // behave identically.
    let base = self.config.target_server.url.trim_end_matches('/');
    if self.config.target_server.url.contains("/remote.php/dav/calendars/") {
        // URL already contains the full calendar path - use as-is
        base.to_string()
    } else {
        // URL is a base server URL - construct the full calendar path.
        // BUG FIX: the previous trailing-slash branch trimmed the slash
        // but formatted with "{}remote.php/...", producing URLs like
        // "https://hostremote.php/..." (missing separator).
        format!("{}/remote.php/dav/calendars/{}/{}/",
            base,
            self.config.target_server.username,
            self.config.target_calendar.name)
    }
}
// TODO: Implement the actual import logic
// This will involve:
// 1. Check if event already exists on target
// 2. Handle conflicts based on behavior
// 3. Convert event to iCalendar format
// 4. Upload to Nextcloud server
/// Create a new event on the target calendar.
///
/// Serializes `event` with `to_ical_simple` and PUTs it. Because the
/// pre-fetched existing-events snapshot may be stale, the event's ETag is
/// re-checked first; if the event unexpectedly exists it is updated in
/// place (with `If-Match`) instead of created.
async fn create_event(&self, client: &crate::minicaldav_client::RealCalDavClient, calendar_url: &str, event: &Event) -> Result<()> {
    debug!("Creating event: {}", event.summary);
    // Generate simplified iCalendar data for the event (avoids Zoho parsing issues)
    let ical_data = event.to_ical_simple()
        .map_err(|e| anyhow::anyhow!("Failed to generate iCalendar data: {}", e))?;
    debug!("Generated iCalendar data ({} chars)", ical_data.len());
    // Double-check if event exists (in case our pre-fetched data is stale)
    match client.get_event_etag(calendar_url, &event.uid).await {
        Ok(Some(existing_etag)) => {
            debug!("Event '{}' was unexpectedly found during creation. Updating instead.", event.summary);
            debug!("Found existing ETag: {}", existing_etag);
            // Update the existing event
            match client.put_event(calendar_url, &event.uid, &ical_data, Some(&existing_etag)).await {
                Ok(Some(new_etag)) => {
                    debug!("Successfully updated existing event: {} (ETag: {})", event.summary, new_etag);
                }
                Ok(None) => {
                    debug!("Successfully updated existing event: {} (no ETag returned)", event.summary);
                }
                Err(e) => {
                    return Err(anyhow::anyhow!("Failed to update existing event '{}': {}", event.summary, e));
                }
            }
        }
        Ok(None) => {
            // Event doesn't exist, proceed with creation
            match client.put_event(calendar_url, &event.uid, &ical_data, None).await {
                Ok(Some(new_etag)) => {
                    debug!("Successfully created event: {} (ETag: {})", event.summary, new_etag);
                }
                Ok(None) => {
                    debug!("Successfully created event: {} (no ETag returned)", event.summary);
                }
                Err(e) => {
                    return Err(anyhow::anyhow!("Failed to create event '{}': {}", event.summary, e));
                }
            }
        }
        Err(e) => {
            return Err(anyhow::anyhow!("Failed to check event existence before creation: {}", e));
        }
    }
    // FIX: removed a stale "Event processing logic not yet implemented -
    // simulating success" debug log left over from the stub version; it
    // contradicted the implemented logic above and misled log readers.
    Ok(())
}
/// Update an existing event on the target calendar.
///
/// Serializes `event` to iCalendar and PUTs it with `etag` as an
/// `If-Match` precondition. On an ETag mismatch (412), the current ETag
/// is re-fetched and the PUT retried once; if the event has vanished in
/// the meantime it is created fresh instead.
async fn update_event(&self, client: &crate::minicaldav_client::RealCalDavClient, calendar_url: &str, event: &Event, etag: Option<&str>) -> Result<()> {
    debug!("Updating event: {}", event.summary);
    // Generate simplified iCalendar data for the event (avoids Zoho parsing issues)
    let ical_data = event.to_ical_simple()
        .map_err(|e| anyhow::anyhow!("Failed to generate iCalendar data: {}", e))?;
    debug!("Generated iCalendar data ({} chars)", ical_data.len());
    // Try to update the event with the provided ETag
    match client.put_event(calendar_url, &event.uid, &ical_data, etag).await {
        Ok(Some(new_etag)) => {
            debug!("Successfully updated event: {} (ETag: {})", event.summary, new_etag);
        }
        Ok(None) => {
            debug!("Successfully updated event: {} (no ETag returned)", event.summary);
        }
        Err(e) => {
            // Check if this is an ETag mismatch (412 error).
            // NOTE(review): detection is string-based — it assumes put_event
            // surfaces "412"/"Precondition Failed" in the error text; verify.
            if e.to_string().contains("Precondition Failed") || e.to_string().contains("412") {
                debug!("ETag mismatch for event '{}'. Re-fetching current ETag and retrying...", event.summary);
                // Re-fetch the current ETag
                match client.get_event_etag(calendar_url, &event.uid).await {
                    Ok(Some(current_etag)) => {
                        debug!("Retrieved current ETag for event '{}': {}", event.summary, current_etag);
                        // Retry the update with the current ETag (single retry, no loop)
                        match client.put_event(calendar_url, &event.uid, &ical_data, Some(&current_etag)).await {
                            Ok(Some(new_etag)) => {
                                debug!("Successfully updated event on retry: {} (ETag: {})", event.summary, new_etag);
                            }
                            Ok(None) => {
                                debug!("Successfully updated event on retry: {} (no ETag returned)", event.summary);
                            }
                            Err(retry_err) => {
                                return Err(anyhow::anyhow!("Failed to update event '{}' even after retry: {}", event.summary, retry_err));
                            }
                        }
                    }
                    Ok(None) => {
                        // Event doesn't exist anymore, try creating it (PUT without If-Match)
                        debug!("Event '{}' no longer exists, attempting to create it instead", event.summary);
                        match client.put_event(calendar_url, &event.uid, &ical_data, None).await {
                            Ok(Some(new_etag)) => {
                                debug!("Successfully created event: {} (ETag: {})", event.summary, new_etag);
                            }
                            Ok(None) => {
                                debug!("Successfully created event: {} (no ETag returned)", event.summary);
                            }
                            Err(create_err) => {
                                return Err(anyhow::anyhow!("Failed to create event '{}' after update failed: {}", event.summary, create_err));
                            }
                        }
                    }
                    Err(etag_err) => {
                        return Err(anyhow::anyhow!("Failed to re-fetch ETag for event '{}': {}", event.summary, etag_err));
                    }
                }
            } else {
                return Err(anyhow::anyhow!("Failed to update event '{}': {}", event.summary, e));
            }
        }
    }
    Ok(())
}
/// Analyze cleanup operations without actually deleting events (for dry run).
///
/// Scans the target calendar over a broad window (two years back and
/// ahead) and collects events whose UID — after stripping Nextcloud's
/// "-1" import suffix — does not appear in `source_events`. Returns the
/// orphan count together with the orphaned events. No-op unless the
/// behavior is `StrictWithCleanup`.
async fn analyze_cleanup_operations(&self, source_events: &[Event]) -> Result<(usize, Vec<crate::minicaldav_client::CalendarEvent>)> {
    let mut orphaned_events = Vec::new();
    if self.behavior != ImportBehavior::StrictWithCleanup {
        debug!("Skipping cleanup analysis (behavior: {})", self.behavior);
        return Ok((0, orphaned_events));
    }
    info!("Analyzing target calendar for orphaned events...");
    // Create CalDAV client for target server
    let target_client = crate::minicaldav_client::RealCalDavClient::new(
        &self.config.target_server.url,
        &self.config.target_server.username,
        &self.config.target_server.password,
    ).await.map_err(|e| anyhow::anyhow!("Failed to create target CalDAV client: {}", e))?;
    // Build target calendar URL
    let target_calendar_url = self.build_target_calendar_url();
    // Use a broader date range to find ALL events for cleanup analysis.
    // We want to catch orphaned events regardless of when they occur.
    let now = chrono::Utc::now();
    let start_date = now - chrono::Duration::days(365 * 2); // 2 years ago
    let end_date = now + chrono::Duration::days(365 * 2); // 2 years ahead
    info!("Scanning target calendar for events from {} to {} for cleanup analysis",
        start_date.format("%Y-%m-%d"), end_date.format("%Y-%m-%d"));
    info!("🔍 TARGET EVENT FETCH DEBUG:");
    info!(" Target calendar URL: {}", target_calendar_url);
    info!(" Date range: {} to {}", start_date.format("%Y-%m-%d"), end_date.format("%Y-%m-%d"));
    info!(" Current date: {}", now.format("%Y-%m-%d"));
    info!(" Oct 31, 2025 should be in range: true (using broad 2-year range)");
    // Get all events from target calendar
    match target_client.get_events(&target_calendar_url, start_date, end_date).await {
        Ok(target_events) => {
            info!("🎯 TARGET EVENTS FETCHED: {} total events", target_events.len());
            let source_uids: std::collections::HashSet<&str> = source_events
                .iter()
                .map(|e| e.uid.as_str())
                .collect();
            debug!("=== CLEANUP ANALYSIS DEBUG ===");
            debug!("Source UIDs ({}): {:?}", source_uids.len(), source_uids);
            debug!("Target events found: {}", target_events.len());
            // Log all source events for debugging
            for (i, source_event) in source_events.iter().enumerate() {
                debug!("Source event {}: UID='{}', Summary='{}', Date={}",
                    i + 1, source_event.uid, source_event.summary, source_event.start.format("%Y-%m-%d"));
            }
            for (i, target_event) in target_events.iter().enumerate() {
                // Idiom fix: unwrap_or with a literal instead of unwrap_or_else + closure.
                let target_uid = target_event.uid.as_deref().unwrap_or("NO_UID");
                let target_summary = target_event.summary.as_str();
                info!("🎯 TARGET EVENT {}: UID='{}', Summary='{}', Start='{:?}'",
                    i + 1, target_uid, target_summary, target_event.start);
                // Special detection for the test event
                if target_summary.contains("caldav test") || target_uid.contains("test") {
                    info!("*** FOUND TEST EVENT IN CLEANUP: UID='{}', Summary='{}' ***",
                        target_uid, target_summary);
                }
                if let Some(target_uid) = &target_event.uid {
                    // Handle Nextcloud's UID suffix for imported events.
                    // Idiom fix: strip_suffix replaces the manual ends_with + slice.
                    let cleaned_uid = target_uid.strip_suffix("-1").unwrap_or(target_uid.as_str());
                    let is_orphaned = !source_uids.contains(cleaned_uid);
                    debug!(" Target UID: '{}', Cleaned UID: '{}' in source: {} -> Orphaned: {}",
                        target_uid, cleaned_uid, source_uids.contains(cleaned_uid), is_orphaned);
                    if is_orphaned {
                        debug!("*** ORPHANED EVENT DETECTED: {} ({}) ***", target_event.summary, target_uid);
                        orphaned_events.push(target_event.clone());
                    }
                }
            }
            debug!("Total orphaned events detected: {}", orphaned_events.len());
            debug!("=== END CLEANUP ANALYSIS DEBUG ===");
        }
        Err(e) => {
            return Err(anyhow::anyhow!("Failed to get target events for cleanup analysis: {}", e));
        }
    }
    info!("Found {} orphaned events that would be deleted", orphaned_events.len());
    Ok((orphaned_events.len(), orphaned_events))
}
/// Delete orphaned events from target calendar (StrictWithCleanup mode only).
///
/// Mirrors `analyze_cleanup_operations`: scans the same two-year window
/// and deletes every target event whose UID — after stripping Nextcloud's
/// "-1" import suffix — does not appear in `source_events`. Returns the
/// UIDs of the deleted (or, in dry-run, would-be-deleted) events.
async fn delete_orphaned_events(&self, source_events: &[Event]) -> Result<Vec<String>> {
    let mut deleted_events = Vec::new();
    if self.behavior != ImportBehavior::StrictWithCleanup {
        debug!("Skipping orphaned event deletion (behavior: {})", self.behavior);
        return Ok(deleted_events);
    }
    info!("Looking for orphaned events to delete...");
    // Create CalDAV client for target server
    let target_client = crate::minicaldav_client::RealCalDavClient::new(
        &self.config.target_server.url,
        &self.config.target_server.username,
        &self.config.target_server.password,
    ).await.map_err(|e| anyhow::anyhow!("Failed to create target CalDAV client: {}", e))?;
    // Build target calendar URL
    let target_calendar_url = self.build_target_calendar_url();
    // Use the same broad date range as cleanup analysis to ensure consistency.
    // We want to find and delete ALL orphaned events, regardless of when they occur.
    let now = chrono::Utc::now();
    let start_date = now - chrono::Duration::days(365 * 2); // 2 years ago
    let end_date = now + chrono::Duration::days(365 * 2); // 2 years ahead
    info!("Scanning target calendar for events from {} to {} for orphaned event deletion",
        start_date.format("%Y-%m-%d"), end_date.format("%Y-%m-%d"));
    // Get all events from target calendar
    match target_client.get_events(&target_calendar_url, start_date, end_date).await {
        Ok(target_events) => {
            let source_uids: std::collections::HashSet<&str> = source_events
                .iter()
                .map(|e| e.uid.as_str())
                .collect();
            info!("🐛 DETAILED DELETION DEBUG:");
            info!(" Source events count: {}", source_events.len());
            info!(" Target events count: {}", target_events.len());
            info!(" Source UIDs collected: {}", source_uids.len());
            info!(" Source UIDs: {:?}", source_uids);
            // Debug: Show all source event details
            for (i, event) in source_events.iter().enumerate() {
                info!(" Source Event {}: UID='{:?}', Summary='{:?}'",
                    i, event.uid, event.summary);
            }
            // Debug: Show all target event details
            for (i, event) in target_events.iter().enumerate() {
                info!(" Target Event {}: UID='{:?}', Summary='{:?}'",
                    i, event.uid, event.summary);
            }
            for target_event in target_events {
                if let Some(target_uid) = &target_event.uid {
                    // CONSISTENCY FIX: strip Nextcloud's "-1" import suffix
                    // before comparing, exactly as analyze_cleanup_operations
                    // does. Previously the raw UID was compared here, so the
                    // dry-run analysis and the actual deletion could disagree
                    // and re-imported events ("<uid>-1") were wrongly deleted.
                    let cleaned_uid = target_uid.strip_suffix("-1").unwrap_or(target_uid.as_str());
                    let is_orphaned = !source_uids.contains(cleaned_uid);
                    info!("🔍 DELETION ANALYSIS:");
                    info!(" Target UID: '{}'", target_uid);
                    info!(" Target Summary: '{}'", target_event.summary);
                    info!(" Source UIDs count: {}", source_uids.len());
                    info!(" UID in source: {}", source_uids.contains(cleaned_uid));
                    info!(" Is orphaned: {}", is_orphaned);
                    // Log all source UIDs for comparison
                    if source_uids.len() <= 10 {
                        info!(" All source UIDs: {:?}", source_uids);
                    } else {
                        info!(" First 10 source UIDs: {:?}", source_uids.iter().take(10).collect::<Vec<_>>());
                    }
                    if is_orphaned {
                        info!("🗑️ DELETING orphaned event: {} ({})",
                            target_event.summary, target_uid);
                        if !self.dry_run {
                            info!("🚀 Executing DELETE request for UID: {}", target_uid);
                            // Deletion still uses the actual resource UID (with suffix).
                            match target_client.delete_event(&target_calendar_url, target_uid, target_event.etag.as_deref()).await {
                                Ok(_) => {
                                    deleted_events.push(target_uid.clone());
                                    info!("✅ Successfully deleted orphaned event: {}", target_uid);
                                }
                                Err(e) => {
                                    warn!("❌ Failed to delete orphaned event '{}': {}", target_uid, e);
                                }
                            }
                        } else {
                            info!("🔍 DRY RUN: Would delete orphaned event: {}", target_event.summary);
                            deleted_events.push(target_uid.clone());
                        }
                    } else {
                        info!("✅ Keeping event (exists in source): {} ({})", target_event.summary, target_uid);
                    }
                } else {
                    warn!("⚠️ Target event has no UID: {} (href: {})", target_event.summary, target_event.href);
                }
            }
        }
        Err(e) => {
            warn!("Failed to get target events for cleanup: {}", e);
        }
    }
    info!("Deleted {} orphaned events", deleted_events.len());
    Ok(deleted_events)
}
/// Convert a CalDAV `CalendarEvent` into the internal `Event` for comparison.
///
/// Missing UID falls back to the event id; missing timestamps fall back
/// to "now"; unknown STATUS values default to Confirmed.
fn calendar_event_to_event(&self, calendar_event: &crate::minicaldav_client::CalendarEvent) -> Result<Event> {
    // Map the raw iCalendar STATUS string onto our enum up front.
    let status = match calendar_event.status.as_deref() {
        Some("TENTATIVE") => crate::event::EventStatus::Tentative,
        Some("CANCELLED") => crate::event::EventStatus::Cancelled,
        // "CONFIRMED", anything unrecognized, or absent all map here.
        _ => crate::event::EventStatus::Confirmed,
    };
    Ok(Event {
        uid: calendar_event.uid.clone().unwrap_or_else(|| calendar_event.id.clone()),
        summary: calendar_event.summary.clone(),
        description: calendar_event.description.clone(),
        start: calendar_event.start,
        end: calendar_event.end,
        all_day: false, // TODO: Determine from event data
        location: calendar_event.location.clone(),
        status,
        event_type: crate::event::EventType::Public, // Default
        organizer: None,
        attendees: Vec::new(),
        recurrence: None,
        alarms: Vec::new(),
        properties: std::collections::HashMap::new(),
        created: calendar_event.created.unwrap_or_else(chrono::Utc::now),
        last_modified: calendar_event.last_modified.unwrap_or_else(chrono::Utc::now),
        sequence: calendar_event.sequence,
        timezone: calendar_event.start_tzid.clone(),
    })
}
/// Classify error type for reporting
fn classify_error(&self, error: &anyhow::Error) -> ImportErrorType {
let error_str = error.to_string().to_lowercase();
@ -356,6 +881,65 @@ impl ImportEngine {
ImportErrorType::Other
}
}
/// Fetch existing events from the target calendar for the given date range.
///
/// A 404 from the server is treated as "calendar does not exist yet" and
/// yields an empty list instead of an error.
async fn fetch_existing_events(&self, client: &crate::minicaldav_client::RealCalDavClient, calendar_url: &str, start_date: chrono::DateTime<Utc>, end_date: chrono::DateTime<Utc>) -> Result<Vec<crate::minicaldav_client::CalendarEvent>> {
    info!("Fetching existing events from target calendar: {} between {} and {}",
        calendar_url,
        start_date.format("%Y-%m-%d %H:%M:%S UTC"),
        end_date.format("%Y-%m-%d %H:%M:%S UTC"));
    match client.get_events(calendar_url, start_date, end_date).await {
        Ok(events) => {
            info!("Successfully fetched {} existing events", events.len());
            Ok(events)
        }
        // If we get a 404, it means the calendar doesn't exist yet.
        Err(e) if e.to_string().contains("404") => {
            warn!("Target calendar not found (404), assuming it's new: {}", calendar_url);
            Ok(Vec::new())
        }
        Err(e) => Err(anyhow::anyhow!("Failed to fetch existing events: {}", e)),
    }
}
/// Process a single event using pre-fetched existing events data.
///
/// Consults the `existing_events_by_uid` lookup (built once per run) to
/// decide whether the event must be created, updated, or skipped, then
/// performs the corresponding server operation and reports which action
/// was taken.
async fn process_single_event_with_existing_data(&self, client: &crate::minicaldav_client::RealCalDavClient, calendar_url: &str, event: &Event, existing_events_by_uid: &std::collections::HashMap<String, crate::minicaldav_client::CalendarEvent>) -> Result<EventAction> {
    debug!("Processing event: {} ({})", event.summary, event.uid);
    debug!("Target calendar URL: {}", calendar_url);
    // Check if event already exists on target using the pre-fetched data
    match existing_events_by_uid.get(&event.uid) {
        Some(existing_event) => {
            // Idiom fix: as_deref().unwrap_or(&str) avoids allocating a
            // temporary String just for this log line.
            debug!("Event already exists on target: {} ({})", event.uid, existing_event.etag.as_deref().unwrap_or("no ETag"));
            // Convert CalendarEvent to Event for comparison
            let existing_event_struct = self.calendar_event_to_event(existing_event)?;
            if event.needs_update(&existing_event_struct) {
                debug!("Event needs update: {}", event.summary);
                // Update the event, passing the known ETag as a precondition
                self.update_event(client, calendar_url, event, existing_event.etag.as_deref()).await?;
                Ok(EventAction::Updated)
            } else {
                debug!("Event is up to date: {}", event.summary);
                Ok(EventAction::Skipped)
            }
        }
        None => {
            debug!("Event does not exist on target, creating: {}", event.uid);
            // Create the event
            self.create_event(client, calendar_url, event).await?;
            Ok(EventAction::Created)
        }
    }
}
}
#[cfg(test)]

View file

@ -1,7 +1,7 @@
//! Synchronization engine for CalDAV calendars using real CalDAV implementation
use crate::{config::Config, minicaldav_client::RealCalDavClient, error::CalDavResult};
use chrono::{DateTime, Utc, Duration};
use chrono::{DateTime, Utc, Duration, Timelike, Datelike};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use tokio::time::sleep;
@ -64,6 +64,8 @@ pub struct SyncEvent {
pub source_calendar: String,
pub start_tzid: Option<String>,
pub end_tzid: Option<String>,
// NEW: RRULE support
pub recurrence: Option<crate::event::RecurrenceRule>,
}
/// Synchronization result
@ -234,7 +236,16 @@ impl SyncEngine {
// Get events for this calendar
match self.client.get_events(&calendar.url, start_date, end_date).await {
Ok(events) => {
info!("Found {} events in calendar: {}", events.len(), calendar.name);
info!("📊 Received {} events from calendar: {}", events.len(), calendar.name);
// Debug: Check if any events have recurrence
let recurring_in_batch = events.iter().filter(|e| e.recurrence.is_some()).count();
info!("📊 Recurring events in batch: {}", recurring_in_batch);
for (i, event) in events.iter().enumerate() {
if event.recurrence.is_some() {
info!("📊 Event #{} '{}' has recurrence: {:?}", i, event.summary, event.recurrence.is_some());
}
}
// Process events
for event in events {
@ -251,8 +262,15 @@ impl SyncEngine {
source_calendar: calendar.name.clone(),
start_tzid: event.start_tzid,
end_tzid: event.end_tzid,
// NEW: RRULE support
recurrence: event.recurrence,
};
// Debug: Check if key already exists (collision detection)
if self.local_events.contains_key(&event.id) {
tracing::warn!("⚠️ HashMap key collision: UID '{}' already exists in cache", event.id);
}
// Add to local cache
self.local_events.insert(event.id.clone(), sync_event);
total_events += 1;
@ -278,6 +296,86 @@ impl SyncEngine {
}
}
impl SyncEvent {
    /// Expand a recurring event into individual occurrences that fall inside
    /// `[start_range, end_range]` (both inclusive).
    ///
    /// Non-recurring events are returned as a single-element vector. Expansion
    /// honours the rule's COUNT and UNTIL terminators and is additionally
    /// capped at 1000 iterations as a safety net against malformed rules.
    ///
    /// NOTE(review): this is simple FREQ/INTERVAL arithmetic; BYDAY/BYMONTHDAY
    /// components are not applied here — confirm they are handled upstream.
    pub fn expand_occurrences(&self, start_range: DateTime<Utc>, end_range: DateTime<Utc>) -> Vec<SyncEvent> {
        // Idiomatic replacement for the is_none()/unwrap() pair: bind the rule
        // or bail out with the event itself.
        let recurrence_rule = match self.recurrence.as_ref() {
            Some(rule) => rule,
            None => return vec![self.clone()],
        };

        let mut occurrences = Vec::new();
        let mut current_start = self.start;
        let event_duration = self.end.signed_duration_since(self.start);
        let mut occurrence_count = 0;

        // Hard cap to prevent runaway loops on open-ended or huge rules.
        let max_occurrences = recurrence_rule.count().unwrap_or(1000).min(1000);

        while current_start <= end_range && occurrence_count < max_occurrences {
            // COUNT terminator: counts candidates from DTSTART, including those
            // that fall before start_range (per RFC 5545 semantics).
            if let Some(count) = recurrence_rule.count() {
                if occurrence_count >= count {
                    break;
                }
            }
            // UNTIL terminator (inclusive).
            if let Some(until) = recurrence_rule.until() {
                if current_start > until {
                    break;
                }
            }

            // Emit the occurrence only if it falls inside the requested window.
            if current_start >= start_range && current_start <= end_range {
                let mut occurrence = self.clone();
                occurrence.start = current_start;
                occurrence.end = current_start + event_duration;

                // Derive a unique, stable ID for this occurrence: a hash of the
                // series identity plus the occurrence date. The hash keeps IDs
                // distinct across different recurring series.
                let occurrence_date = current_start.format("%Y%m%d").to_string();
                let series_identifier = format!("{:x}", md5::compute(format!("{}-{}", self.id, self.summary)));
                occurrence.id = format!("{}-occurrence-{}-{}", series_identifier, occurrence_date, self.id);

                // Individual occurrences are not themselves recurring.
                occurrence.recurrence = None;
                occurrences.push(occurrence);
            }

            // Advance to the next candidate start per FREQ/INTERVAL.
            let interval = recurrence_rule.interval() as i64;
            current_start = match recurrence_rule.frequency().to_lowercase().as_str() {
                "daily" => current_start + chrono::Duration::days(interval),
                "weekly" => current_start + chrono::Duration::weeks(interval),
                "monthly" => add_months(current_start, interval as u32),
                "yearly" => add_months(current_start, (interval * 12) as u32),
                "hourly" => current_start + chrono::Duration::hours(interval),
                "minutely" => current_start + chrono::Duration::minutes(interval),
                "secondly" => current_start + chrono::Duration::seconds(interval),
                _ => current_start + chrono::Duration::days(interval), // Default to daily
            };
            occurrence_count += 1;
        }

        tracing::info!(
            "🔄 Expanded recurring SyncEvent '{}' to {} occurrences between {} and {}",
            self.summary,
            occurrences.len(),
            start_range.format("%Y-%m-%d"),
            end_range.format("%Y-%m-%d")
        );

        occurrences
    }
}
impl Default for SyncState {
fn default() -> Self {
Self {
@ -288,3 +386,55 @@ impl Default for SyncState {
}
}
}
/// Add `months` calendar months to a UTC timestamp.
///
/// Delegates to chrono's `checked_add_months`, which clamps the day-of-month
/// to the last valid day of the target month (e.g. Jan 31 + 1 month =
/// Feb 28/29) while preserving the time of day — the same semantics the old
/// hand-rolled implementation computed manually, minus its two unreachable
/// fallback branches and its truncation of sub-second precision.
///
/// On the practically unreachable overflow past chrono's supported date range,
/// the original timestamp is returned unchanged, matching the old "if all else
/// fails, return the original date" behaviour.
fn add_months(dt: DateTime<Utc>, months: u32) -> DateTime<Utc> {
    dt.checked_add_months(chrono::Months::new(months)).unwrap_or(dt)
}
/// Get the number of days in a month
fn days_in_month(year: i32, month: u32) -> u32 {
    // Gregorian leap-year rule: divisible by 4, except centuries that are not
    // divisible by 400. Algebraically identical to the original predicate.
    let leap = year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
    match month {
        2 => if leap { 29 } else { 28 },
        4 | 6 | 9 | 11 => 30,
        1 | 3 | 5 | 7 | 8 | 10 | 12 => 31,
        _ => 30, // Should never happen
    }
}

177
src/test_recurrence.rs Normal file
View file

@ -0,0 +1,177 @@
//! Test module for recurrence rule termination handling
#[cfg(test)]
mod tests {
    use crate::event::{Event, RecurrenceRule, EventStatus, EventType};
    use chrono::{DateTime, Utc, Duration};

    /// Build a confirmed, public, one-hour test event starting at `start` with
    /// the given UID/summary and recurrence rule. Factoring this out removes
    /// four near-identical 20-line struct literals from the tests below.
    fn recurring_event(uid: &str, summary: &str, start: DateTime<Utc>, rrule: &str) -> Event {
        Event {
            uid: uid.to_string(),
            summary: summary.to_string(),
            description: None,
            start,
            end: start + Duration::hours(1),
            all_day: false,
            location: None,
            status: EventStatus::Confirmed,
            event_type: EventType::Public,
            organizer: None,
            attendees: Vec::new(),
            recurrence: Some(RecurrenceRule::from_str(rrule).unwrap()),
            alarms: Vec::new(),
            properties: std::collections::HashMap::new(),
            created: start,
            last_modified: start,
            sequence: 0,
            timezone: None,
        }
    }

    /// COUNT=n must terminate expansion after exactly n occurrences.
    #[test]
    fn test_count_termination() {
        // Create a daily recurring event with COUNT=5
        let base_time = Utc::now();
        let event = recurring_event("test-count", "Test Count Event", base_time, "FREQ=DAILY;COUNT=5");

        // Test expansion with a wide time range
        let start_range = base_time - Duration::days(30);
        let end_range = base_time + Duration::days(30);
        let occurrences = event.expand_occurrences(start_range, end_range);

        // Should have exactly 5 occurrences due to COUNT=5
        assert_eq!(occurrences.len(), 5, "COUNT=5 should generate exactly 5 occurrences");
        println!("✅ COUNT termination test passed: {} occurrences generated", occurrences.len());
    }

    /// UNTIL must stop expansion at (and including) the given timestamp.
    #[test]
    fn test_until_termination() {
        // Create a weekly recurring event with UNTIL
        let base_time = Utc::now();
        let until_date = base_time + Duration::days(21); // 3 weeks from now
        let rrule_str = format!("FREQ=WEEKLY;UNTIL={}", until_date.format("%Y%m%dT%H%M%SZ"));
        let event = recurring_event("test-until", "Test Until Event", base_time, &rrule_str);

        // Test expansion with a wide time range
        let start_range = base_time - Duration::days(30);
        let end_range = base_time + Duration::days(60); // Beyond UNTIL date
        let occurrences = event.expand_occurrences(start_range, end_range);

        // Should have occurrences up to but not beyond the UNTIL date
        // With weekly frequency and 3 weeks until date, should have 3-4 occurrences
        assert!(occurrences.len() >= 3 && occurrences.len() <= 4,
                "WEEKLY with UNTIL=3weeks should generate 3-4 occurrences, got {}", occurrences.len());

        // Check that no occurrence exceeds the UNTIL date
        for occurrence in &occurrences {
            assert!(occurrence.start <= until_date,
                    "Occurrence start {} should not exceed UNTIL date {}",
                    occurrence.start, until_date);
        }
        println!("✅ UNTIL termination test passed: {} occurrences generated, all before UNTIL date", occurrences.len());
    }

    /// An unterminated FREQ=DAILY rule must still stop at the query window.
    #[test]
    fn test_time_bounded_expansion() {
        // Create a daily recurring event with no termination
        let base_time = Utc::now();
        let event = recurring_event("test-bounded", "Test Time Bounded Event", base_time, "FREQ=DAILY");

        // Test with 30-day time window
        let start_range = base_time - Duration::days(30);
        let end_range = base_time + Duration::days(30);
        let occurrences = event.expand_occurrences(start_range, end_range);

        // Should have approximately 60-61 occurrences (30 days past + 30 days future + today)
        assert!(occurrences.len() >= 60 && occurrences.len() <= 61,
                "Time-bounded expansion should generate ~61 occurrences, got {}", occurrences.len());

        // Check that all occurrences are within the time range
        for occurrence in &occurrences {
            assert!(occurrence.start >= start_range,
                    "Occurrence start {} should not be before start range {}",
                    occurrence.start, start_range);
            assert!(occurrence.start <= end_range,
                    "Occurrence start {} should not be after end range {}",
                    occurrence.start, end_range);
        }
        println!("✅ Time-bounded expansion test passed: {} occurrences generated within 30-day window", occurrences.len());
    }

    /// A rule combining INTERVAL, BYDAY and COUNT must respect COUNT.
    #[test]
    fn test_complex_rrule() {
        // Test a more complex RRULE with multiple parameters
        let base_time = Utc::now();
        let event = recurring_event("test-complex", "Test Complex Event", base_time, "FREQ=WEEKLY;INTERVAL=2;BYDAY=MO,WE,FR;COUNT=6");

        let start_range = base_time - Duration::days(30);
        let end_range = base_time + Duration::days(60);
        let occurrences = event.expand_occurrences(start_range, end_range);

        // Should have exactly 6 occurrences due to COUNT=6
        assert_eq!(occurrences.len(), 6, "COUNT=6 should generate exactly 6 occurrences");
        println!("✅ Complex RRULE test passed: {} occurrences generated for biweekly Mon/Wed/Fri", occurrences.len());
    }
}

31
test_rrule.rs Normal file
View file

@ -0,0 +1,31 @@
use rrule::{RRuleSet};
use chrono::{DateTime, Utc};
fn main() {
let rrule_str = "FREQ=WEEKLY;BYDAY=MO,WE,FR;COUNT=10";
println!("Testing RRULE: {}", rrule_str);
// Test different approaches
match RRuleSet::from_str(rrule_str) {
Ok(rrule_set) => {
println!("Successfully parsed RRULE");
// Check available methods
let start = Utc::now();
let end = start + chrono::Duration::days(30);
// Try the between method
match rrule_set.between(start, end, true) {
Ok(occurrences) => {
println!("Found {} occurrences", occurrences.len());
}
Err(e) => {
println!("Error calling between: {}", e);
}
}
}
Err(e) => {
println!("Error parsing RRULE: {}", e);
}
}
}

22
test_timezone.rs Normal file
View file

@ -0,0 +1,22 @@
use chrono::{DateTime, Utc, NaiveDateTime};
/// Manual check of iCal generation with and without an explicit timezone.
fn main() {
    // Fixed reference instant: 2023-12-25 08:30:00 UTC, 30-minute duration.
    let naive = NaiveDateTime::parse_from_str("20231225T083000", "%Y%m%dT%H%M%S").unwrap();
    let begin = DateTime::from_naive_utc_and_offset(naive, Utc);
    let finish = begin + chrono::Duration::minutes(30);

    // Case 1: event carrying an explicit IANA timezone.
    let mut zoned = caldav_sync::event::Event::new("Tether Sync".to_string(), begin, finish);
    zoned.timezone = Some("America/Toronto".to_string());
    let zoned_ical = zoned.to_ical().unwrap();
    println!("=== Event with Timezone (America/Toronto) ===");
    println!("{}", zoned_ical);
    println!("\n");

    // Case 2: no timezone set — generation should fall back to UTC.
    let plain = caldav_sync::event::Event::new("UTC Event".to_string(), begin, finish);
    let plain_ical = plain.to_ical().unwrap();
    println!("=== Event without Timezone (fallback to UTC) ===");
    println!("{}", plain_ical);
}

View file

@ -225,6 +225,277 @@ mod filter_tests {
}
}
#[cfg(test)]
mod live_caldav_tests {
    use caldav_sync::Config;
    use caldav_sync::minicaldav_client::RealCalDavClient;
    use caldav_sync::event::Event;
    use chrono::{DateTime, Utc, Duration};
    use tokio;
    use std::path::PathBuf;

    /// Test basic CRUD operations on the import calendar using the test configuration
    ///
    /// NOTE(review): live integration test — requires `config-test-import.toml`
    /// to point at a reachable CalDAV server; it creates, updates and deletes a
    /// real event there.
    #[tokio::test]
    async fn test_create_update_delete_event() -> Result<(), Box<dyn std::error::Error>> {
        println!("🧪 Starting CRUD test with import calendar...");
        // Load test configuration
        let config_path = PathBuf::from("config-test-import.toml");
        let config = Config::from_file(&config_path)?;
        // Validate configuration
        config.validate()?;
        // Create CalDAV client for target server (Nextcloud)
        let import_config = config.get_import_config().ok_or("No import configuration found")?;
        let target_client = RealCalDavClient::new(
            &import_config.target_server.url,
            &import_config.target_server.username,
            &import_config.target_server.password,
        ).await?;
        // Build target calendar URL
        let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));
        // Validate target calendar
        let is_valid = target_client.validate_target_calendar(&target_calendar_url).await?;
        assert!(is_valid, "Target calendar should be accessible");
        println!("✅ Target calendar is accessible");
        // Create test event for today (10:00-11:00 UTC; timestamp suffix keeps the UID unique per run)
        let now = Utc::now();
        let today_start = now.date_naive().and_hms_opt(10, 0, 0).unwrap().and_utc();
        let today_end = today_start + Duration::hours(1);
        let test_uid = format!("test-event-{}", now.timestamp());
        let mut test_event = Event::new(
            format!("Test Event {}", test_uid),
            today_start,
            today_end,
        );
        test_event.uid = test_uid.clone();
        test_event.description = Some("This is a test event for CRUD operations".to_string());
        test_event.location = Some("Test Location".to_string());
        println!("📝 Creating test event: {}", test_event.summary);
        // Convert event to iCalendar format
        let ical_data = test_event.to_ical()?;
        // Test 1: Create event
        let create_result = target_client.put_event(
            &target_calendar_url,
            &test_uid,
            &ical_data,
            None // No ETag for creation
        ).await;
        match create_result {
            Ok(_) => println!("✅ Event created successfully"),
            Err(e) => {
                println!("❌ Failed to create event: {}", e);
                return Err(e.into());
            }
        }
        // Wait a moment to ensure the event is processed
        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
        // Test 2: Verify event exists
        println!("🔍 Verifying event exists...");
        let etag_result = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
        // BUGFIX: this `match` is the initializer of a `let` binding, so it
        // must be terminated with a semicolon — the original was missing it
        // and did not compile.
        let original_etag = match etag_result {
            Ok(Some(etag)) => {
                println!("✅ Event verified, ETag: {}", etag);
                etag
            }
            Ok(None) => {
                println!("❌ Event not found after creation");
                return Err("Event not found after creation".into());
            }
            Err(e) => {
                println!("❌ Failed to verify event: {}", e);
                return Err(e.into());
            }
        };
        // Test 3: Update event (change date to tomorrow)
        println!("📝 Updating event for tomorrow...");
        let tomorrow_start = today_start + Duration::days(1);
        let tomorrow_end = tomorrow_start + Duration::hours(1);
        test_event.start = tomorrow_start;
        test_event.end = tomorrow_end;
        test_event.summary = format!("Test Event {} (Updated for Tomorrow)", test_uid);
        test_event.description = Some("This event has been updated to tomorrow".to_string());
        test_event.sequence += 1; // Increment sequence for update
        // Convert updated event to iCalendar format
        let updated_ical_data = test_event.to_ical()?;
        let update_result = target_client.put_event(
            &target_calendar_url,
            &test_uid,
            &updated_ical_data,
            Some(&original_etag) // Use ETag for update
        ).await;
        match update_result {
            Ok(_) => println!("✅ Event updated successfully"),
            Err(e) => {
                println!("❌ Failed to update event: {}", e);
                return Err(e.into());
            }
        }
        // Wait a moment to ensure the update is processed
        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
        // Test 4: Verify event was updated (ETag should change)
        println!("🔍 Verifying event update...");
        let new_etag_result = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
        match new_etag_result {
            Ok(Some(new_etag)) => {
                if new_etag != original_etag {
                    println!("✅ Event updated, new ETag: {}", new_etag);
                } else {
                    println!("⚠️ Event ETag didn't change after update");
                }
            }
            Ok(None) => {
                println!("❌ Event not found after update");
                return Err("Event not found after update".into());
            }
            Err(e) => {
                println!("❌ Failed to verify updated event: {}", e);
                return Err(e.into());
            }
        }
        // Test 5: Delete event
        println!("🗑️ Deleting event...");
        let delete_result = target_client.delete_event(
            &target_calendar_url,
            &test_uid,
            None // No ETag for deletion (let server handle it)
        ).await;
        match delete_result {
            Ok(_) => println!("✅ Event deleted successfully"),
            Err(e) => {
                println!("❌ Failed to delete event: {}", e);
                return Err(e.into());
            }
        }
        // Wait a moment to ensure the deletion is processed
        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
        // Test 6: Verify event was deleted
        println!("🔍 Verifying event deletion...");
        let final_check = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
        match final_check {
            Ok(None) => println!("✅ Event successfully deleted"),
            Ok(Some(etag)) => {
                println!("❌ Event still exists after deletion, ETag: {}", etag);
                return Err("Event still exists after deletion".into());
            }
            Err(e) => {
                // CONSISTENCY: treat a 404 as successful deletion, matching the
                // copy of this test in tests/live_caldav_test.rs — some servers
                // answer the ETag probe for a deleted resource with 404 rather
                // than an empty result.
                if e.to_string().contains("404") || e.to_string().contains("Not Found") {
                    println!("✅ Event successfully deleted (confirmed by 404)");
                } else {
                    println!("❌ Failed to verify deletion: {}", e);
                    return Err(e.into());
                }
            }
        }
        println!("🎉 All CRUD operations completed successfully!");
        Ok(())
    }

    /// Test HTTP error handling by attempting to delete a non-existent event
    #[tokio::test]
    async fn test_delete_nonexistent_event() -> Result<(), Box<dyn std::error::Error>> {
        println!("🧪 Testing deletion of non-existent event...");
        // Load test configuration
        let config_path = PathBuf::from("config-test-import.toml");
        let config = Config::from_file(&config_path)?;
        // Create CalDAV client for target server
        let import_config = config.get_import_config().ok_or("No import configuration found")?;
        let target_client = RealCalDavClient::new(
            &import_config.target_server.url,
            &import_config.target_server.username,
            &import_config.target_server.password,
        ).await?;
        // Build target calendar URL
        let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));
        // Try to delete a non-existent event; deletion should be idempotent.
        let fake_uid = "non-existent-event-12345";
        println!("🗑️ Testing deletion of non-existent event: {}", fake_uid);
        let delete_result = target_client.delete_event(
            &target_calendar_url,
            fake_uid,
            None
        ).await;
        match delete_result {
            Ok(_) => {
                println!("✅ Non-existent event deletion handled gracefully (idempotent)");
                Ok(())
            }
            Err(e) => {
                println!("❌ Failed to handle non-existent event deletion gracefully: {}", e);
                Err(e.into())
            }
        }
    }

    /// Test event existence checking
    #[tokio::test]
    async fn test_event_existence_check() -> Result<(), Box<dyn std::error::Error>> {
        println!("🧪 Testing event existence check...");
        // Load test configuration
        let config_path = PathBuf::from("config-test-import.toml");
        let config = Config::from_file(&config_path)?;
        // Create CalDAV client for target server
        let import_config = config.get_import_config().ok_or("No import configuration found")?;
        let target_client = RealCalDavClient::new(
            &import_config.target_server.url,
            &import_config.target_server.username,
            &import_config.target_server.password,
        ).await?;
        // Build target calendar URL
        let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));
        // Probe a UID that cannot exist; the client should report it missing.
        let fake_uid = "non-existent-event-67890";
        let fake_event_url = format!("{}{}.ics", target_calendar_url, fake_uid);
        println!("🔍 Testing existence check for non-existent event: {}", fake_uid);
        let existence_result = target_client.check_event_exists(&fake_event_url).await;
        match existence_result {
            Ok(_) => {
                println!("❌ Non-existent event reported as existing");
                Err("Non-existent event reported as existing".into())
            }
            Err(e) => {
                println!("✅ Non-existent event correctly reported as missing: {}", e);
                Ok(())
            }
        }
    }
}
#[cfg(test)]
mod integration_tests {
use super::*;

274
tests/live_caldav_test.rs Normal file
View file

@ -0,0 +1,274 @@
use caldav_sync::Config;
use caldav_sync::minicaldav_client::RealCalDavClient;
use caldav_sync::event::Event;
use chrono::{DateTime, Utc, Duration};
use tokio;
use std::path::PathBuf;
/// Test basic CRUD operations on the import calendar using the test configuration
///
/// NOTE(review): live integration test — requires `config-test-import.toml`
/// to point at a reachable CalDAV server; it creates, updates and then deletes
/// a real event there. The steps are strictly ordered and each waits 500 ms
/// for server-side processing.
#[tokio::test]
async fn test_create_update_delete_event() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Starting CRUD test with import calendar...");
    // Load test configuration
    let config_path = PathBuf::from("config-test-import.toml");
    let config = Config::from_file(&config_path)?;
    // Validate configuration
    config.validate()?;
    // Create CalDAV client for target server (Nextcloud)
    let import_config = config.get_import_config().ok_or("No import configuration found")?;
    let target_client = RealCalDavClient::new(
        &import_config.target_server.url,
        &import_config.target_server.username,
        &import_config.target_server.password,
    ).await?;
    // Build target calendar URL (ensure exactly one trailing slash)
    let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));
    // Validate target calendar
    let is_valid = target_client.validate_target_calendar(&target_calendar_url).await?;
    assert!(is_valid, "Target calendar should be accessible");
    println!("✅ Target calendar is accessible");
    // Create test event for today
    // (10:00–11:00 UTC; the timestamp suffix keeps the UID unique per run)
    let now = Utc::now();
    let today_start = now.date_naive().and_hms_opt(10, 0, 0).unwrap().and_utc();
    let today_end = today_start + Duration::hours(1);
    let test_uid = format!("test-event-{}", now.timestamp());
    let mut test_event = Event::new(
        format!("Test Event {}", test_uid),
        today_start,
        today_end,
    );
    test_event.uid = test_uid.clone();
    test_event.description = Some("This is a test event for CRUD operations".to_string());
    test_event.location = Some("Test Location".to_string());
    println!("📝 Creating test event: {}", test_event.summary);
    // Convert event to iCalendar format
    let ical_data = test_event.to_ical()?;
    // Test 1: Create event
    let create_result = target_client.put_event(
        &target_calendar_url,
        &test_uid,
        &ical_data,
        None // No ETag for creation
    ).await;
    match create_result {
        Ok(_) => println!("✅ Event created successfully"),
        Err(e) => {
            println!("❌ Failed to create event: {}", e);
            return Err(e.into());
        }
    }
    // Wait a moment to ensure the event is processed
    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
    // Test 2: Verify event exists (and capture its ETag for the update step)
    println!("🔍 Verifying event exists...");
    let etag_result = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
    let original_etag = match etag_result {
        Ok(Some(etag)) => {
            println!("✅ Event verified, ETag: {}", etag);
            etag
        }
        Ok(None) => {
            println!("❌ Event not found after creation");
            return Err("Event not found after creation".into());
        }
        Err(e) => {
            println!("❌ Failed to verify event: {}", e);
            return Err(e.into());
        }
    };
    // Test 3: Update event (change date to tomorrow)
    println!("📝 Updating event for tomorrow...");
    let tomorrow_start = today_start + Duration::days(1);
    let tomorrow_end = tomorrow_start + Duration::hours(1);
    test_event.start = tomorrow_start;
    test_event.end = tomorrow_end;
    test_event.summary = format!("Test Event {} (Updated for Tomorrow)", test_uid);
    test_event.description = Some("This event has been updated to tomorrow".to_string());
    test_event.sequence += 1; // Increment sequence for update
    // Convert updated event to iCalendar format
    let updated_ical_data = test_event.to_ical()?;
    let update_result = target_client.put_event(
        &target_calendar_url,
        &test_uid,
        &updated_ical_data,
        Some(&original_etag) // Use ETag for update
    ).await;
    match update_result {
        Ok(_) => println!("✅ Event updated successfully"),
        Err(e) => {
            println!("❌ Failed to update event: {}", e);
            return Err(e.into());
        }
    }
    // Wait a moment to ensure the update is processed
    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
    // Test 4: Verify event was updated (ETag should change)
    println!("🔍 Verifying event update...");
    let new_etag_result = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
    match new_etag_result {
        Ok(Some(new_etag)) => {
            if new_etag != original_etag {
                println!("✅ Event updated, new ETag: {}", new_etag);
            } else {
                // An unchanged ETag is only warned about, not treated as failure.
                println!("⚠️ Event ETag didn't change after update");
            }
        }
        Ok(None) => {
            println!("❌ Event not found after update");
            return Err("Event not found after update".into());
        }
        Err(e) => {
            println!("❌ Failed to verify updated event: {}", e);
            return Err(e.into());
        }
    }
    // Test 5: Delete event
    println!("🗑️ Deleting event...");
    let delete_result = target_client.delete_event(
        &target_calendar_url,
        &test_uid,
        None // No ETag for deletion (let server handle it)
    ).await;
    match delete_result {
        Ok(_) => println!("✅ Event deleted successfully"),
        Err(e) => {
            println!("❌ Failed to delete event: {}", e);
            return Err(e.into());
        }
    }
    // Wait a moment to ensure the deletion is processed
    tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
    // Test 6: Verify event was deleted
    println!("🔍 Verifying event deletion...");
    let final_check = target_client.get_event_etag(&target_calendar_url, &test_uid).await;
    match final_check {
        Ok(None) => {
            println!("✅ Event successfully deleted");
        }
        Ok(Some(etag)) => {
            println!("❌ Event still exists after deletion, ETag: {}", etag);
            return Err("Event still exists after deletion".into());
        }
        Err(e) => {
            // Check if it's a 404 error, which indicates successful deletion
            if e.to_string().contains("404") || e.to_string().contains("Not Found") {
                println!("✅ Event successfully deleted (confirmed by 404)");
            } else {
                println!("❌ Failed to verify deletion: {}", e);
                return Err(e.into());
            }
        }
    }
    println!("🎉 All CRUD operations completed successfully!");
    Ok(())
}
/// Test HTTP error handling by attempting to delete a non-existent event
#[tokio::test]
async fn test_delete_nonexistent_event() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing deletion of non-existent event...");

    // Pull connection details from the shared test configuration.
    let config_path = PathBuf::from("config-test-import.toml");
    let config = Config::from_file(&config_path)?;
    let import_config = config.get_import_config().ok_or("No import configuration found")?;
    let target_client = RealCalDavClient::new(
        &import_config.target_server.url,
        &import_config.target_server.username,
        &import_config.target_server.password,
    ).await?;

    // Normalise the calendar URL to exactly one trailing slash.
    let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));

    // Deleting an event that was never created should be a graceful no-op.
    let fake_uid = "non-existent-event-12345";
    println!("🗑️ Testing deletion of non-existent event: {}", fake_uid);
    let delete_result = target_client.delete_event(&target_calendar_url, fake_uid, None).await;

    if let Err(e) = delete_result {
        println!("❌ Failed to handle non-existent event deletion gracefully: {}", e);
        return Err(e.into());
    }
    println!("✅ Non-existent event deletion handled gracefully (idempotent)");
    Ok(())
}
/// Test event existence checking
#[tokio::test]
async fn test_event_existence_check() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing event existence check...");

    // Connect to the target server described by the test configuration.
    let config_path = PathBuf::from("config-test-import.toml");
    let config = Config::from_file(&config_path)?;
    let import_config = config.get_import_config().ok_or("No import configuration found")?;
    let target_client = RealCalDavClient::new(
        &import_config.target_server.url,
        &import_config.target_server.username,
        &import_config.target_server.password,
    ).await?;

    // Normalise the calendar URL to exactly one trailing slash.
    let target_calendar_url = format!("{}/", import_config.target_server.url.trim_end_matches('/'));

    // Probe a UID that cannot exist; the client should report it as missing.
    let fake_uid = "non-existent-event-67890";
    let fake_event_url = format!("{}{}.ics", target_calendar_url, fake_uid);
    println!("🔍 Testing existence check for non-existent event: {}", fake_uid);

    match target_client.check_event_exists(&fake_event_url).await {
        Err(e) => {
            println!("✅ Non-existent event correctly reported as missing: {}", e);
            Ok(())
        }
        Ok(_) => {
            println!("❌ Non-existent event reported as existing");
            Err("Non-existent event reported as existing".into())
        }
    }
}