Mirror of https://github.com/spacedriveapp/spacedrive.git, synced 2025-12-11 20:15:30 +01:00
refactor: Update Swift type generation and remove obsolete test files
- Modified the Swift type generation script to include a new RustDuration struct for decoding the Rust Duration format, enhancing type safety.
- Updated the generated Swift types to reflect the changes in duration handling and improved documentation.
- Removed obsolete test files related to enum variants and event decoding, streamlining the codebase and focusing on relevant functionality.
This commit is contained in:
parent f599f1b86c
commit 22339d82ee
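Not part of the commit: a minimal Swift sketch of how the new RustDuration helper decodes the Rust Duration format {"secs": u64, "nanos": u32} mentioned above; the TimingReport wrapper and sample values are hypothetical.

import Foundation

// RustDuration as emitted into the generated header by this commit.
public struct RustDuration: Codable {
    public let secs: UInt64
    public let nanos: UInt32

    public var timeInterval: TimeInterval {
        Double(secs) + Double(nanos) / 1_000_000_000.0
    }
}

// Hypothetical container type, used only for this example.
struct TimingReport: Codable {
    let elapsed: RustDuration
}

let json = #"{"elapsed": {"secs": 3, "nanos": 500000000}}"#.data(using: .utf8)!
do {
    let report = try JSONDecoder().decode(TimingReport.self, from: json)
    print(report.elapsed.timeInterval) // 3.5
} catch {
    print("decode failed: \(error)")
}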
@@ -57,7 +57,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Configure Swift generation
    let swift = Swift::new()
        .header("// Generated by Spacedrive using Specta - DO NOT EDIT")
        .header("// Generated by Spacedrive using Specta - DO NOT EDIT\nimport Foundation\n\n// MARK: - Duration Helper\n/// Helper struct to decode Rust Duration format {\"secs\": u64, \"nanos\": u32}\npublic struct RustDuration: Codable {\n public let secs: UInt64\n public let nanos: UInt32\n \n public var timeInterval: TimeInterval {\n return Double(secs) + Double(nanos) / 1_000_000_000.0\n }\n}\n\n// MARK: - Generated Types")
        .naming(specta_swift::NamingConvention::PascalCase) // Swift type naming convention
        .optionals(specta_swift::OptionalStyle::QuestionMark); // Use Swift ? syntax
@@ -1,38 +0,0 @@
//! Test enum variant serialization formats

use serde::{Deserialize, Serialize};
use specta::{Type, TypeCollection};
use specta_swift::Swift;

#[derive(Type, Serialize, Deserialize)]
enum TestEvent {
    // Unit variant
    Started,
    // Tuple variant
    Progress(f64, String),
    // Struct variant - this is what our Event enum uses
    JobStarted { job_id: String, job_type: String },
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing enum variant formats...");

    // Test serialization
    let event = TestEvent::JobStarted {
        job_id: "test-123".to_string(),
        job_type: "Indexing".to_string(),
    };

    let json = serde_json::to_string_pretty(&event)?;
    println!("📄 Rust serializes struct variant as:\n{}", json);

    // Generate Swift types
    let types = TypeCollection::default().register::<TestEvent>();

    let swift = Swift::new().naming(specta_swift::NamingConvention::PascalCase);

    let output = swift.export(&types)?;
    println!("📄 Swift enum generated as:\n{}", output);

    Ok(())
}
@@ -1,46 +0,0 @@
//! Test Event decoding to debug the Swift issue

use sd_core::infra::event::Event;
use serde_json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing Event decoding...");

    // Create the exact event that's failing
    let event = Event::JobStarted {
        job_id: "8525ff04-3025-409a-a98f-e94737bd94d4".to_string(),
        job_type: "Indexing".to_string(),
    };

    // Serialize just the inner event (what Swift should receive)
    let inner_json = serde_json::to_string_pretty(&event)?;
    println!(
        "📄 Inner event JSON (what Swift Event decoder should get):\n{}",
        inner_json
    );

    // Test if we can deserialize it back
    let decoded: Event = serde_json::from_str(&inner_json)?;
    println!("✅ Successfully decoded inner event: {:?}", decoded);

    // Now test the wrapped format (what daemon actually sends)
    let wrapped_json = serde_json::json!({
        "Event": event
    });
    let wrapped_str = serde_json::to_string_pretty(&wrapped_json)?;
    println!(
        "📄 Wrapped event JSON (what daemon sends):\n{}",
        wrapped_str
    );

    // Test extracting the inner event from the wrapper
    if let Some(inner_value) = wrapped_json.get("Event") {
        let inner_event: Event = serde_json::from_value(inner_value.clone())?;
        println!(
            "✅ Successfully extracted and decoded inner event from wrapper: {:?}",
            inner_event
        );
    }

    Ok(())
}
@@ -1,28 +0,0 @@
//! Test Event serialization to see the exact format

use sd_core::infra::event::Event;
use serde_json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing Event serialization format...");

    // Create a sample JobStarted event like the daemon would
    let event = Event::JobStarted {
        job_id: "test-job-123".to_string(),
        job_type: "Indexing".to_string(),
    };

    // Serialize it to see the exact JSON format
    let json = serde_json::to_string_pretty(&event)?;
    println!("📄 JobStarted event JSON:\n{}", json);

    // This is what the daemon sends (wrapped in DaemonResponse::Event)
    let daemon_response = serde_json::json!({
        "Event": event
    });

    let daemon_json = serde_json::to_string_pretty(&daemon_response)?;
    println!("📄 Daemon response JSON:\n{}", daemon_json);

    Ok(())
}
@@ -1,30 +0,0 @@
//! Test field naming conversion in Specta

use specta::{Type, TypeCollection};
use specta_swift::Swift;

#[derive(Type)]
struct TestStruct {
    snake_case_field: String,
    another_field_name: u32,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🧪 Testing field naming conversion...");

    let types = TypeCollection::default().register::<TestStruct>();

    let swift = Swift::new().naming(specta_swift::NamingConvention::PascalCase);

    let output = swift.export(&types)?;
    println!("📄 Generated Swift:\n{}", output);

    // Check if snake_case_field becomes snakeCaseField
    if output.contains("snakeCaseField") {
        println!("✅ Field naming conversion is working");
    } else {
        println!("❌ Field naming conversion is NOT working");
    }

    Ok(())
}
@@ -46,14 +46,6 @@ public class SpacedriveClient {

        // 4. Handle response
        switch response {
        case .ok(let data):
            print("🔍 Query successful (bincode), decoding \(data.count) bytes")
            do {
                return try JSONDecoder().decode(responseType, from: data)
            } catch {
                print("❌ Query decode error: \(error)")
                throw SpacedriveError.serializationError("Failed to decode response: \(error)")
            }
        case .jsonOk(let jsonData):
            print("🔍 Query successful (JSON), decoding response")
            do {
@@ -66,7 +58,7 @@ public class SpacedriveClient {
        case .error(let error):
            print("❌ Query daemon error: \(error)")
            throw SpacedriveError.daemonError(error)
        case .pong, .event, .subscribed, .unsubscribed, .jsonOk:
        case .pong, .event, .subscribed, .unsubscribed:
            print("❌ Query unexpected response: \(response)")
            throw SpacedriveError.invalidResponse("Unexpected response to query")
        }
@@ -100,12 +92,6 @@ public class SpacedriveClient {

        // 4. Handle response
        switch response {
        case .ok(let data):
            do {
                return try JSONDecoder().decode(responseType, from: data)
            } catch {
                throw SpacedriveError.serializationError("Failed to decode response: \(error)")
            }
        case .jsonOk(let jsonData):
            do {
                let jsonResponseData = try JSONSerialization.data(withJSONObject: jsonData.value)
@@ -115,7 +101,7 @@ public class SpacedriveClient {
            }
        case .error(let error):
            throw SpacedriveError.daemonError(error)
        case .pong, .event, .subscribed, .unsubscribed, .jsonOk:
        case .pong, .event, .subscribed, .unsubscribed:
            throw SpacedriveError.invalidResponse("Unexpected response to action")
        }
    }
@@ -248,14 +234,6 @@ public class SpacedriveClient {

        case .shutdown:
            requestData = Data("\"Shutdown\"".utf8)

        case .action(let method, let payload):
            let actionRequest = ActionRequest(method: method, payload: payload.base64EncodedString())
            requestData = try JSONEncoder().encode(["Action": actionRequest])

        case .query(let method, let payload):
            let queryRequest = QueryRequest(method: method, payload: payload.base64EncodedString())
            requestData = try JSONEncoder().encode(["Query": queryRequest])
        }

        let requestLine = requestData + Data("\n".utf8)
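As a reading aid (not part of the diff): a minimal sketch of the newline-delimited JSON frame the request encoding above produces for a query. QueryRequest mirrors the private helper struct declared later in this file; the query body here is hypothetical, and exact JSON key order may vary.

import Foundation

struct QueryRequest: Codable {
    let method: String
    let payload: String
}

let queryBody = try! JSONEncoder().encode(["name": "Demo"])        // hypothetical query payload
let request = QueryRequest(method: "query:core.status.v1", payload: queryBody.base64EncodedString())
let requestData = try! JSONEncoder().encode(["Query": request])
let requestLine = requestData + Data("\n".utf8)                    // newline-delimited frame
print(String(data: requestLine, encoding: .utf8)!)
// e.g. {"Query":{"method":"query:core.status.v1","payload":"eyJuYW1lIjoiRGVtbyJ9"}}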
@@ -389,8 +367,6 @@ public class SpacedriveClient {
/// Request types that match the Rust daemon protocol
internal enum DaemonRequest {
    case ping
    case action(method: String, payload: Data)
    case query(method: String, payload: Data)
    case jsonAction(method: String, payload: [String: Any])
    case jsonQuery(method: String, payload: [String: Any])
    case subscribe(eventTypes: [String], filter: EventFilter?)
@@ -399,15 +375,6 @@ internal enum DaemonRequest {
}

/// Helper structs for proper JSON encoding
private struct ActionRequest: Codable {
    let method: String
    let payload: String
}

private struct QueryRequest: Codable {
    let method: String
    let payload: String
}
@@ -419,7 +386,6 @@ private struct SubscribeRequest: Codable {
/// Response types that match the Rust daemon protocol
internal enum DaemonResponse: Codable {
    case pong
    case ok(Data)
    case jsonOk(AnyCodable)
    case error(String)
    case event(Event)
@@ -449,10 +415,7 @@ internal enum DaemonResponse: Codable {
        // Try to decode as an object with variants
        let variantContainer = try decoder.container(keyedBy: VariantKeys.self)

        if variantContainer.contains(.ok) {
            let okData = try variantContainer.decode([UInt8].self, forKey: .ok)
            self = .ok(Data(okData))
        } else if variantContainer.contains(.jsonOk) {
        if variantContainer.contains(.jsonOk) {
            // JsonOk contains a JSON value that we need to decode manually
            let jsonValue = try variantContainer.decode(AnyCodable.self, forKey: .jsonOk)
            self = .jsonOk(jsonValue)
@@ -472,7 +435,6 @@ internal enum DaemonResponse: Codable {
    }

    enum VariantKeys: String, CodingKey {
        case ok = "Ok"
        case jsonOk = "JsonOk"
        case error = "Error"
        case event = "Event"
@@ -553,25 +515,6 @@ internal struct AnyCodable: Codable {
// MARK: - Convenience Methods

extension SpacedriveClient {
    /// Get core status - demonstrates real type-safe API usage
    /// Once types.swift is generated, this can use the actual OutputProperties type
    public func getCoreStatus() async throws -> Data {
        struct EmptyQuery: Codable {}

        // Return raw data until we have the generated types
        let queryData = try JSONEncoder().encode(EmptyQuery())
        let request = DaemonRequest.query(method: "query:core.status.v1", payload: queryData)
        let response = try await sendRequest(request)

        switch response {
        case .ok(let data):
            return data
        case .error(let error):
            throw SpacedriveError.daemonError(error)
        case .pong, .event, .subscribed, .unsubscribed, .jsonOk:
            throw SpacedriveError.invalidResponse("Unexpected response")
        }
    }

    /// Create a library using generated types
    public func createLibrary(name: String, path: String? = nil) async throws -> LibraryCreateOutput {
@@ -626,7 +569,7 @@ extension SpacedriveClient {
        case .error(let error):
            print("❌ Ping failed with daemon error: \(error)")
            throw SpacedriveError.daemonError("Ping failed: \(error)")
        case .ok, .event, .subscribed, .unsubscribed, .jsonOk:
        case .jsonOk, .event, .subscribed, .unsubscribed:
            print("❌ Ping received unexpected response")
            throw SpacedriveError.invalidResponse("Unexpected response to ping")
        }
@@ -1,6 +1,20 @@
// Generated by Spacedrive using Specta - DO NOT EDIT
import Foundation

// MARK: - Duration Helper
/// Helper struct to decode Rust Duration format {"secs": u64, "nanos": u32}
public struct RustDuration: Codable {
    public let secs: UInt64
    public let nanos: UInt32

    public var timeInterval: TimeInterval {
        return Double(secs) + Double(nanos) / 1_000_000_000.0
    }
}

// MARK: - Generated Types
import Foundation

/// Represents an APFS container (physical storage with multiple volumes)
public struct ApfsContainer: Codable {
    public let containerId: String
@@ -847,10 +861,10 @@ public struct GenericProgress: Codable {

/// Comprehensive metrics for indexing operations
public struct IndexerMetrics: Codable {
    public let totalDuration: TimeInterval
    public let discoveryDuration: TimeInterval
    public let processingDuration: TimeInterval
    public let contentDuration: TimeInterval
    public let totalDuration: RustDuration
    public let discoveryDuration: RustDuration
    public let processingDuration: RustDuration
    public let contentDuration: RustDuration
    public let filesPerSecond: Float
    public let bytesPerSecond: Double
    public let dirsPerSecond: Float
@@ -1006,9 +1020,14 @@ public struct JobOutputFileValidationData: Codable {
    }
}

// MARK: - JobOutput Codable Implementation
// MARK: - JobOutput Adjacently Tagged Codable Implementation
extension JobOutput: Codable {
    private enum CodingKeys: String, CodingKey {
    private enum TypeKeys: String, CodingKey {
        case tag = "type"
        case content = "data"
    }

    private enum VariantType: String, Codable {
        case success = "Success"
        case fileCopy = "FileCopy"
        case indexed = "Indexed"
@@ -1021,67 +1040,69 @@ extension JobOutput: Codable {
    }

    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        let container = try decoder.container(keyedBy: TypeKeys.self)
        let variantType = try container.decode(VariantType.self, forKey: .tag)

        if container.allKeys.count != 1 {
            throw DecodingError.dataCorrupted(
                DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "Invalid number of keys found, expected one.")
            )
        }

        let key = container.allKeys.first!
        switch key {
        switch variantType {
        case .success:
            self = .success
        case .fileCopy:
            let data = try container.decode(JobOutputFileCopyData.self, forKey: .fileCopy)
            let data = try container.decode(JobOutputFileCopyData.self, forKey: .content)
            self = .fileCopy(data)
        case .indexed:
            let data = try container.decode(JobOutputIndexedData.self, forKey: .indexed)
            let data = try container.decode(JobOutputIndexedData.self, forKey: .content)
            self = .indexed(data)
        case .thumbnailsGenerated:
            let data = try container.decode(JobOutputThumbnailsGeneratedData.self, forKey: .thumbnailsGenerated)
            let data = try container.decode(JobOutputThumbnailsGeneratedData.self, forKey: .content)
            self = .thumbnailsGenerated(data)
        case .thumbnailGeneration:
            let data = try container.decode(JobOutputThumbnailGenerationData.self, forKey: .thumbnailGeneration)
            let data = try container.decode(JobOutputThumbnailGenerationData.self, forKey: .content)
            self = .thumbnailGeneration(data)
        case .fileMove:
            let data = try container.decode(JobOutputFileMoveData.self, forKey: .fileMove)
            let data = try container.decode(JobOutputFileMoveData.self, forKey: .content)
            self = .fileMove(data)
        case .fileDelete:
            let data = try container.decode(JobOutputFileDeleteData.self, forKey: .fileDelete)
            let data = try container.decode(JobOutputFileDeleteData.self, forKey: .content)
            self = .fileDelete(data)
        case .duplicateDetection:
            let data = try container.decode(JobOutputDuplicateDetectionData.self, forKey: .duplicateDetection)
            let data = try container.decode(JobOutputDuplicateDetectionData.self, forKey: .content)
            self = .duplicateDetection(data)
        case .fileValidation:
            let data = try container.decode(JobOutputFileValidationData.self, forKey: .fileValidation)
            let data = try container.decode(JobOutputFileValidationData.self, forKey: .content)
            self = .fileValidation(data)
        }
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        var container = encoder.container(keyedBy: TypeKeys.self)

        switch self {
        case .success:
            try container.encodeNil(forKey: .success)
            try container.encode(VariantType.success, forKey: .tag)
        case .fileCopy(let data):
            try container.encode(data, forKey: .fileCopy)
            try container.encode(VariantType.fileCopy, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .indexed(let data):
            try container.encode(data, forKey: .indexed)
            try container.encode(VariantType.indexed, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .thumbnailsGenerated(let data):
            try container.encode(data, forKey: .thumbnailsGenerated)
            try container.encode(VariantType.thumbnailsGenerated, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .thumbnailGeneration(let data):
            try container.encode(data, forKey: .thumbnailGeneration)
            try container.encode(VariantType.thumbnailGeneration, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .fileMove(let data):
            try container.encode(data, forKey: .fileMove)
            try container.encode(VariantType.fileMove, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .fileDelete(let data):
            try container.encode(data, forKey: .fileDelete)
            try container.encode(VariantType.fileDelete, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .duplicateDetection(let data):
            try container.encode(data, forKey: .duplicateDetection)
            try container.encode(VariantType.duplicateDetection, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .fileValidation(let data):
            try container.encode(data, forKey: .fileValidation)
            try container.encode(VariantType.fileValidation, forKey: .tag)
            try container.encode(data, forKey: .content)
        }
    }
}
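As a reading aid (not part of the diff): a minimal sketch of the adjacently tagged {"type": ..., "data": ...} layout that the rewritten JobOutput (and, below, Progress) decoders expect, using a hypothetical Demo enum and payload.

import Foundation

// Hypothetical payload type, for illustration only.
struct DemoStartedData: Codable { let jobId: String }

// Mirrors the TypeKeys / VariantType pattern generated above.
enum Demo: Decodable {
    case started(DemoStartedData)
    case finished

    private enum TypeKeys: String, CodingKey { case tag = "type", content = "data" }
    private enum VariantType: String, Codable { case started = "Started", finished = "Finished" }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: TypeKeys.self)
        switch try container.decode(VariantType.self, forKey: .tag) {
        case .started:
            self = .started(try container.decode(DemoStartedData.self, forKey: .content))
        case .finished:
            self = .finished
        }
    }
}

let json = #"{"type":"Started","data":{"jobId":"abc"}}"#.data(using: .utf8)!
print(try! JSONDecoder().decode(Demo.self, from: json)) // started(DemoStartedData(jobId: "abc"))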
@@ -1167,8 +1188,8 @@ public struct PathMapping: Codable {
/// Performance and timing metrics
public struct PerformanceMetrics: Codable {
    public let rate: Float
    public let estimatedRemaining: TimeInterval?
    public let elapsed: TimeInterval?
    public let estimatedRemaining: RustDuration?
    public let elapsed: RustDuration?
    public let errorCount: UInt64
    public let warningCount: UInt64
@@ -1199,9 +1220,14 @@ public struct ProgressBytesData: Codable {
    public let total: UInt64
}

// MARK: - Progress Codable Implementation
// MARK: - Progress Adjacently Tagged Codable Implementation
extension Progress: Codable {
    private enum CodingKeys: String, CodingKey {
    private enum TypeKeys: String, CodingKey {
        case tag = "type"
        case content = "data"
    }

    private enum VariantType: String, Codable {
        case count = "Count"
        case percentage = "Percentage"
        case indeterminate = "Indeterminate"
@@ -1210,51 +1236,41 @@ extension Progress: Codable {
    }

    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        let container = try decoder.container(keyedBy: TypeKeys.self)
        let variantType = try container.decode(VariantType.self, forKey: .tag)

        if container.allKeys.count != 1 {
            throw DecodingError.dataCorrupted(
                DecodingError.Context(codingPath: decoder.codingPath, debugDescription: "Invalid number of keys found, expected one.")
            )
        }

        let key = container.allKeys.first!
        switch key {
        switch variantType {
        case .count:
            let data = try container.decode(ProgressCountData.self, forKey: .count)
            let data = try container.decode(ProgressCountData.self, forKey: .content)
            self = .count(data)
        case .percentage:
            // TODO: Implement tuple variant decoding for percentage
            fatalError("Tuple variant decoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        case .indeterminate:
            // TODO: Implement tuple variant decoding for indeterminate
            fatalError("Tuple variant decoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        case .bytes:
            let data = try container.decode(ProgressBytesData.self, forKey: .bytes)
            let data = try container.decode(ProgressBytesData.self, forKey: .content)
            self = .bytes(data)
        case .generic:
            // TODO: Implement tuple variant decoding for generic
            fatalError("Tuple variant decoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        }
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        var container = encoder.container(keyedBy: TypeKeys.self)

        switch self {
        case .count(let data):
            try container.encode(data, forKey: .count)
            try container.encode(VariantType.count, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .percentage:
            // TODO: Implement tuple variant encoding for percentage
            fatalError("Tuple variant encoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        case .indeterminate:
            // TODO: Implement tuple variant encoding for indeterminate
            fatalError("Tuple variant encoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        case .bytes(let data):
            try container.encode(data, forKey: .bytes)
            try container.encode(VariantType.bytes, forKey: .tag)
            try container.encode(data, forKey: .content)
        case .generic:
            // TODO: Implement tuple variant encoding for generic
            fatalError("Tuple variant encoding not implemented")
            fatalError("Adjacently tagged tuple variants not implemented")
        }
    }
}