[tests] Haxe SkeletonSerializer WIP

This commit is contained in:
Mario Zechner 2025-07-30 10:10:49 +02:00
parent 1570962903
commit e162835c31
9 changed files with 1863 additions and 2208 deletions


@ -5,6 +5,5 @@
import FlutterMacOS
import Foundation
func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) {
}

File diff suppressed because it is too large


@ -14,14 +14,18 @@ import haxe.io.Bytes;
class MockTextureLoader implements TextureLoader {
public function new() {}
public function load(page:TextureAtlasPage, path:String):Void {
public function loadPage(page:TextureAtlasPage, path:String):Void {
// Set mock dimensions - no actual texture loading needed
page.width = 1024;
page.height = 1024;
page.texture = {}; // Empty object as mock texture
}
public function unload(texture:Dynamic):Void {
public function loadRegion(region:spine.atlas.TextureAtlasRegion):Void {
// Nothing to do in headless mode
}
public function unloadPage(page:TextureAtlasPage):Void {
// Nothing to unload in headless mode
}
}
@ -98,7 +102,6 @@ class HeadlessTest {
Sys.println("\n=== ANIMATION STATE ===");
Sys.println(serializer.serializeAnimationState(state));
}
} catch (e:Dynamic) {
Sys.stderr().writeString('Error: $e\n');
Sys.exit(1);


@ -212,125 +212,122 @@ When implementing the Haxe serializer generator, these patterns should be applie
---
# Haxe Serializer Generator Implementation Plan
# Revised Implementation Plan: Reflection-Based Haxe Serializer
Based on the comprehensive pattern analysis above, here's the implementation plan for a new Haxe serializer generator:
## Key Insight
## Architecture Overview
Instead of maintaining complex mapping tables, we can leverage Haxe's dynamic reflection capabilities to automatically resolve field vs method access at runtime.
The new generator will use a **rule-based transformation system** with the following components:
## Simplified Architecture
1. **Mapping Database**: Load java-haxe-diff.md mappings into a structured lookup table
2. **Context-Aware Transformer**: Apply transformations based on class context
3. **Type System**: Handle Java-to-Haxe type conversions
4. **Code Generator**: Produce clean, idiomatic Haxe code
### Core Approach: Runtime Property Resolution
```haxe
private function getPropertyValue(obj:Dynamic, javaGetter:String):Dynamic {
    // Extract property name from Java getter
    var propName = extractPropertyName(javaGetter); // getName() → "name"

    // 1. Try direct field access first (most common case)
    if (Reflect.hasField(obj, propName)) {
        return Reflect.field(obj, propName);
    }

    // 2. Try special field variations
    var specialNames = getSpecialFieldNames(javaGetter, propName);
    for (name in specialNames) {
        if (Reflect.hasField(obj, name)) {
            return Reflect.field(obj, name);
        }
    }

    // 3. Try method access (for computed properties)
    var methodName = StringTools.replace(javaGetter, "()", "");
    if (Reflect.hasField(obj, methodName)) {
        return Reflect.callMethod(obj, Reflect.field(obj, methodName), []);
    }

    // 4. Handle property syntax (get, never)
    // This would need special handling, or we simply access the underlying getter
    throw 'Property ${javaGetter} not found on object';
}
```
### Special Name Mappings
Based on the pattern analysis, we only need to handle these special cases:
```haxe
private function getSpecialFieldNames(javaGetter:String, defaultName:String):Array<String> {
    return switch (javaGetter) {
        case "getInt()": ["intValue"];
        case "getFloat()": ["floatValue"];
        case "getString()": ["stringValue"];
        case "getPhysicsConstraints()": ["physics"];
        case "getUpdateCache()": ["_updateCache"];
        case "getSetupPose()": ["setup"];
        case "getAppliedPose()": ["applied"];
        default: [];
    }
}
```
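For instance, spine's Event-style values expose `intValue`/`floatValue`/`stringValue` fields rather than plain `int`/`float`/`string` fields, which is why `getInt()` and friends appear in the table above. The following is a minimal, hypothetical check of that fallback path against a stand-in object; the anonymous structure, the class name, and the `@:privateAccess` usage are illustrative, assuming `getPropertyValue` lives on the generated `SkeletonSerializer`:
```haxe
// Minimal sketch: exercise the fallback chain against a stand-in object.
// Only the special-name mapping (getInt() -> intValue) comes from the table above;
// everything else here is a hypothetical test harness.
class ReflectionEdgeCaseCheck {
    public static function main():Void {
        var serializer = new spine.utils.SkeletonSerializer();
        // Anonymous structure standing in for a spine Event with its special field names.
        var fakeEvent = { intValue: 42, floatValue: 1.5, stringValue: "hit" };
        // Step 1 (direct field "int") misses, step 2 resolves via "intValue".
        var value = @:privateAccess serializer.getPropertyValue(fakeEvent, "getInt()");
        if (value != 42) throw "getInt() did not resolve to intValue";
        Sys.println("special-name fallback ok: " + value);
    }
}
```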
## Implementation Steps
### Phase 1: Build Mapping Infrastructure
### Phase 1: Core Reflection System
1. Implement `getPropertyValue` with fallback chain
2. Handle special field name mappings
3. Test with known edge cases
1. **Parse java-haxe-diff.md**
- Extract all type mappings into a structured format
- Create lookup table: `Map<ClassName, Map<JavaGetter, HaxeMapping>>`
- Store mapping type (field, method, property) and Haxe type info
2. **Create Transformation Rules Engine**
- Rule priority system (specific → general)
- Context-aware lookups (class + getter combination)
- Fallback to general patterns
### Phase 2: Implement Core Transformations
1. **Getter-to-Field Transformer**
- Check mapping database first
- Apply general pattern: `getX()` → `x`
- Handle special cases (getInt → intValue, etc.)
2. **Type Transformer**
- Java primitives → Haxe types
- Array handling (including nested arrays)
- Generic type resolution
3. **Access Pattern Resolver**
- Determine if result is field access or method call
- Handle property syntax `name(get, never)`
- Preserve method calls where needed
### Phase 2: Type Handling
1. Keep existing Java → Haxe type transformations
2. Use `Dynamic` for runtime resolution
3. Cast results when needed for type safety (see the sketch below)
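A minimal sketch of that casting step inside a hypothetical generated write method; `getColor()` and `writeColorProperty` are illustrative names, while `spine.Color` and its `r` field appear in the generator output later in this diff:
```haxe
// Resolve a Dynamic reflection result to a typed value before touching typed fields.
private function writeColorProperty(obj:Dynamic):Void {
    var value:Dynamic = getPropertyValue(obj, "getColor()"); // getter name is an assumption
    if (value != null && Std.isOfType(value, spine.Color)) {
        var color:spine.Color = cast value; // safe downcast after the runtime check
        json.writeValue(color.r);
    } else {
        json.writeNull();
    }
}
```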
### Phase 3: Code Generation
1. Generate simpler code using reflection helpers
2. No need for complex getter-to-field mappings
3. Handle enums with runtime type checking (see the sketch below)
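A sketch of that runtime enum check, mirroring the dispatch the generated `writeProperty` emits later in this diff; `getBlendMode()` and `writeEnumProperty` are illustrative names:
```haxe
// Enum-like spine classes carry a "name" field; genuine Haxe enums need Type.enumConstructor.
private function writeEnumProperty(obj:Dynamic):Void {
    var value:Dynamic = getPropertyValue(obj, "getBlendMode()"); // illustrative getter
    if (value == null) {
        json.writeNull();
    } else if (Reflect.hasField(value, "name")) {
        json.writeValue(Reflect.field(value, "name"));
    } else {
        var ctor = Type.enumConstructor(value);
        json.writeValue(ctor != null ? ctor : "unknown");
    }
}
```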
1. **Property Code Generator**
- Generate correct Haxe syntax based on mapping type
- Handle nullable types properly
- Generate enum switch statements with correct Haxe enum syntax
## Advantages
2. **Method Generator**
- Handle abstract types with `Std.isOfType`
- Generate proper casting syntax
- Implement special methods (writeSkin, writeSkinEntry)
1. **Simplicity**: No need to parse mapping files or maintain lookup tables
2. **Robustness**: Automatically handles API changes
3. **Correctness**: Runtime resolution ensures we get the right value
4. **Maintainability**: Minimal special cases to maintain
### Phase 4: Validation and Testing
## Trade-offs
1. **Compile-time Validation**
- Generate code and attempt Haxe compilation
- Report type errors with context
1. **Performance**: Reflection is slower than direct access (acceptable for serialization)
2. **Type Safety**: Less compile-time checking (mitigated by runtime tests)
3. **Debugging**: Harder to trace field access (can add logging; see the sketch below)
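One way to add that logging, as a sketch: guard a trace inside `getPropertyValue` behind a hypothetical compile-time define (`-D serializer_debug`):
```haxe
// Inside getPropertyValue, just before returning (hypothetical -D serializer_debug define):
#if serializer_debug
Sys.println('getPropertyValue: ' + javaGetter + ' resolved via "' + propName + '"');
#end
```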
2. **Runtime Testing**
- Compare serialization output with Java reference
- Ensure all fields are properly serialized
## Key Design Decisions
1. **Data-Driven Approach**: Use the mapping file as the source of truth rather than hardcoded rules
2. **Explicit Over Implicit**: When in doubt, use the exact mapping from java-haxe-diff.md
3. **Fail-Fast**: If a mapping is missing or ambiguous, fail with a clear error message
4. **Type Safety**: Leverage Haxe's type system to catch errors at compile time
## Implementation Details
### Mapping Database Structure
```typescript
interface HaxeMapping {
    kind: 'field' | 'method' | 'property';
    haxeName: string;
    haxeType: string;
    propertyGetter?: string; // for (get, never) syntax
}

interface ClassMappings {
    className: string;
    getters: Map<string, HaxeMapping>;
}
```
### Transformation Algorithm
```
1. Load all mappings from java-haxe-diff.md
2. For each property in IR:
   a. Look up exact class + getter combination
   b. If not found, check for class-level patterns
   c. If not found, apply general transformation rules
   d. Transform type from Java to Haxe
   e. Generate appropriate access code
```
### Special Handling
1. **Timeline Classes**: All timeline getters follow consistent patterns
2. **Constraint Classes**: Handle getData/getPose/getAppliedPose consistently
3. **Array Properties**: Detect 1D vs 2D arrays based on context
4. **Enum Values**: Generate proper Haxe enum access syntax
5. **Circular References**: Maintain visitedObjects tracking
## Error Handling
1. **Missing Mappings**: Log unmapped getters with class context
2. **Type Mismatches**: Detect and report Java/Haxe type incompatibilities
3. **Compilation Errors**: Capture and display Haxe compiler output
## Testing Strategy
1. **Unit Tests**: Test individual transformation rules
2. **Integration Tests**: Generate full serializer and compile
3. **Snapshot Tests**: Compare output with reference implementation
This approach ensures accuracy, maintainability, and extensibility while leveraging the comprehensive mapping data we've collected.
## Example Generated Code
```haxe
private function writeAnimation(obj:Animation):Void {
    // ... cycle detection ...

    json.writeObjectStart();
    json.writeName("type");
    json.writeValue("Animation");

    // Use reflection for all properties
    json.writeName("timelines");
    writeArray(getPropertyValue(obj, "getTimelines()"), writeTimeline);

    json.writeName("duration");
    json.writeValue(getPropertyValue(obj, "getDuration()"));

    json.writeName("bones");
    writeIntArray(getPropertyValue(obj, "getBones()"));

    json.writeName("name");
    json.writeValue(getPropertyValue(obj, "getName()"));

    json.writeObjectEnd();
}
```
## Summary
This reflection-based approach eliminates the complexity of maintaining mapping tables while preserving correctness. The patterns we analyzed show that most getters follow predictable conventions, with only a handful of special cases that can be handled with a simple switch statement.


@ -16,9 +16,9 @@
}
},
"node_modules/@biomejs/biome": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.1.2.tgz",
"integrity": "sha512-yq8ZZuKuBVDgAS76LWCfFKHSYIAgqkxVB3mGVVpOe2vSkUTs7xG46zXZeNPRNVjiJuw0SZ3+J2rXiYx0RUpfGg==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.1.3.tgz",
"integrity": "sha512-KE/tegvJIxTkl7gJbGWSgun7G6X/n2M6C35COT6ctYrAy7SiPyNvi6JtoQERVK/VRbttZfgGq96j2bFmhmnH4w==",
"dev": true,
"license": "MIT OR Apache-2.0",
"bin": {
@ -32,20 +32,20 @@
"url": "https://opencollective.com/biome"
},
"optionalDependencies": {
"@biomejs/cli-darwin-arm64": "2.1.2",
"@biomejs/cli-darwin-x64": "2.1.2",
"@biomejs/cli-linux-arm64": "2.1.2",
"@biomejs/cli-linux-arm64-musl": "2.1.2",
"@biomejs/cli-linux-x64": "2.1.2",
"@biomejs/cli-linux-x64-musl": "2.1.2",
"@biomejs/cli-win32-arm64": "2.1.2",
"@biomejs/cli-win32-x64": "2.1.2"
"@biomejs/cli-darwin-arm64": "2.1.3",
"@biomejs/cli-darwin-x64": "2.1.3",
"@biomejs/cli-linux-arm64": "2.1.3",
"@biomejs/cli-linux-arm64-musl": "2.1.3",
"@biomejs/cli-linux-x64": "2.1.3",
"@biomejs/cli-linux-x64-musl": "2.1.3",
"@biomejs/cli-win32-arm64": "2.1.3",
"@biomejs/cli-win32-x64": "2.1.3"
}
},
"node_modules/@biomejs/cli-darwin-arm64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.1.2.tgz",
"integrity": "sha512-leFAks64PEIjc7MY/cLjE8u5OcfBKkcDB0szxsWUB4aDfemBep1WVKt0qrEyqZBOW8LPHzrFMyDl3FhuuA0E7g==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.1.3.tgz",
"integrity": "sha512-LFLkSWRoSGS1wVUD/BE6Nlt2dSn0ulH3XImzg2O/36BoToJHKXjSxzPEMAqT9QvwVtk7/9AQhZpTneERU9qaXA==",
"cpu": [
"arm64"
],
@ -60,9 +60,9 @@
}
},
"node_modules/@biomejs/cli-darwin-x64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.1.2.tgz",
"integrity": "sha512-Nmmv7wRX5Nj7lGmz0FjnWdflJg4zii8Ivruas6PBKzw5SJX/q+Zh2RfnO+bBnuKLXpj8kiI2x2X12otpH6a32A==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.1.3.tgz",
"integrity": "sha512-Q/4OTw8P9No9QeowyxswcWdm0n2MsdCwWcc5NcKQQvzwPjwuPdf8dpPPf4r+x0RWKBtl1FLiAUtJvBlri6DnYw==",
"cpu": [
"x64"
],
@ -77,9 +77,9 @@
}
},
"node_modules/@biomejs/cli-linux-arm64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.1.2.tgz",
"integrity": "sha512-NWNy2Diocav61HZiv2enTQykbPP/KrA/baS7JsLSojC7Xxh2nl9IczuvE5UID7+ksRy2e7yH7klm/WkA72G1dw==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.1.3.tgz",
"integrity": "sha512-2hS6LgylRqMFmAZCOFwYrf77QMdUwJp49oe8PX/O8+P2yKZMSpyQTf3Eo5ewnsMFUEmYbPOskafdV1ds1MZMJA==",
"cpu": [
"arm64"
],
@ -94,9 +94,9 @@
}
},
"node_modules/@biomejs/cli-linux-arm64-musl": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.1.2.tgz",
"integrity": "sha512-qgHvafhjH7Oca114FdOScmIKf1DlXT1LqbOrrbR30kQDLFPEOpBG0uzx6MhmsrmhGiCFCr2obDamu+czk+X0HQ==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.1.3.tgz",
"integrity": "sha512-KXouFSBnoxAWZYDQrnNRzZBbt5s9UJkIm40hdvSL9mBxSSoxRFQJbtg1hP3aa8A2SnXyQHxQfpiVeJlczZt76w==",
"cpu": [
"arm64"
],
@ -111,9 +111,9 @@
}
},
"node_modules/@biomejs/cli-linux-x64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.1.2.tgz",
"integrity": "sha512-Km/UYeVowygTjpX6sGBzlizjakLoMQkxWbruVZSNE6osuSI63i4uCeIL+6q2AJlD3dxoiBJX70dn1enjQnQqwA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.1.3.tgz",
"integrity": "sha512-NxlSCBhLvQtWGagEztfAZ4WcE1AkMTntZV65ZvR+J9jp06+EtOYEBPQndA70ZGhHbEDG57bR6uNvqkd1WrEYVA==",
"cpu": [
"x64"
],
@ -128,9 +128,9 @@
}
},
"node_modules/@biomejs/cli-linux-x64-musl": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.1.2.tgz",
"integrity": "sha512-xlB3mU14ZUa3wzLtXfmk2IMOGL+S0aHFhSix/nssWS/2XlD27q+S6f0dlQ8WOCbYoXcuz8BCM7rCn2lxdTrlQA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.1.3.tgz",
"integrity": "sha512-KaLAxnROouzIWtl6a0Y88r/4hW5oDUJTIqQorOTVQITaKQsKjZX4XCUmHIhdEk8zMnaiLZzRTAwk1yIAl+mIew==",
"cpu": [
"x64"
],
@ -145,9 +145,9 @@
}
},
"node_modules/@biomejs/cli-win32-arm64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.1.2.tgz",
"integrity": "sha512-G8KWZli5ASOXA3yUQgx+M4pZRv3ND16h77UsdunUL17uYpcL/UC7RkWTdkfvMQvogVsAuz5JUcBDjgZHXxlKoA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.1.3.tgz",
"integrity": "sha512-V9CUZCtWH4u0YwyCYbQ3W5F4ZGPWp2C2TYcsiWFNNyRfmOW1j/TY/jAurl33SaRjgZPO5UUhGyr9m6BN9t84NQ==",
"cpu": [
"arm64"
],
@ -162,9 +162,9 @@
}
},
"node_modules/@biomejs/cli-win32-x64": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.1.2.tgz",
"integrity": "sha512-9zajnk59PMpjBkty3bK2IrjUsUHvqe9HWwyAWQBjGLE7MIBjbX2vwv1XPEhmO2RRuGoTkVx3WCanHrjAytICLA==",
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.1.3.tgz",
"integrity": "sha512-dxy599q6lgp8ANPpR8sDMscwdp9oOumEsVXuVCVT9N2vAho8uYXlCz53JhxX6LtJOXaE73qzgkGQ7QqvFlMC0g==",
"cpu": [
"x64"
],


@ -8,7 +8,7 @@ import type { Property, SerializerIR } from './types';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
function transformType(javaType: string): string {
function transformType (javaType: string): string {
// Java → Haxe type mappings
const primitiveMap: Record<string, string> = {
'String': 'String',
@ -36,302 +36,352 @@ function transformType(javaType: string): string {
// Java Array<T> stays Array<T> in Haxe
if (simpleName.startsWith('Array<')) {
return simpleName;
const match = simpleName.match(/Array<(.+)>/);
if (match) {
return `Array<${transformType(match[1])}>`;
}
}
// Handle special generic types
if (simpleName === 'Constraint') {
return 'Constraint<Any, Any, Any>'; // Use Any for generic parameters
return 'Constraint<Dynamic, Dynamic, Dynamic>';
}
if (simpleName === 'ConstraintData') {
return 'ConstraintData<Any, Any>'; // Use Any for generic parameters
return 'ConstraintData<Dynamic, Dynamic>';
}
// Handle TransformConstraintData inner classes
if (['FromProperty', 'FromRotate', 'FromScaleX', 'FromScaleY', 'FromShearY', 'FromX', 'FromY',
'ToProperty', 'ToRotate', 'ToScaleX', 'ToScaleY', 'ToShearY', 'ToX', 'ToY'].includes(simpleName)) {
return 'spine.TransformConstraintData.' + simpleName;
}
// Object types: keep class name, remove package
return simpleName;
}
function mapJavaGetterToHaxeField(javaGetter: string, objName: string): string {
// Map Java getter methods to Haxe field access
// Based on analysis of existing Haxe classes in spine-haxe/spine-haxe/spine/
if (javaGetter.endsWith('()')) {
const methodName = javaGetter.slice(0, -2);
// Remove get/is prefix and convert to camelCase field
if (methodName.startsWith('get')) {
const fieldName = methodName.slice(3);
const haxeField = fieldName.charAt(0).toLowerCase() + fieldName.slice(1);
return `${objName}.${haxeField}`;
}
if (methodName.startsWith('is')) {
const fieldName = methodName.slice(2);
const haxeField = fieldName.charAt(0).toLowerCase() + fieldName.slice(1);
return `${objName}.${haxeField}`;
}
// Some methods might be direct field names
return `${objName}.${methodName}`;
}
// Direct field access (already in correct format)
return `${objName}.${javaGetter}`;
}
function generatePropertyCode(property: Property, indent: string, enumMappings: { [enumName: string]: { [javaValue: string]: string } }): string[] {
function generateReflectionBasedHaxe (ir: SerializerIR): string {
const lines: string[] = [];
const accessor = mapJavaGetterToHaxeField(property.getter, 'obj');
switch (property.kind) {
case "primitive":
lines.push(`${indent}json.writeValue(${accessor});`);
break;
case "object":
if (property.isNullable) {
lines.push(`${indent}if (${accessor} == null) {`);
lines.push(`${indent} json.writeNull();`);
lines.push(`${indent}} else {`);
lines.push(`${indent} ${property.writeMethodCall}(${accessor});`);
lines.push(`${indent}}`);
} else {
lines.push(`${indent}${property.writeMethodCall}(${accessor});`);
}
break;
case "enum": {
const enumName = property.enumName;
const enumMap = enumMappings[enumName];
if (property.isNullable) {
lines.push(`${indent}if (${accessor} == null) {`);
lines.push(`${indent} json.writeNull();`);
lines.push(`${indent}} else {`);
}
if (enumMap && Object.keys(enumMap).length > 0) {
// Generate switch statement for enum mapping
lines.push(`${indent}${property.isNullable ? ' ' : ''}switch (${accessor}) {`);
for (const [javaValue, haxeValue] of Object.entries(enumMap)) {
lines.push(`${indent}${property.isNullable ? ' ' : ''} case ${haxeValue}: json.writeValue("${javaValue}");`);
}
lines.push(`${indent}${property.isNullable ? ' ' : ''} default: json.writeValue("unknown");`);
lines.push(`${indent}${property.isNullable ? ' ' : ''}}`);
} else {
// Fallback using Type.enumConstructor or similar
lines.push(`${indent}${property.isNullable ? ' ' : ''}json.writeValue(Type.enumConstructor(${accessor}));`);
}
if (property.isNullable) {
lines.push(`${indent}}`);
}
break;
}
case "array": {
if (property.isNullable) {
lines.push(`${indent}if (${accessor} == null) {`);
lines.push(`${indent} json.writeNull();`);
lines.push(`${indent}} else {`);
lines.push(`${indent} json.writeArrayStart();`);
lines.push(`${indent} for (item in ${accessor}) {`);
} else {
lines.push(`${indent}json.writeArrayStart();`);
lines.push(`${indent}for (item in ${accessor}) {`);
}
const itemIndent = property.isNullable ? `${indent} ` : `${indent} `;
if (property.elementKind === "primitive") {
lines.push(`${itemIndent}json.writeValue(item);`);
} else {
lines.push(`${itemIndent}${property.writeMethodCall}(item);`);
}
if (property.isNullable) {
lines.push(`${indent} }`);
lines.push(`${indent} json.writeArrayEnd();`);
lines.push(`${indent}}`);
} else {
lines.push(`${indent}}`);
lines.push(`${indent}json.writeArrayEnd();`);
}
break;
}
case "nestedArray": {
if (property.isNullable) {
lines.push(`${indent}if (${accessor} == null) {`);
lines.push(`${indent} json.writeNull();`);
lines.push(`${indent}} else {`);
}
const outerIndent = property.isNullable ? `${indent} ` : indent;
lines.push(`${outerIndent}json.writeArrayStart();`);
lines.push(`${outerIndent}for (nestedArray in ${accessor}) {`);
lines.push(`${outerIndent} if (nestedArray == null) {`);
lines.push(`${outerIndent} json.writeNull();`);
lines.push(`${outerIndent} } else {`);
lines.push(`${outerIndent} json.writeArrayStart();`);
lines.push(`${outerIndent} for (elem in nestedArray) {`);
lines.push(`${outerIndent} json.writeValue(elem);`);
lines.push(`${outerIndent} }`);
lines.push(`${outerIndent} json.writeArrayEnd();`);
lines.push(`${outerIndent} }`);
lines.push(`${outerIndent}}`);
lines.push(`${outerIndent}json.writeArrayEnd();`);
if (property.isNullable) {
lines.push(`${indent}}`);
}
break;
}
}
return lines;
}
function generateHaxeFromIR(ir: SerializerIR): string {
const haxeOutput: string[] = [];
// Generate Haxe file header
haxeOutput.push('package spine.utils;');
haxeOutput.push('');
haxeOutput.push('import haxe.ds.StringMap;');
haxeOutput.push('import spine.*;');
haxeOutput.push('import spine.animation.*;');
haxeOutput.push('import spine.attachments.*;');
haxeOutput.push('');
haxeOutput.push('class SkeletonSerializer {');
haxeOutput.push(' private var visitedObjects:StringMap<String> = new StringMap();');
haxeOutput.push(' private var nextId:Int = 1;');
haxeOutput.push(' private var json:JsonWriter;');
haxeOutput.push('');
haxeOutput.push(' public function new() {}');
haxeOutput.push('');
// File header
lines.push('package spine.utils;');
lines.push('');
lines.push('import haxe.ds.ObjectMap;');
lines.push('import spine.*;');
lines.push('import spine.animation.*;');
lines.push('import spine.attachments.*;');
lines.push('');
lines.push('class SkeletonSerializer {');
lines.push(' private var visitedObjects:ObjectMap<Dynamic, String> = new ObjectMap();');
lines.push(' private var nextId:Int = 1;');
lines.push(' private var json:JsonWriter;');
lines.push('');
lines.push(' public function new() {}');
lines.push('');
// Generate public methods
for (const method of ir.publicMethods) {
const haxeParamType = transformType(method.paramType);
haxeOutput.push(` public function ${method.name}(${method.paramName}:${haxeParamType}):String {`);
haxeOutput.push(' visitedObjects = new StringMap();');
haxeOutput.push(' nextId = 1;');
haxeOutput.push(' json = new JsonWriter();');
haxeOutput.push(` ${method.writeMethodCall}(${method.paramName});`);
haxeOutput.push(' return json.getString();');
haxeOutput.push(' }');
haxeOutput.push('');
lines.push(` public function ${method.name}(${method.paramName}:${haxeParamType}):String {`);
lines.push(' visitedObjects = new ObjectMap();');
lines.push(' nextId = 1;');
lines.push(' json = new JsonWriter();');
lines.push(` ${method.writeMethodCall}(${method.paramName});`);
lines.push(' return json.getString();');
lines.push(' }');
lines.push('');
}
// Core reflection helper methods
lines.push(' // Core reflection helpers');
lines.push(' private function extractPropertyName(javaGetter:String):String {');
lines.push(' var getter = javaGetter;');
lines.push(' if (getter.indexOf("()") != -1) getter = getter.substr(0, getter.length - 2);');
lines.push(' ');
lines.push(' if (getter.substr(0, 3) == "get") {');
lines.push(' var prop = getter.substr(3);');
lines.push(' return prop.charAt(0).toLowerCase() + prop.substr(1);');
lines.push(' }');
lines.push(' if (getter.substr(0, 2) == "is") {');
lines.push(' var prop = getter.substr(2);');
lines.push(' return prop.charAt(0).toLowerCase() + prop.substr(1);');
lines.push(' }');
lines.push(' return getter;');
lines.push(' }');
lines.push('');
lines.push(' private function getSpecialFieldNames(javaGetter:String):Array<String> {');
lines.push(' return switch(javaGetter) {');
lines.push(' case "getInt()": ["intValue"];');
lines.push(' case "getFloat()": ["floatValue"];');
lines.push(' case "getString()": ["stringValue"];');
lines.push(' case "getPhysicsConstraints()": ["physics"];');
lines.push(' case "getUpdateCache()": ["_updateCache"];');
lines.push(' case "getSetupPose()": ["setup"];');
lines.push(' case "getAppliedPose()": ["applied"];');
lines.push(' default: [];');
lines.push(' }');
lines.push(' }');
lines.push('');
lines.push(' private function getPropertyValue(obj:Dynamic, javaGetter:String):Dynamic {');
lines.push(' var propName = extractPropertyName(javaGetter);');
lines.push(' ');
lines.push(' // Try direct field access first');
lines.push(' if (Reflect.hasField(obj, propName)) {');
lines.push(' return Reflect.field(obj, propName);');
lines.push(' }');
lines.push(' ');
lines.push(' // Try special field variations');
lines.push(' var specialNames = getSpecialFieldNames(javaGetter);');
lines.push(' for (name in specialNames) {');
lines.push(' if (Reflect.hasField(obj, name)) {');
lines.push(' return Reflect.field(obj, name);');
lines.push(' }');
lines.push(' }');
lines.push(' ');
lines.push(' // Try method access (remove parentheses)');
lines.push(' var methodName = javaGetter;');
lines.push(' if (methodName.indexOf("()") != -1) {');
lines.push(' methodName = methodName.substr(0, methodName.length - 2);');
lines.push(' }');
lines.push(' if (Reflect.hasField(obj, methodName)) {');
lines.push(' var method = Reflect.field(obj, methodName);');
lines.push(' if (Reflect.isFunction(method)) {');
lines.push(' return Reflect.callMethod(obj, method, []);');
lines.push(' }');
lines.push(' }');
lines.push(' ');
lines.push(' // Last resort: return null and let the caller handle it');
lines.push(' return null;');
lines.push(' }');
lines.push('');
// Generate write methods
for (const method of ir.writeMethods) {
const shortName = method.paramType.split('.').pop();
const haxeType = transformType(method.paramType);
haxeOutput.push(` private function ${method.name}(obj:${haxeType}):Void {`);
lines.push(` private function ${method.name}(obj:Dynamic):Void {`);
if (method.isAbstractType) {
// Handle abstract types with Std.isOfType chain (Haxe equivalent of instanceof)
// Handle abstract types with Std.isOfType chain
if (method.subtypeChecks && method.subtypeChecks.length > 0) {
let first = true;
for (const subtype of method.subtypeChecks) {
const subtypeHaxeName = transformType(subtype.typeName);
if (first) {
haxeOutput.push(` if (Std.isOfType(obj, ${subtypeHaxeName})) {`);
lines.push(` if (Std.isOfType(obj, ${subtypeHaxeName})) {`);
first = false;
} else {
haxeOutput.push(` } else if (Std.isOfType(obj, ${subtypeHaxeName})) {`);
lines.push(` } else if (Std.isOfType(obj, ${subtypeHaxeName})) {`);
}
haxeOutput.push(` ${subtype.writeMethodCall}(cast(obj, ${subtypeHaxeName}));`);
lines.push(` ${subtype.writeMethodCall}(obj);`);
}
haxeOutput.push(' } else {');
haxeOutput.push(` throw new spine.SpineException("Unknown ${shortName} type");`);
haxeOutput.push(' }');
lines.push(' } else {');
lines.push(` throw new spine.SpineException("Unknown ${shortName} type");`);
lines.push(' }');
} else {
haxeOutput.push(' json.writeNull(); // No concrete implementations after filtering exclusions');
lines.push(' json.writeNull(); // No concrete implementations');
}
} else {
// Handle concrete types - add cycle detection
haxeOutput.push(' if (visitedObjects.exists(obj)) {');
haxeOutput.push(' json.writeValue(visitedObjects.get(obj));');
haxeOutput.push(' return;');
haxeOutput.push(' }');
// Handle concrete types
lines.push(' if (visitedObjects.exists(obj)) {');
lines.push(' json.writeValue(visitedObjects.get(obj));');
lines.push(' return;');
lines.push(' }');
lines.push('');
// Generate reference string
const nameGetter = method.properties.find(p =>
(p.kind === 'object' || p.kind === "primitive") &&
p.getter === 'getName()' &&
p.valueType === 'String'
const hasNameProperty = method.properties.some(p =>
p.name === 'name' && p.valueType === 'String'
);
if (nameGetter) {
const nameAccessor = mapJavaGetterToHaxeField('getName()', 'obj');
haxeOutput.push(` var refString = ${nameAccessor} != null ? "<${shortName}-" + ${nameAccessor} + ">" : "<${shortName}-" + (nextId++) + ">";`);
if (hasNameProperty) {
lines.push(' var nameValue = getPropertyValue(obj, "getName()");');
lines.push(` var refString = nameValue != null ? "<${shortName}-" + nameValue + ">" : "<${shortName}-" + nextId++ + ">";`);
} else {
haxeOutput.push(` var refString = "<${shortName}-" + (nextId++) + ">";`);
lines.push(` var refString = "<${shortName}-" + nextId++ + ">";`);
}
haxeOutput.push(' visitedObjects.set(obj, refString);');
haxeOutput.push('');
haxeOutput.push(' json.writeObjectStart();');
lines.push(' visitedObjects.set(obj, refString);');
lines.push('');
lines.push(' json.writeObjectStart();');
lines.push(' json.writeName("refString");');
lines.push(' json.writeValue(refString);');
lines.push(' json.writeName("type");');
lines.push(` json.writeValue("${shortName}");`);
// Write reference string and type
haxeOutput.push(' json.writeName("refString");');
haxeOutput.push(' json.writeValue(refString);');
haxeOutput.push(' json.writeName("type");');
haxeOutput.push(` json.writeValue("${shortName}");`);
// Write properties
// Write properties using reflection
for (const property of method.properties) {
haxeOutput.push('');
haxeOutput.push(` json.writeName("${property.name}");`);
const propertyLines = generatePropertyCode(property, ' ', ir.enumMappings);
haxeOutput.push(...propertyLines);
lines.push('');
lines.push(` json.writeName("${property.name}");`);
lines.push(` writeProperty(obj, "${property.getter}", ${JSON.stringify(property)});`);
}
haxeOutput.push('');
haxeOutput.push(' json.writeObjectEnd();');
lines.push('');
lines.push(' json.writeObjectEnd();');
}
haxeOutput.push(' }');
haxeOutput.push('');
lines.push(' }');
lines.push('');
}
// Add helper methods for special types (following C++ pattern)
haxeOutput.push(' // Helper methods for special types');
haxeOutput.push(' private function writeColor(obj:spine.Color):Void {');
haxeOutput.push(' if (obj == null) {');
haxeOutput.push(' json.writeNull();');
haxeOutput.push(' } else {');
haxeOutput.push(' json.writeObjectStart();');
haxeOutput.push(' json.writeName("r");');
haxeOutput.push(' json.writeValue(obj.r);');
haxeOutput.push(' json.writeName("g");');
haxeOutput.push(' json.writeValue(obj.g);');
haxeOutput.push(' json.writeName("b");');
haxeOutput.push(' json.writeValue(obj.b);');
haxeOutput.push(' json.writeName("a");');
haxeOutput.push(' json.writeValue(obj.a);');
haxeOutput.push(' json.writeObjectEnd();');
haxeOutput.push(' }');
haxeOutput.push(' }');
haxeOutput.push('');
// Generic property writer using reflection
lines.push(' private function writeProperty(obj:Dynamic, javaGetter:String, propertyInfo:Dynamic):Void {');
lines.push(' var value = getPropertyValue(obj, javaGetter);');
lines.push(' ');
lines.push(' if (value == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' ');
lines.push(' switch (propertyInfo.kind) {');
lines.push(' case "primitive":');
lines.push(' json.writeValue(value);');
lines.push(' ');
lines.push(' case "object":');
lines.push(' var writeMethod = Reflect.field(this, propertyInfo.writeMethodCall);');
lines.push(' if (writeMethod != null) {');
lines.push(' Reflect.callMethod(this, writeMethod, [value]);');
lines.push(' } else {');
lines.push(' json.writeValue("<" + propertyInfo.valueType + ">");');
lines.push(' }');
lines.push(' ');
lines.push(' case "enum":');
lines.push(' // Handle enum-like classes with name property');
lines.push(' if (Reflect.hasField(value, "name")) {');
lines.push(' json.writeValue(Reflect.field(value, "name"));');
lines.push(' } else {');
lines.push(' // Fallback for actual Haxe enums');
lines.push(' var enumValue = Type.enumConstructor(value);');
lines.push(' json.writeValue(enumValue != null ? enumValue : "unknown");');
lines.push(' }');
lines.push(' ');
lines.push(' case "array":');
lines.push(' writeArray(value, propertyInfo);');
lines.push(' ');
lines.push(' case "nestedArray":');
lines.push(' writeNestedArray(value);');
lines.push(' }');
lines.push(' }');
lines.push('');
haxeOutput.push('}');
lines.push(' private function writeArray(arr:Dynamic, propertyInfo:Dynamic):Void {');
lines.push(' if (arr == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' ');
lines.push(' json.writeArrayStart();');
lines.push(' for (item in cast(arr, Array<Dynamic>)) {');
lines.push(' if (propertyInfo.elementKind == "primitive") {');
lines.push(' json.writeValue(item);');
lines.push(' } else if (propertyInfo.writeMethodCall != null) {');
lines.push(' var writeMethod = Reflect.field(this, propertyInfo.writeMethodCall);');
lines.push(' if (writeMethod != null) {');
lines.push(' Reflect.callMethod(this, writeMethod, [item]);');
lines.push(' } else {');
lines.push(' json.writeValue(item);');
lines.push(' }');
lines.push(' } else {');
lines.push(' json.writeValue(item);');
lines.push(' }');
lines.push(' }');
lines.push(' json.writeArrayEnd();');
lines.push(' }');
lines.push('');
return haxeOutput.join('\n');
lines.push(' private function writeNestedArray(arr:Dynamic):Void {');
lines.push(' if (arr == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' ');
lines.push(' json.writeArrayStart();');
lines.push(' for (nestedArray in cast(arr, Array<Dynamic>)) {');
lines.push(' if (nestedArray == null) {');
lines.push(' json.writeNull();');
lines.push(' } else {');
lines.push(' json.writeArrayStart();');
lines.push(' for (elem in cast(nestedArray, Array<Dynamic>)) {');
lines.push(' json.writeValue(elem);');
lines.push(' }');
lines.push(' json.writeArrayEnd();');
lines.push(' }');
lines.push(' }');
lines.push(' json.writeArrayEnd();');
lines.push(' }');
lines.push('');
// Special type helpers
lines.push(' // Helper methods for special types');
lines.push(' private function writeColor(obj:Dynamic):Void {');
lines.push(' if (obj == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' json.writeObjectStart();');
lines.push(' json.writeName("r");');
lines.push(' json.writeValue(Reflect.field(obj, "r"));');
lines.push(' json.writeName("g");');
lines.push(' json.writeValue(Reflect.field(obj, "g"));');
lines.push(' json.writeName("b");');
lines.push(' json.writeValue(Reflect.field(obj, "b"));');
lines.push(' json.writeName("a");');
lines.push(' json.writeValue(Reflect.field(obj, "a"));');
lines.push(' json.writeObjectEnd();');
lines.push(' }');
lines.push('');
lines.push(' private function writeTextureRegion(obj:Dynamic):Void {');
lines.push(' json.writeValue("<TextureRegion>");');
lines.push(' }');
lines.push('');
lines.push(' private function writeIntArray(obj:Dynamic):Void {');
lines.push(' if (obj == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' // IntArray in Java might be a custom type, try to get its array data');
lines.push(' var items = getPropertyValue(obj, "getItems()");');
lines.push(' if (items != null) {');
lines.push(' writeArray(items, {elementKind: "primitive"});');
lines.push(' } else {');
lines.push(' // Fallback: assume it\'s already an array');
lines.push(' writeArray(obj, {elementKind: "primitive"});');
lines.push(' }');
lines.push(' }');
lines.push('');
lines.push(' private function writeFloatArray(obj:Dynamic):Void {');
lines.push(' if (obj == null) {');
lines.push(' json.writeNull();');
lines.push(' return;');
lines.push(' }');
lines.push(' // FloatArray in Java might be a custom type, try to get its array data');
lines.push(' var items = getPropertyValue(obj, "getItems()");');
lines.push(' if (items != null) {');
lines.push(' writeArray(items, {elementKind: "primitive"});');
lines.push(' } else {');
lines.push(' // Fallback: assume it\'s already an array');
lines.push(' writeArray(obj, {elementKind: "primitive"});');
lines.push(' }');
lines.push(' }');
lines.push('}');
return lines.join('\n');
}
async function validateGeneratedHaxeCode(haxeCode: string, outputPath: string): Promise<void> {
async function validateGeneratedHaxeCode (haxeCode: string, outputPath: string): Promise<void> {
// Write code to file
fs.writeFileSync(outputPath, haxeCode);
try {
// Basic syntax validation by attempting to parse with Haxe compiler
// Use JsonWriter.hx as main to avoid framework dependencies
// Basic syntax validation
execSync('haxe -cp spine-haxe --no-output -main spine.utils.JsonWriter', {
cwd: path.resolve(__dirname, '../../spine-haxe'),
stdio: 'pipe'
@ -340,14 +390,12 @@ async function validateGeneratedHaxeCode(haxeCode: string, outputPath: string):
console.log('✓ Generated Haxe serializer syntax validates successfully');
} catch (error: any) {
// Don't fail immediately - the serializer might still work despite validation issues
// This is because the Haxe runtime has optional dependencies for different frameworks
console.log('⚠ Haxe serializer validation had issues (may still work):');
console.log(error.message.split('\n').slice(0, 3).join('\n'));
}
}
async function main(): Promise<void> {
async function main (): Promise<void> {
try {
// Read the IR file
const irFile = path.resolve(__dirname, '../output/serializer-ir.json');
@ -358,8 +406,8 @@ async function main(): Promise<void> {
const ir: SerializerIR = JSON.parse(fs.readFileSync(irFile, 'utf8'));
// Generate Haxe serializer from IR
const haxeCode = generateHaxeFromIR(ir);
// Generate Haxe serializer using reflection-based approach
const haxeCode = generateReflectionBasedHaxe(ir);
// Write the Haxe file
const haxeFile = path.resolve(
@ -369,13 +417,13 @@ async function main(): Promise<void> {
fs.mkdirSync(path.dirname(haxeFile), { recursive: true });
// Validate generated code compiles before writing
// Validate generated code
await validateGeneratedHaxeCode(haxeCode, haxeFile);
console.log(`Generated Haxe serializer from IR: ${haxeFile}`);
console.log(`Generated reflection-based Haxe serializer: ${haxeFile}`);
console.log(`- ${ir.publicMethods.length} public methods`);
console.log(`- ${ir.writeMethods.length} write methods`);
console.log(`- ${Object.keys(ir.enumMappings).length} enum mappings`);
console.log(`- Uses runtime reflection for property access`);
} catch (error: any) {
console.error('Error:', error.message);
@ -389,4 +437,4 @@ if (import.meta.url === `file://${process.argv[1]}`) {
main();
}
export { generateHaxeFromIR };
export { generateReflectionBasedHaxe };


@ -13,7 +13,7 @@ interface TypeLocation {
line: number;
}
function findTypeInJava(typeName: string): TypeLocation | null {
function findTypeInJava (typeName: string): TypeLocation | null {
try {
// Search for class, interface, or enum definitions
const result = execSync(
@ -34,7 +34,7 @@ function findTypeInJava(typeName: string): TypeLocation | null {
return null;
}
function findTypeInHaxe(typeName: string): TypeLocation | null {
function findTypeInHaxe (typeName: string): TypeLocation | null {
try {
// Search for class, interface, enum, typedef, or abstract definitions
const result = execSync(
@ -55,7 +55,7 @@ function findTypeInHaxe(typeName: string): TypeLocation | null {
return null;
}
async function main(): Promise<void> {
async function main (): Promise<void> {
// Read the IR file
const irFile = path.resolve(__dirname, '../output/serializer-ir.json');
if (!fs.existsSync(irFile)) {