Created
September 2, 2025 04:00
-
-
Save jamonholmgren/6d902029ec5081a38d1423011cc7dbd5 to your computer and use it in GitHub Desktop.
A Godot 4.4+ sync system. *Tips hat* This represents probably 50+ hours of hard work; I'm abandoning it, because I think simpler RPCs are probably better. But it still has a place in my heart.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # This class should be added as an autoload called "Sync". | |
| # This is an efficient way to sync data to other players. | |
| # | |
| # Usage in a node: | |
| # func _ready(): | |
| # Sync.enable(self) | |
| # | |
| # func on_sync(p: Sync.SyncPack) -> void: | |
| # # All data is always collected | |
| # global_transform = p.sync(global_transform) | |
| # linear_velocity = p.sync(linear_velocity) | |
| # _some = p.sync(_some) | |
| # _other = p.sync_slow(_other) | |
| # _heavy = p.sync_slow(_heavy, 0.3) # slow sync; second arg is the lerp factor (0.0-1.0), not a rate | |
| # | |
| # # Optional: implement should_sync(pid: int) -> bool to control who receives updates | |
| # | |
| # # This is called when players join the game to sync props to them | |
| # func on_props_requested() -> void: | |
| # # Sync all props to all players | |
| # Sync.sync_prop(self, &"some_prop") | |
| # Sync.sync_prop(self, &"some_other_prop") | |
| # ... | |
# Autoload singleton ("Sync"): efficiently syncs node data to other players.
class_name JamminSync extends Node
# --- Config ---
const TICK_HZ: int = 30 # unreliable sync ticks per second
const MTU_BYTES: int = 1200 # rough per-chunk byte budget for unreliable sends
# --- Types ---
# Wire type tags for the reliable property sync
# (_sync_reliable_entries / _receive_reliable).
# Values 2 and 4 are unused -- presumably reserved; confirm before reusing.
const NULL: int = 0
const STRING: int = 1
const FLOAT: int = 3
const INT: int = 5
const VECTOR2: int = 6
const VECTOR3: int = 7
const VECTOR4: int = 8
const BOOL: int = 9
const BITMAP: int = 10
const NODE: int = 11
# NOTE(review): NODE_PATH shares NODE's tag (11). Both value kinds are stored
# in the same node_path_values array on send, and the receiver's match
# statement dispatches tag 11 to the NODE branch only, which keeps the shared
# read index in order. Giving NODE_PATH a distinct tag WITHOUT also merging
# the receiver's two read indices would desync the array -- confirm before
# changing either side.
const NODE_PATH: int = 11
const ARRAY_INT_32: int = 12
const ARRAY_INT_64: int = 13
const ARRAY_FLOAT_32: int = 14
const ARRAY_FLOAT_64: int = 15
const ARRAY_VECTOR2: int = 16
const ARRAY_VECTOR3: int = 17
const ARRAY_VECTOR4: int = 18
# --- Unreliable state ---
var _entries: Array[Entry] = [] # nodes registered via enable()
var _next_us: int = 0 # microsecond timestamp of the next sync tick
var _seq: int = 0 # outgoing sequence number, bumped once per tick
var _cursor: int = 0 # round-robin position into _entries (shared across peers)
var _last_seq_from: Dictionary[int, int] = {} # player_id -> last seq received
# --- Reliable state ---
var _updated_props: Dictionary[NodePath, Dictionary] = {} # node path -> { prop: value } pending send
var _prop_last_update_tick: Dictionary[NodePath, int] = {} # node+prop path -> Game.tick of last send
# --- Public API ---

## Start automatically syncing this node's data to other players.
func enable(node: Node) -> void:
	_enable(node)

## Stop automatically syncing this node's data to other players.
func disable(node: Node) -> void:
	_disable(node)
## Clear all sync state: registered entries, tick/sequence counters,
## and all pending/recorded reliable property updates.
func reset() -> void:
	_seq = 0
	_cursor = 0
	_next_us = 0
	_entries.clear()
	_last_seq_from.clear()
	_prop_last_update_tick.clear()
	_updated_props.clear()
## Re-register a node with the sync system.
## Useful if you've moved it to a new location in the tree, since
## entries resolve the node by its path at sync time.
func refresh(node: Node) -> void:
	disable(node)
	enable(node)
## Determines if and when a node should be synced to a player.
## A node may also provide its own should_sync method; the internal
## dispatch (_sync_type) will call that and use its result instead.
func should_sync(node: Node3D, pid: int) -> StringName:
	return _should_sync(node, pid)
| # Sync.set_prop(node, &"some_prop", 123) | |
| # | |
| # Sets a prop on a node and reliably syncs it to all other clients. | |
| # Use this instead of setting the prop directly to make | |
| # sure it's always synced to all clients. | |
| # Does NOT respect the authority of the node; do your own checks | |
| # with Sync.owns(node) if you need to make sure it's the authority. | |
| func set_prop(node: Node, prop: StringName, value: Variant) -> void: | |
| # sets on the node locally first, immediately | |
| if str(prop).begins_with("meta_"): node.set_meta.call_deferred(prop, value) | |
| elif str(prop).begins_with("set_"): node.call_deferred(prop, value) | |
| else: node.set.call_deferred(prop, value) | |
| _sync_reliable(node, { prop: value }) | |
# Sync.sync_prop(node, &"some_prop")
# Convenience method to sync a prop's current value without having to set it.
func sync_prop(node: Node, prop: StringName) -> void:
	# Only the node's authority may broadcast its props.
	if not owns(node): return
	_sync_reliable(node, { prop: node.get(prop) })
# Gathers the current value of every reliably-tracked property we have
# authority over and broadcasts the snapshot to all other players.
# Can be called locally, or requested remotely (e.g. when a player joins).
@rpc("any_peer", "reliable", "call_remote", 901)
func sync_all_node_state() -> void:
	var data_to_send: Dictionary[NodePath, Variant] = {}
	var bad_nodes: Array[NodePath] = []
	# Keys of _prop_last_update_tick are combined node-path + property paths.
	for prop_path: NodePath in _prop_last_update_tick:
		# Split the combined path back into node path (names) and prop (subnames).
		var node_path: NodePath = NodePath(prop_path.get_concatenated_names())
		var node: Node = get_node_or_null(node_path)
		# NOTE(review): Pool.bad_node is a project helper; presumably it treats
		# null/freed/queued-for-deletion nodes as bad -- confirm it handles null.
		if Pool.bad_node(node): bad_nodes.append(prop_path); continue
		# Only sync if we own the node!
		if not node.is_multiplayer_authority(): continue
		var property_name: StringName = prop_path.get_concatenated_subnames()
		var val: Variant = node.get(property_name)
		if val is Node:
			var n: Node = val
			assert(n.is_inside_tree(), "Sync: Node %s is not inside tree" % n.name)
			val = n.get_path() # send a path instead of a node (Nodes don't serialize over RPC)
		data_to_send[prop_path] = val
	# Now send the data to all players
	_receive_all_node_state.rpc(data_to_send)
	# Cleanup bad nodes so we stop trying to sync them next time
	for prop_path: NodePath in bad_nodes: _prop_last_update_tick.erase(prop_path)
	# For metrics, figure out how much memory it took to sync
	var memory_used: int = 0
	for prop_path: NodePath in data_to_send:
		memory_used += Log.approx_memory_used(data_to_send[prop_path])
	Log.info("Sync", str(data_to_send))
	print("Sync %s: Synced %s bytes of data to all players" % [Engine.get_process_frames(), memory_used])
# Receives the full reliable property snapshot broadcast by
# sync_all_node_state() and applies it to the matching local nodes.
@rpc("any_peer", "reliable", "call_remote", 901)
func _receive_all_node_state(data_to_send: Dictionary[NodePath, Variant]) -> void:
	for prop_path: NodePath in data_to_send:
		var node_path: NodePath = NodePath(prop_path.get_concatenated_names())
		var node: Node = get_node_or_null(node_path)
		if not node: continue # we don't have this node (yet?)
		var val: Variant = data_to_send[prop_path]
		# Node values were sent as paths; resolve them back into Nodes.
		if val is NodePath:
			var val_node_path: NodePath = val
			var n: Node = get_node_or_null(val_node_path)
			if not n: continue # can't update this property; the node doesn't exist
			val = n
		var property_name: StringName = prop_path.get_concatenated_subnames()
		# NOTE(review): unlike _receive_reliable, this does not special-case
		# "set_"/"meta_" property names -- confirm such props never reach here.
		node.set(property_name, val)
## True if this node is in the tree and we are its multiplayer authority.
func owns(node: Node) -> bool:
	if not node.is_inside_tree():
		return false
	return node.is_multiplayer_authority()
# --- Lifecycle ---

## Schedule the first sync tick and hook game lifecycle signals.
func _ready() -> void:
	var interval_us: int = int(1_000_000 / float(TICK_HZ))
	_next_us = Time.get_ticks_usec() + interval_us
	# Sync everything to everyone when the game starts or resumes
	Game.started.connect(sync_all_node_state)
	Game.resumed.connect(sync_all_node_state)
## Drive the fixed-rate sync tick off the frame loop.
func _process(_delta: float) -> void:
	# No syncing while the game is paused or not yet started.
	if not Game.is_running(): return
	if Time.get_ticks_usec() < _next_us: return
	# Advance the deadline by one fixed interval (keeps the average rate at TICK_HZ).
	_next_us += int(1_000_000 / float(TICK_HZ))
	_tick()
# --- Tick: build chunks and send ---

## One sync tick: flush pending reliable props, then send unreliable
## entry data to peers using the configured sync mode.
func _tick() -> void:
	_seq += 1
	if not _updated_props.is_empty():
		_sync_reliable_entries()
	if _entries.is_empty():
		return
	var sync_mode: StringName = Options.get_option(&"multiplayer_sync_mode", &"Smart")
	assert(Log.perf_start(&"Sync.send_to_peers"))
	match sync_mode:
		&"Smart":
			# Per-peer, distance-aware chunked sends.
			for pid: int in Lobby.player_ids():
				_sync_to_peer(pid)
		_:
			# Broadcast everything to everyone.
			_send_to_all_peers_chunked()
	assert(Log.perf_stop(&"Sync.send_to_peers"))
## Register a node for unreliable syncing (internal).
## No-op if the node is already registered or has no on_sync method.
func _enable(node: Node) -> void:
	# Fast path: group membership marks already-registered nodes.
	if node.is_in_group(&"multiplayer_syncing"): return
	if _entries.find_custom(func(e: Entry) -> bool: return e.node == node) != -1: return
	if node.has_method(&"on_sync"):
		var e: Entry = Entry.new()
		e.node = node
		e.node3D = _get_node3D_parent(node)
		_entries.append(e)
		# BUG FIX: the old guard checked is_connected(_disable) but connected
		# _disable.bind(node); an unbound callable never compares equal to a
		# bound one, so the guard could never detect the existing connection.
		# Check against the same bound callable we connect.
		var on_exit: Callable = _disable.bind(node)
		if not node.tree_exiting.is_connected(on_exit):
			node.tree_exiting.connect(on_exit)
		node.add_to_group(&"multiplayer_syncing")
## Unregister a node from syncing (internal); safe to call repeatedly.
func _disable(node: Node) -> void:
	node.remove_from_group(&"multiplayer_syncing")
	var entry: Entry = _find_entry_by_node(node)
	if entry != null:
		_entries.erase(entry)
## Gather one entry's sync data by calling its sync method into a WRITE pack.
## Returns an empty pack if the node is gone, we lack authority, or the
## method is missing.
func _collect_sync_data(e: Entry, sync_method: StringName) -> SyncPack:
	var pack: SyncPack = SyncPack.new(SyncPack.Mode.WRITE)
	var node: Node = get_node_or_null(e.get_path())
	var alive: bool = node != null and is_instance_valid(node) and not node.is_queued_for_deletion()
	if not alive: return pack
	if not owns(node): return pack
	if not node.has_method(sync_method):
		push_warning("Sync: Node %s has no %s method" % [node.name, sync_method])
		return pack
	node.call(sync_method, pack)
	return pack
## Build and send unreliable sync chunks to a single peer, resuming from the
## shared round-robin cursor so large entry lists spread across ticks/peers.
func _sync_to_peer(pid: int) -> void:
	if pid == Lobby.id(): return # never sync to ourselves
	var start: int = _cursor # where we started; stop after one full lap
	var budget: int = MTU_BYTES # rough bytes remaining in the current chunk
	# NOTE(review): recv_chunk's first parameter is a PackedStringArray of
	# node paths, but this path sends entry ids (and Entry.id is never
	# assigned in this file) -- confirm the "Smart" path's wire format.
	var ids: PackedInt64Array = PackedInt64Array()
	var offs: PackedInt32Array = PackedInt32Array() # float offset of each entry in vals
	var vals: PackedFloat32Array = PackedFloat32Array() # concatenated float data
	var cursor: int = 0 # write position (in floats) within vals
	# Metrics counters (not reported yet; see note at the bottom).
	var total_data_size: int = 0
	var nodes_checked: int = 0
	var nodes_skipped: int = 0
	var nodes_synced: int = 0
	var skipped_data_get: int = 0
	var loop_count: int = 0
	while budget > 128 and _entries.size() > 0 and _cursor < _entries.size():
		loop_count += 1
		var e: Entry = _entries[_cursor] if _cursor < _entries.size() else null
		if not e: break
		_cursor = (_cursor + 1) % _entries.size()
		var node: Node = get_node_or_null(e.get_path())
		nodes_checked += 1
		if not node or not is_instance_valid(node) or node.is_queued_for_deletion(): continue
		# Check if this node's data should be synced to this player
		var t: StringName = _sync_type(e, node, pid)
		if t == &"none":
			nodes_skipped += 1
			if _cursor == start: break # completed a full lap
			continue
		# Collect sync data once per game tick and cache it on the entry,
		# so several peers in the same tick share one gather.
		if e.last_gather_tick != Game.tick:
			var pack: SyncPack = _collect_sync_data(e, &"on_sync")
			e.last_gather_tick = Game.tick
			e.sync_data = pack.values
			e.slow_index = pack.slow_index
			total_data_size += pack.values.size() * 4 # per float
		else:
			skipped_data_get += 1
		# BUG FIX: size the chunk by what we actually append. get_data(t) may
		# return a shorter slice for "normal" syncs (slow values trimmed), but
		# the old code advanced `cursor` by the FULL sync_data size, desyncing
		# the offsets from the data on the receiving side.
		var data: PackedFloat32Array = e.get_data(t)
		var count: int = data.size()
		var add_bytes: int = 8 + 4 * count # 8-byte id + 4 bytes per float
		if add_bytes > budget and ids.size() > 0:
			# Chunk full: flush, start fresh, and retry this entry (rewind).
			_send_chunk_to(pid, ids, offs, vals, _seq)
			budget = MTU_BYTES
			ids = PackedInt64Array()
			offs = PackedInt32Array()
			vals = PackedFloat32Array()
			cursor = 0
			_cursor = (_cursor - 1 + _entries.size()) % _entries.size()
			continue
		nodes_synced += 1
		ids.push_back(e.id)
		offs.push_back(cursor)
		vals.append_array(data)
		cursor += count
		budget -= add_bytes
		if _cursor == start: break # completed a full lap
	if ids.size() > 0:
		_send_chunk_to(pid, ids, offs, vals, _seq)
	# Just making sure we don't get warnings about these unused variables.
	# I plan to use them to monitor the sync system's performance.
	if total_data_size + nodes_checked + nodes_skipped + nodes_synced + skipped_data_get + loop_count > 0:
		pass
## "Dumb" broadcast mode: gather every entry's sync data and send it to all
## peers, split into chunks so a single RPC stays reasonably small.
func _send_to_all_peers_chunked() -> void:
	var chunk_budget: int = 1000 # Bytes per chunk
	var chunk_paths: PackedStringArray = PackedStringArray()
	var chunk_offs: PackedInt32Array = PackedInt32Array()
	var chunk_vals: PackedFloat32Array = PackedFloat32Array()
	var cursor: int = 0 # write position (in floats) within chunk_vals
	var pack: SyncPack = SyncPack.new(SyncPack.Mode.WRITE)
	for e: Entry in _entries:
		if not e.node.has_method(&"on_sync"): continue
		pack = _collect_sync_data(e, &"on_sync")
		var data_size: int = pack.values.size() * 4 # 4 bytes per float
		# If adding this entry would exceed the budget, send current chunk.
		# BUG FIX: the old check compared `cursor` (a FLOAT COUNT) against a
		# BYTE budget; compare bytes to bytes so chunks fill as intended.
		if chunk_vals.size() > 0 and chunk_vals.size() * 4 + data_size > chunk_budget:
			recv_chunk.rpc(chunk_paths, chunk_offs, chunk_vals, _seq)
			# Reset for next chunk
			chunk_paths.clear()
			chunk_offs.clear()
			chunk_vals.clear()
			cursor = 0
		# Add entry to current chunk
		chunk_paths.push_back(e.node.get_path())
		chunk_offs.push_back(cursor)
		chunk_vals.append_array(pack.values)
		cursor += pack.values.size()
		pack.reset()
	# Send final chunk if it has data
	if chunk_paths.size() > 0:
		recv_chunk.rpc(chunk_paths, chunk_offs, chunk_vals, _seq)
# Per-tick chunk-send metrics.
var _sent_chunks: int = 0
var _last_sent_chunk_time: int = 0

## Send one unreliable chunk to a single peer, counting chunks sent this tick.
func _send_chunk_to(pid: int, ids: PackedInt64Array, offs: PackedInt32Array, vals: PackedFloat32Array, seq: int) -> void:
	# New tick? Restart the per-tick counter.
	if _last_sent_chunk_time != Game.tick:
		_sent_chunks = 0
		_last_sent_chunk_time = Game.tick
	recv_chunk.rpc_id(pid, ids, offs, vals, seq)
	_sent_chunks += 1
# Sends all updated reliable entries to all clients.
# Encodes pending property updates into parallel arrays: entry i of
# node_paths/prop_names/value_types describes one property, and its value is
# the next unread element of the typed array matching its type tag.
func _sync_reliable_entries() -> void:
	if Lobby.player_ids().is_empty(): return # nobody to send to
	# Using efficient PackedArray formats where possible
	var node_paths: PackedStringArray = PackedStringArray()
	var prop_names: PackedStringArray = PackedStringArray()
	var value_types: PackedByteArray = PackedByteArray()
	var string_values: PackedStringArray = PackedStringArray()
	var float_values: PackedFloat64Array = PackedFloat64Array()
	var int_values: PackedInt64Array = PackedInt64Array()
	var vector2_values: PackedVector2Array = PackedVector2Array()
	var vector3_values: PackedVector3Array = PackedVector3Array()
	var vector4_values: PackedVector4Array = PackedVector4Array()
	var bool_values: PackedByteArray = PackedByteArray()
	var bitmap_values: Array[PackedByteArray] = []
	var node_path_values: PackedStringArray = PackedStringArray()
	var array_int_32_values: Array[PackedInt32Array] = []
	var array_int_64_values: Array[PackedInt64Array] = []
	var array_float_32_values: Array[PackedFloat32Array] = []
	var array_float_64_values: Array[PackedFloat64Array] = []
	var array_vector2_values: Array[PackedVector2Array] = []
	var array_vector3_values: Array[PackedVector3Array] = []
	var array_vector4_values: Array[PackedVector4Array] = []
	# Process each node's properties
	for npath: NodePath in _updated_props:
		var props_dict: Dictionary = _updated_props[npath]
		# Add each property for this node
		for prop: StringName in props_dict:
			var value: Variant = props_dict[prop]
			# Classify the value FIRST; only record the property once we know
			# the type is supported. (BUG FIX: the old code appended to
			# node_paths/prop_names BEFORE classifying, so an unsupported type
			# silently desynced the parallel arrays in release builds where
			# the assert is stripped.)
			var vtype: int = -1
			if value == null:
				vtype = NULL
				# Don't need to store the value; we know it's null
			elif value is String:
				vtype = STRING
				var v: String = value
				string_values.append(v)
			elif value is float:
				vtype = FLOAT
				var v: float = value
				float_values.append(v)
			elif value is int:
				vtype = INT
				var v: int = value
				int_values.append(v)
			elif value is Vector2:
				vtype = VECTOR2
				var v: Vector2 = value
				vector2_values.append(v)
			elif value is Vector3:
				vtype = VECTOR3
				var v: Vector3 = value
				vector3_values.append(v)
			elif value is Vector4:
				vtype = VECTOR4
				var v: Vector4 = value
				vector4_values.append(v)
			elif value is bool:
				vtype = BOOL
				var v: int = 1 if value else 0
				bool_values.append(v)
			elif value is BitMap:
				vtype = BITMAP
				var v: BitMap = value
				var dat: PackedByteArray = v.data["data"]
				bitmap_values.append(dat)
			elif value is Node:
				vtype = NODE
				var n: Node = value
				node_path_values.append(n.get_path()) # send a path instead of a node
			elif value is NodePath:
				vtype = NODE_PATH
				var v: NodePath = value
				node_path_values.append(v)
			elif value is PackedInt32Array:
				vtype = ARRAY_INT_32
				var v: PackedInt32Array = value
				array_int_32_values.append(v)
			elif value is PackedInt64Array:
				vtype = ARRAY_INT_64
				var v: PackedInt64Array = value
				array_int_64_values.append(v)
			elif value is PackedFloat32Array:
				vtype = ARRAY_FLOAT_32
				var v: PackedFloat32Array = value
				array_float_32_values.append(v)
			elif value is PackedFloat64Array:
				vtype = ARRAY_FLOAT_64
				var v: PackedFloat64Array = value
				array_float_64_values.append(v)
			elif value is PackedVector2Array:
				vtype = ARRAY_VECTOR2
				var v: PackedVector2Array = value
				array_vector2_values.append(v)
			elif value is PackedVector3Array:
				vtype = ARRAY_VECTOR3
				var v: PackedVector3Array = value
				array_vector3_values.append(v)
			elif value is PackedVector4Array:
				vtype = ARRAY_VECTOR4
				var v: PackedVector4Array = value
				array_vector4_values.append(v)
			if vtype == -1:
				# assert false on unsupported types
				assert(false, "Sync: Unsupported type for %s:%s = %s" % [npath, prop, value])
				continue
			node_paths.append(npath)
			prop_names.append(String(prop)) # Convert StringName to String for PackedStringArray
			value_types.append(vtype)
			# Record that we've updated this property recently.
			# BUG FIX: property subnames in a NodePath are separated by ":",
			# not "/". sync_all_node_state() splits these keys back apart with
			# get_concatenated_names()/get_concatenated_subnames(), which only
			# works with the ":" form -- with "/" the prop was parsed as part
			# of the node path and the subnames came back empty.
			_prop_last_update_tick[NodePath(str(npath) + ":" + String(prop))] = Game.tick
	# Validate that all arrays have the same length
	var total_properties: int = node_paths.size()
	assert(total_properties == prop_names.size(), "Property count mismatch: %d vs %d" % [total_properties, prop_names.size()])
	assert(total_properties == value_types.size(), "Type count mismatch: %d vs %d" % [total_properties, value_types.size()])
	# Send the efficient PackedArray format
	_receive_reliable.rpc(
		node_paths,
		prop_names,
		value_types,
		string_values,
		float_values,
		int_values,
		vector2_values,
		vector3_values,
		vector4_values,
		bool_values,
		bitmap_values,
		node_path_values,
		array_int_32_values,
		array_int_64_values,
		array_float_32_values,
		array_float_64_values,
		array_vector2_values,
		array_vector3_values,
		array_vector4_values
	)
	_updated_props.clear()
# Receives the reliable property batch built by _sync_reliable_entries()
# and applies each property to the matching local node. Values of each type
# were appended in order on the send side, so each typed array is consumed
# sequentially with its own read index.
@rpc("any_peer", "reliable", "call_remote", 805)
func _receive_reliable(
	node_paths: PackedStringArray,
	prop_names: PackedStringArray,
	value_types: PackedByteArray,
	string_values: PackedStringArray,
	float_values: PackedFloat64Array,
	int_values: PackedInt64Array,
	vector2_values: PackedVector2Array,
	vector3_values: PackedVector3Array,
	vector4_values: PackedVector4Array,
	bool_values: PackedByteArray,
	bitmap_values: Array[PackedByteArray],
	node_path_values: PackedStringArray,
	array_int_32_values: Array[PackedInt32Array],
	array_int_64_values: Array[PackedInt64Array],
	array_float_32_values: Array[PackedFloat32Array],
	array_float_64_values: Array[PackedFloat64Array],
	array_vector2_values: Array[PackedVector2Array],
	array_vector3_values: Array[PackedVector3Array],
	array_vector4_values: Array[PackedVector4Array]
) -> void:
	# Validate that all arrays have the expected lengths
	var total_properties: int = node_paths.size()
	assert(total_properties == prop_names.size(), "Property count mismatch: %d vs %d" % [total_properties, prop_names.size()])
	assert(total_properties == value_types.size(), "Type count mismatch: %d vs %d" % [total_properties, value_types.size()])
	# Track current position in each value array
	var string_index: int = 0
	var float_index: int = 0
	var int_64_index: int = 0
	var vector2_index: int = 0
	var vector3_index: int = 0
	var vector4_index: int = 0
	var bool_index: int = 0
	var bitmap_index: int = 0
	var node_index: int = 0
	var node_path_index: int = 0
	var array_int_32_index: int = 0
	var array_int_64_index: int = 0
	var array_float_32_index: int = 0
	var array_float_64_index: int = 0
	var array_vector2_index: int = 0
	var array_vector3_index: int = 0
	var array_vector4_index: int = 0
	# Process each property update
	for i: int in total_properties:
		var node_path: String = node_paths[i] # Get the node path for this property
		if not has_node(node_path): continue # can't find it? skip but DON'T consume a value -- see NOTE
		# NOTE(review): skipping here without consuming the value leaves the
		# typed read index behind for subsequent props of the same type --
		# confirm senders only reference nodes all peers have.
		var node: Node = get_node(node_path)
		var prop_name: String = prop_names[i] # Get the property name
		var prop_string_name: StringName = StringName(prop_name) # Convert back to StringName
		var value_type: int = value_types[i] # Get the type of this value
		var value: Variant = null
		# Read value from the appropriate array based on its type
		match value_type:
			NULL:
				value = null
			STRING:
				assert(string_index < string_values.size(), "String value index out of bounds")
				value = string_values[string_index]
				string_index += 1
			FLOAT:
				assert(float_index < float_values.size(), "Float value index out of bounds")
				value = float_values[float_index]
				float_index += 1
			INT:
				assert(int_64_index < int_values.size(), "Int64 value index out of bounds")
				value = int_values[int_64_index]
				int_64_index += 1
			VECTOR2:
				assert(vector2_index < vector2_values.size(), "Vector2 value index out of bounds")
				value = vector2_values[vector2_index]
				vector2_index += 1
			VECTOR3:
				assert(vector3_index < vector3_values.size(), "Vector3 value index out of bounds")
				value = vector3_values[vector3_index]
				vector3_index += 1
			VECTOR4:
				assert(vector4_index < vector4_values.size(), "Vector4 value index out of bounds")
				value = vector4_values[vector4_index]
				vector4_index += 1
			BOOL:
				assert(bool_index < bool_values.size(), "Bool value index out of bounds")
				value = bool_values[bool_index] != 0 # Convert int back to bool
				bool_index += 1
			BITMAP:
				assert(bitmap_index < bitmap_values.size(), "Bitmap value index out of bounds")
				var dat: PackedByteArray = bitmap_values[bitmap_index]
				value = BitMap.new()
				value.data["data"] = dat
				bitmap_index += 1
			NODE:
				# NOTE: NODE and NODE_PATH share tag 11, so this branch handles
				# both; node_index alone walks the shared array in send order.
				# NOTE(review): the value applied is the raw path string, not a
				# resolved Node (unlike _receive_all_node_state) -- confirm.
				assert(node_index < node_path_values.size(), "Node value index out of bounds")
				value = node_path_values[node_index]
				node_index += 1
			NODE_PATH:
				# Unreachable while NODE_PATH == NODE (match takes the first
				# branch); kept for the day the tags diverge.
				assert(node_path_index < node_path_values.size(), "NodePath value index out of bounds")
				value = node_path_values[node_path_index]
				node_path_index += 1
			ARRAY_INT_32:
				assert(array_int_32_index < array_int_32_values.size(), "ArrayInt32 value index out of bounds")
				value = array_int_32_values[array_int_32_index]
				array_int_32_index += 1
			ARRAY_INT_64:
				assert(array_int_64_index < array_int_64_values.size(), "ArrayInt64 value index out of bounds")
				value = array_int_64_values[array_int_64_index]
				array_int_64_index += 1
			ARRAY_FLOAT_32:
				assert(array_float_32_index < array_float_32_values.size(), "ArrayFloat32 value index out of bounds")
				value = array_float_32_values[array_float_32_index]
				array_float_32_index += 1
			ARRAY_FLOAT_64:
				assert(array_float_64_index < array_float_64_values.size(), "ArrayFloat64 value index out of bounds")
				value = array_float_64_values[array_float_64_index]
				# BUG FIX: this branch previously incremented
				# array_float_32_index (copy-paste error), so a batch with
				# multiple Float64 arrays re-read the first one every time.
				array_float_64_index += 1
			ARRAY_VECTOR2:
				assert(array_vector2_index < array_vector2_values.size(), "ArrayVector2 value index out of bounds")
				value = array_vector2_values[array_vector2_index]
				array_vector2_index += 1
			ARRAY_VECTOR3:
				assert(array_vector3_index < array_vector3_values.size(), "ArrayVector3 value index out of bounds")
				value = array_vector3_values[array_vector3_index]
				array_vector3_index += 1
			ARRAY_VECTOR4:
				assert(array_vector4_index < array_vector4_values.size(), "ArrayVector4 value index out of bounds")
				value = array_vector4_values[array_vector4_index]
				array_vector4_index += 1
			_:
				push_error("Sync: Unknown value type %d for property %s" % [value_type, prop_name])
				continue
		# Apply the property update to the node, mirroring set_prop()'s
		# naming conventions on the send side.
		if prop_name.begins_with("meta_"):
			# BUG FIX: set_prop() stores "meta_*" props via set_meta(), but the
			# old receive path fell through to node.set(), which is a silent
			# no-op for nonexistent properties -- metas never actually synced.
			node.set_meta.call_deferred(prop_string_name, value)
		elif prop_name.begins_with("set_"):
			# setter
			node.call_deferred(prop_string_name, value)
		else:
			node.set.call_deferred(prop_string_name, value)
# --- Receive & apply ---
# Receives one unreliable sync chunk: parallel arrays of node paths, float
# offsets into `vals`, and the concatenated float data, plus the sender's
# sequence number for stale-packet rejection.
@rpc("any_peer", "unreliable", "call_remote", 803)
func recv_chunk(paths: PackedStringArray, offs: PackedInt32Array, vals: PackedFloat32Array, seq: int) -> void:
	var peer: int = multiplayer.get_remote_sender_id()
	@warning_ignore("unsafe_call_argument")
	var last: int = int(_last_seq_from.get(peer, 0))
	# Drop stale (out-of-order) packets. Equal seq is allowed on purpose:
	# a sender may split one tick across several chunks sharing the same seq.
	if seq < last: return
	@warning_ignore("unsafe_call_argument")
	_last_seq_from[peer] = seq
	for i: int in paths.size():
		var node: Node = get_node_or_null(paths[i])
		# Skip nodes we don't have, and nodes we are the authority for.
		if node == null or owns(node): continue
		# Each entry's floats run from its offset to the next entry's offset
		# (or to the end of `vals` for the last entry).
		var start: int = offs[i]
		var end: int = offs[i + 1] if i + 1 < offs.size() else vals.size()
		var slice: PackedFloat32Array = vals.slice(start, end)
		var pack: SyncPack = SyncPack.new(SyncPack.Mode.READ, slice)
		@warning_ignore("unsafe_method_access")
		if node.has_method(&"on_sync"): node.on_sync(pack)
		else: push_error("Sync: Node %s has no on_sync method" % node.name)
# Decide how (and whether) entry `e`'s node syncs to player `pid` this tick.
# Returns &"full", &"normal", or &"none".
func _sync_type(e: Entry, node: Node, pid: int) -> StringName:
	if pid == Lobby.id(): return &"none" # no syncing to self
	# non-3d nodes should provide a custom should_sync method
	# NOTE(review): a custom should_sync's return value is NOT validated
	# against full/normal/none (the assert below only covers the built-in
	# path) -- confirm custom implementations return one of those.
	@warning_ignore("unsafe_method_access")
	if node.has_method("should_sync"): return node.should_sync(pid)
	assert(e.node3D is Node3D, "Either provide a should_sync method, or make sure the node is a Node3D or has a Node3D parent for distance-based sync.")
	var t: StringName = _should_sync(e.node3D, pid)
	assert([&"full", &"normal", &"none"].has(t), "Invalid sync type: %s (correct types are: full, normal, none)" % t)
	return t
# Distance-based sync rate: nearer players get more frequent updates.
# The argument to _if_tick is an interval in ticks; at TICK_HZ = 30,
# _if_tick(6) means one update every 6 ticks (~5 per second).
# (The original comments claimed different rates than the tick math gives;
# the comments below are derived from the code.)
func _should_sync(node: Node3D, pid: int) -> StringName:
	var distance: float = Game.player_pos(pid).distance_to(node.global_position)
	# crewmate (within 50m) -- probably a crewmate, sync every tick (30/s)
	if distance < 50.0: return _if_tick(1)
	# very close (within 300m): every 2nd tick (~15/s)
	if distance < 300.0: return _if_tick(2)
	# close (within 1500m): every 6th tick (~5/s)
	if distance < 1500.0: return _if_tick(6)
	# within visual range (8000m): once per second
	if distance < 8000.0: return _if_tick(30)
	# everyone else gets an update every 240 ticks (8 seconds)
	return _if_tick(240)
## Map the current tick onto a sync interval: &"full" every (mod * 2) ticks,
## &"normal" every mod ticks, &"none" otherwise.
func _if_tick(mod: int) -> StringName:
	# BUG FIX: the old `Game.tick % mod * 2 == 0` parsed as
	# `(Game.tick % mod) * 2 == 0`, which is zero exactly when
	# `Game.tick % mod == 0` -- so every due sync was "full" and the
	# "normal" branch was unreachable. Parenthesize the intended interval.
	if Game.tick % (mod * 2) == 0: return &"full"
	if Game.tick % mod == 0: return &"normal"
	return &"none"
## Walk up from `node` (inclusive) and return the first Node3D found,
## or null if neither the node nor any ancestor is a Node3D.
func _get_node3D_parent(node: Node) -> Node3D:
	var current: Node = node
	while current != null:
		var n3d: Node3D = current as Node3D
		if n3d != null:
			return n3d
		current = current.get_parent()
	return null
## Queue property updates for the node to be sent in the next reliable batch.
## Later values for the same prop overwrite earlier pending ones.
func _sync_reliable(node: Node, props: Dictionary[StringName, Variant]) -> void:
	var npath: NodePath = node.get_path()
	if not _updated_props.has(npath):
		_updated_props[npath] = {}
	var pending: Dictionary = _updated_props[npath]
	pending.merge(props, true) # true = newer values win
## Look up the sync Entry registered for this node, or null if none.
func _find_entry_by_node(node: Node) -> Entry:
	var idx: int = _entries.find_custom(func(e: Entry) -> bool: return e.node == node)
	return null if idx == -1 else _entries[idx]
# --- Internal types ---
# One registered node in the unreliable sync round-robin.
class Entry:
	var node: Node # original node
	var node3D: Node3D # either this or a parent (or null)
	var id: int # NOTE(review): never assigned anywhere in this file -- confirm where it's set
	var last_gather_tick: int = 0 # Game.tick when sync_data was last gathered
	var slow_index: int = 0 # index in sync_data where "slow" values begin (0 = none)
	var sync_data: PackedFloat32Array = PackedFloat32Array() # cached on_sync output for this tick
	func get_path() -> NodePath: return node.get_path()
	# "full" returns everything; "normal" trims the slow tail (if any).
	func get_data(t: StringName) -> PackedFloat32Array:
		if t == &"full": return sync_data
		if t == &"normal" and slow_index > 0: return sync_data.slice(0, slow_index)
		return sync_data
| # --- SyncPack --- | |
| # Helps you sync unreliable data to other players. | |
| # You don't instantiate it directly; instead, you | |
| # provide an `on_sync` method in your node and | |
| # we'll pass in a SyncPack object. | |
| # | |
| # Usage: | |
| # func on_sync(pack: SyncPack) -> void: | |
| # global_transform = pack.sync(global_transform) | |
| # linear_velocity = pack.sync(linear_velocity, 0.2) # lerps at 0.2x speed | |
| # some_other_value = pack.sync_slow(some_other_value) | |
| # some_other_value = pack.sync_slow(some_other_value, 0.2) | |
| class SyncPack: | |
| enum Mode { WRITE, READ } | |
| var mode: int | |
| var values: PackedFloat32Array = PackedFloat32Array() | |
| var _r: int = 0 | |
| var slow_index: int = 0 | |
| func _init(m: int, incoming: Array = []) -> void: | |
| mode = m | |
| values = PackedFloat32Array(incoming) | |
| func reset() -> void: | |
| values.clear() | |
| _r = 0 | |
| slow_index = 0 | |
| @warning_ignore_start("unsafe_cast") | |
| # Use specific sync methods instead for type safety | |
| func sync(v: Variant, lrp: float = 1.0) -> Variant: | |
| assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last") | |
| return _sync(v, lrp) | |
| # These send slower than normal syncs | |
| func sync_slow(v: Variant, lrp: float = 1.0) -> Variant: | |
| if slow_index == 0: slow_index = _r | |
| return _sync(v, lrp) | |
| func _sync(v: Variant, lrp: float = 1.0) -> Variant: | |
| if v is float: return _sync_float(v as float, lrp) | |
| if v is Vector2: return _sync_vector2(v as Vector2, lrp) | |
| if v is Vector3: return _sync_vector3(v as Vector3, lrp) | |
| if v is Quaternion: return _sync_quaternion(v as Quaternion, lrp) | |
| if v is Basis: return _sync_basis(v as Basis, lrp) | |
| if v is Transform3D: return _sync_transform3d(v as Transform3D, lrp) | |
| if v is PackedFloat32Array: return _sync_array(v as PackedFloat32Array) | |
| push_error("Sync: Unsupported type: %s" % v) | |
| # ok | |
| return v | |
| func sync_float(v: float, lrp: float = 1.0) -> float: | |
| assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last") | |
| return _sync_float(v, lrp) | |
| func sync_float_slow(v: float, lrp: float = 1.0) -> float: | |
| if slow_index == 0: slow_index = _r | |
| return sync_float(v, lrp) | |
| func _sync_float(v: float, lrp: float = 1.0) -> float: | |
| if mode == Mode.WRITE: | |
| # Collect the data | |
| values.append(v) | |
| _r += 1 | |
| return v | |
| else: | |
| if _r >= values.size(): return v | |
| var r: float = lerpf(v, values[_r], lrp) | |
| # var r: float = values[_r] | |
| _r += 1 | |
| return r | |
| func sync_vector2(v: Vector2, lrp: float = 1.0) -> Vector2: | |
| assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last") | |
| return _sync_vector2(v, lrp) | |
| func sync_vector2_slow(v: Vector2, lrp: float = 1.0) -> Vector2: | |
| if slow_index == 0: slow_index = _r | |
| return _sync_vector2(v, lrp) | |
func _sync_vector2(v: Vector2, lrp: float = 1.0) -> Vector2:
	# Sync a Vector2 as two consecutive floats in the value stream.
	# WRITE: append x, y. READ: lerp the local value toward the received one.
	if mode == Mode.WRITE:
		values.append_array([v.x, v.y])
		_r += 2
		return v
	else:
		# Fix: require BOTH components; the old `_r >= values.size()` check
		# only guaranteed one and could index past the end of `values`.
		if _r + 2 > values.size(): return v
		var r: Vector2 = v.lerp(Vector2(values[_r], values[_r + 1]), lrp)
		_r += 2
		return r
func sync_vector3(v: Vector3, lrp: float = 1.0) -> Vector3:
	# Regular-cadence Vector3 sync; must appear before any *_slow calls.
	assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last")
	return _sync_vector3(v, lrp)
func sync_vector3_slow(v: Vector3, lrp: float = 1.0) -> Vector3:
	# Slow-cadence Vector3 sync; marks the start of the slow section.
	if slow_index == 0:
		slow_index = _r
	return _sync_vector3(v, lrp)
func _sync_vector3(v: Vector3, lrp: float = 1.0) -> Vector3:
	# Sync a Vector3 as three consecutive floats in the value stream.
	# WRITE: append x, y, z. READ: lerp the local value toward the received one.
	if mode == Mode.WRITE:
		values.append_array([v.x, v.y, v.z])
		_r += 3
		return v
	else:
		# Fix: require all THREE components; the old `_r >= values.size()`
		# check only guaranteed one and could index past the end of `values`.
		if _r + 3 > values.size(): return v
		var r: Vector3 = v.lerp(Vector3(values[_r], values[_r + 1], values[_r + 2]), lrp)
		_r += 3
		return r
func sync_basis(v: Basis, lrp: float = 1.0) -> Basis:
	# Regular-cadence Basis sync; must appear before any *_slow calls.
	assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last")
	return _sync_basis(v, lrp)
func sync_basis_slow(v: Basis, lrp: float = 1.0) -> Basis:
	# Slow-cadence Basis sync; marks the start of the slow section.
	if slow_index == 0:
		slow_index = _r
	return _sync_basis(v, lrp)
func _sync_basis(v: Basis, lrp: float = 1.0) -> Basis:
	# Sync a Basis as nine consecutive floats (row vectors x, y, z).
	# READ: slerp between orthonormalized local and received bases, so the
	# result stays a valid rotation even with accumulated float drift.
	assert(v is Basis, "Sync: sync_basis() called with non-Basis value: %s" % v)
	if mode == Mode.WRITE:
		values.append_array([v.x.x, v.x.y, v.x.z, v.y.x, v.y.y, v.y.z, v.z.x, v.z.y, v.z.z])
		_r += 9
		return v
	else:
		# Fix: require all NINE components; the old `_r >= values.size()`
		# check only guaranteed one and could index past the end of `values`.
		if _r + 9 > values.size(): return v
		var to_basis: Basis = Basis(
			Vector3(values[_r], values[_r + 1], values[_r + 2]),
			Vector3(values[_r + 3], values[_r + 4], values[_r + 5]),
			Vector3(values[_r + 6], values[_r + 7], values[_r + 8])
		)
		var r: Basis = v.orthonormalized().slerp(to_basis.orthonormalized(), lrp)
		_r += 9
		return r
func sync_quaternion(v: Quaternion, lrp: float = 1.0) -> Quaternion:
	# Regular-cadence Quaternion sync; must appear before any *_slow calls.
	assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last")
	assert(v is Quaternion, "Sync: sync_quaternion() called with non-Quaternion value: %s" % v)
	return _sync_quaternion(v, lrp)
func sync_quaternion_slow(v: Quaternion, lrp: float = 1.0) -> Quaternion:
	# Slow-cadence Quaternion sync; marks the start of the slow section.
	if slow_index == 0:
		slow_index = _r
	return _sync_quaternion(v, lrp)
func _sync_quaternion(v: Quaternion, lrp: float = 1.0) -> Quaternion:
	# Sync a Quaternion as four consecutive floats (x, y, z, w).
	assert(v is Quaternion, "Sync: _sync_quaternion() called with non-Quaternion value: %s" % v)
	if mode == Mode.WRITE:
		values.append_array([v.x, v.y, v.z, v.w])
		_r += 4
		return v
	else:
		# Fix: require all FOUR components; the old `_r >= values.size()`
		# check only guaranteed one and could index past the end of `values`.
		if _r + 4 > values.size(): return v
		# Fix: slerp FROM the local value TOWARD the received one, so that
		# lrp = 1.0 snaps to the network value. The previous
		# `received.slerp(v, lrp)` was inverted and returned the local value
		# at lrp = 1.0, silently ignoring the received rotation (compare
		# _sync_float / _sync_vector3 / _sync_basis, which all blend
		# local -> received).
		var received := Quaternion(values[_r], values[_r + 1], values[_r + 2], values[_r + 3])
		var r: Quaternion = v.slerp(received, lrp)
		_r += 4
		return r
func sync_transform3d(v: Transform3D, lrp: float = 1.0) -> Transform3D:
	# Regular-cadence Transform3D sync; must appear before any *_slow calls.
	assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last")
	return _sync_transform3d(v, lrp)
func sync_transform3d_slow(v: Transform3D, lrp: float = 1.0) -> Transform3D:
	# Slow-cadence Transform3D sync; marks the start of the slow section.
	if slow_index == 0:
		slow_index = _r
	return _sync_transform3d(v, lrp)
func _sync_transform3d(v: Transform3D, lrp: float = 1.0) -> Transform3D:
	# Sync a Transform3D as twelve consecutive floats:
	# basis rows x, y, z (9 floats) followed by origin (3 floats).
	if mode == Mode.WRITE:
		values.append_array([v.basis.x.x, v.basis.x.y, v.basis.x.z, v.basis.y.x, v.basis.y.y, v.basis.y.z, v.basis.z.x, v.basis.z.y, v.basis.z.z, v.origin.x, v.origin.y, v.origin.z])
		_r += 12
		return v
	else:
		# Fix: require all TWELVE components; the old `_r >= values.size()`
		# check only guaranteed one and could index past the end of `values`.
		if _r + 12 > values.size(): return v
		var received := Transform3D(
			Vector3(values[_r], values[_r + 1], values[_r + 2]),
			Vector3(values[_r + 3], values[_r + 4], values[_r + 5]),
			Vector3(values[_r + 6], values[_r + 7], values[_r + 8]),
			Vector3(values[_r + 9], values[_r + 10], values[_r + 11])
		)
		# Fix: interpolate FROM the local transform TOWARD the received one,
		# so lrp = 1.0 snaps to the network value. The previous
		# `received.interpolate_with(v, lrp)` was inverted and returned the
		# local transform at lrp = 1.0, ignoring the received data.
		var r: Transform3D = v.interpolate_with(received, lrp)
		_r += 12
		return r
func sync_array(v: PackedFloat32Array, lrp: float = 1.0) -> PackedFloat32Array:
	# Regular-cadence array sync; must appear before any *_slow calls.
	assert(slow_index == 0, "sync_slow() called before sync()...always put sync_slow last")
	return _sync_array(v, lrp)
func sync_array_slow(v: PackedFloat32Array, lrp: float = 1.0) -> PackedFloat32Array:
	# Slow-cadence array sync; marks the start of the slow section.
	if slow_index == 0:
		slow_index = _r
	return _sync_array(v, lrp)
func _sync_array(v: PackedFloat32Array, lrp: float = 1.0) -> PackedFloat32Array:
	# Sync a PackedFloat32Array element-wise. Interpolation is not supported;
	# the array is replaced wholesale, so a non-default lrp only warns.
	# NOTE: read side assumes the received array has the same size as the
	# local one, since element count is not encoded in the stream.
	if not is_equal_approx(lrp, 1.0): push_warning("Sync: sync_array() does not support lrp")
	if mode == Mode.WRITE:
		values.append_array(v)
		_r += v.size()
		return v
	else:
		# Fix: require the FULL run of v.size() elements; the old
		# `_r >= values.size()` check only guaranteed one element, so
		# slice() could silently return a short array on truncated data.
		if _r + v.size() > values.size(): return v
		var r: PackedFloat32Array = values.slice(_r, _r + v.size())
		_r += v.size()
		return r
| @warning_ignore_restore("unsafe_cast") |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment