distributed: Revert all of the old object-sorting code.

parent 8985cf6386
commit 8421adf550

13 changed files with 10 additions and 320 deletions
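For context, the mechanism removed here worked as follows: a distributed class could declare a sortOrder class attribute, and while any opened interests were still pending (tracked in expectedInterests), incoming generates were parked in per-order buckets (sortedGenerates / sortedDoIds) and replayed in ascending order once the last interest completed. The sketch below is a simplified, self-contained model of that buffering pattern, not the actual OTP code; the real implementation stores (msgType, doId) pairs and buffered datagrams rather than bare doIds, as the hunks below show.

# Minimal model of the reverted sorted-generate buffering, for illustration only.

class SortedGenerateBuffer:
    def __init__(self):
        self.expectedInterests = set()   # interest handles still pending
        self.sortedGenerates = []        # index = sortOrder, value = list of doIds
        self.generated = []              # replay log

    def addInterest(self, handle):
        self.expectedInterests.add(handle)

    def gotGenerate(self, doId, sortOrder):
        if self.expectedInterests and sortOrder:
            # Park the generate in its sortOrder bucket until interests settle.
            while len(self.sortedGenerates) < sortOrder + 1:
                self.sortedGenerates.append([])
            self.sortedGenerates[sortOrder].append(doId)
        else:
            self.generated.append(doId)

    def interestDone(self, handle):
        self.expectedInterests.discard(handle)
        if not self.expectedInterests:
            # Replay buckets in ascending sortOrder.
            for bucket in self.sortedGenerates:
                self.generated.extend(bucket)
            self.sortedGenerates = []

buf = SortedGenerateBuffer()
buf.addInterest(1)
buf.gotGenerate(200, sortOrder=2)   # e.g. a DistributedBattleBase
buf.gotGenerate(100, sortOrder=1)   # e.g. a DistributedElevator
buf.interestDone(1)
assert buf.generated == [100, 200]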

@@ -37,7 +37,6 @@ from otp.uberdog import OtpAvatarManager
from otp.distributed import OtpDoGlobals
from otp.distributed.TelemetryLimiter import TelemetryLimiter
from otp.ai.GarbageLeakServerEventAggregator import GarbageLeakServerEventAggregator
from direct.distributed.MsgTypes import *

class OTPClientRepository(ClientRepositoryBase):
    notify = directNotify.newCategory('OTPClientRepository')

@@ -62,10 +61,6 @@ class OTPClientRepository(ClientRepositoryBase):

        self.__currentAvId = 0

        self.sortedGenerates = []
        self.sortedDoIds = {}
        self.expectedInterests = []

        self.productName = config.GetString('product-name', 'DisneyOnline-US')

@@ -1801,41 +1796,21 @@ class OTPClientRepository(ClientRepositoryBase):
        currentGameStateName = 'None'

    def gotInterestDoneMessage(self, di):
        if self.expectedInterests:
            dg = Datagram(di.getDatagram())
            tempDi = DatagramIterator(dg, di.getCurrentIndex())
            context = tempDi.getUint32()
            handle = tempDi.getUint16()
            if handle in self.expectedInterests:
                self.expectedInterests.remove(handle)
            if not self.expectedInterests:
                self.doSortedGenerate()
        if self.deferredGenerates:
            dg = Datagram(di.getDatagram())
            di = DatagramIterator(dg, di.getCurrentIndex())
            self.deferredGenerates[-1].append((CLIENT_DONE_INTEREST_RESP, (dg, di)))
            self.deferredGenerates.append((CLIENT_DONE_INTEREST_RESP, (dg, di)))
        else:
            self.handleInterestDoneMessage(di)

    def doSortedGenerate(self):
        if not self.sortedGenerates:
            return
        for generates in self.sortedGenerates:
            for generate in generates:
                msgType, extra = generate
                self.replayDeferredGenerate(msgType, extra)

    def gotObjectLocationMessage(self, di):
        if self.deferredGenerates or self.sortedGenerates:
        if self.deferredGenerates:
            dg = Datagram(di.getDatagram())
            di = DatagramIterator(dg, di.getCurrentIndex())
            di2 = DatagramIterator(dg, di.getCurrentIndex())
            doId = di2.getUint32()
            if doId in self.deferredDoIds:
                self.deferredDoIds[doId][2].append((CLIENT_OBJECT_LOCATION, (dg, di)))
            elif doId in self.sortedDoIds:
                self.sortedDoIds[doId][2].append((CLIENT_OBJECT_LOCATION, (dg, di)))
                self.deferredDoIds[doId][3].append((CLIENT_OBJECT_LOCATION, (dg, di)))
            else:
                self.handleObjectLocation(di)
        else:
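
gotInterestDoneMessage above peeks at the interest context and handle by building a second DatagramIterator over a copy of the datagram, leaving the original iterator untouched for the real handler. A standalone illustration of that copy-and-peek pattern follows; it assumes Panda3D's panda3d.core is available, and the message layout is simplified to just the two peeked fields:

from panda3d.core import Datagram, DatagramIterator

# Build a datagram shaped like an interest-done message body:
# a uint32 context followed by a uint16 handle.
dg = Datagram()
dg.addUint32(12345)   # context
dg.addUint16(42)      # interest handle

di = DatagramIterator(dg)

# Peek without consuming: copy the datagram and start a second
# iterator at the same byte offset, as gotInterestDoneMessage does.
dgCopy = Datagram(di.getDatagram())
tempDi = DatagramIterator(dgCopy, di.getCurrentIndex())
context = tempDi.getUint32()
handle = tempDi.getUint16()
assert (context, handle) == (12345, 42)

# The original iterator is untouched and can still be handed off.
assert di.getRemainingSize() == 6   # 4 bytes context + 2 bytes handle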

@@ -1891,7 +1866,7 @@ class OTPClientRepository(ClientRepositoryBase):
            dg, di = extra
            self.handleObjectLocation(di)
        else:
            self.o_replayDeferredGenerate(msgType, extra)
            ClientRepositoryBase.replayDeferredGenerate(self, msgType, extra)

    @exceptionLogged(append=False)
    def handleDatagram(self, di):

@@ -1971,41 +1946,9 @@ class OTPClientRepository(ClientRepositoryBase):
        dclass = self.dclassesByNumber[classId]
        if self._isInvalidPlayerAvatarGenerate(doId, dclass, parentId, zoneId):
            return

        if self.expectedInterests:
            sortOrder = getattr(dclass.getClassDef(), 'sortOrder', 0)
            if sortOrder:
                while len(self.sortedGenerates) < sortOrder+1:
                    self.sortedGenerates.append([])
                self.sortedGenerates[sortOrder].append((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
                dg = PyDatagram(di.getRemainingBytes())
                dg.addUint16(0)
                di = PyDatagramIterator(dg)
                self.sortedDoIds[doId] = ((parentId, zoneId, classId, doId, di), dg, [])
                return
        deferFor = getattr(dclass.getClassDef(), 'deferFor', 0)
        if not self.deferInterval or self.noDefer:
            deferrable = False
        if deferFor == 0:
            dclass.startGenerate()
            distObj = self.generateWithRequiredFields(dclass, doId, di, parentId, zoneId)
            dclass.stopGenerate()
        else:
            if len(self.deferredGenerates) == 0:
                taskMgr.doMethodLater(self.deferInterval, self.doDeferredGenerate, 'deferredGenerate')
            while len(self.deferredGenerates) < deferFor+1:
                self.deferredGenerates.append([])
            dg = PyDatagram(di.getRemainingBytes())
            dg.addUint16(0)
            di = PyDatagramIterator(dg)
            self.deferredGenerates[deferFor].append((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
            self.deferredDoIds[doId] = ((parentId,
                                         zoneId,
                                         classId,
                                         doId,
                                         di),
                                        dg,
                                        [])
        dclass.startGenerate()
        distObj = self.generateWithRequiredFields(dclass, doId, di, parentId, zoneId)
        dclass.stopGenerate()

    def handleGenerateWithRequiredOther(self, di):
        doId = di.getUint32()
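
The deferral machinery above buckets generates by how many replay cycles they should wait: the bucket index equals deferFor, buckets grow on demand, and one bucket drains per pass of doDeferredGenerate. A minimal model of that scheduling, with bare doIds standing in for the real (msgType, doId) entries:

# Simplified model of the deferFor frame-bucketing above; bucket i drains on tick i.

deferredGenerates = []   # list of buckets

def deferGenerate(doId, deferFor):
    while len(deferredGenerates) < deferFor + 1:
        deferredGenerates.append([])
    deferredGenerates[deferFor].append(doId)

def tick():
    """One doDeferredGenerate pass: drain the front bucket."""
    if deferredGenerates:
        for doId in deferredGenerates.pop(0):
            print('generating', doId)

deferGenerate(300, deferFor=0)  # this tick
deferGenerate(400, deferFor=1)  # next tick (e.g. a DistributedDoor, deferFor = 1)
tick()   # -> generating 300
tick()   # -> generating 400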

@@ -2015,38 +1958,9 @@ class OTPClientRepository(ClientRepositoryBase):
        dclass = self.dclassesByNumber[classId]
        if self._isInvalidPlayerAvatarGenerate(doId, dclass, parentId, zoneId):
            return
        deferFor = getattr(dclass.getClassDef(), 'deferFor', 0)
        if not self.deferInterval or self.noDefer:
            deferrable = False
        now = globalClock.getFrameTime()
        if self.expectedInterests:
            sortOrder = getattr(dclass.getClassDef(), 'sortOrder', 0)
            if sortOrder:
                while len(self.sortedGenerates) < sortOrder+1:
                    self.sortedGenerates.append([])
                self.sortedGenerates[sortOrder].append((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
                dg = PyDatagram(di.getRemainingBytes())
                dg.addUint16(0)
                di = PyDatagramIterator(dg)
                self.sortedDoIds[doId] = ((parentId, zoneId, classId, doId, di), dg, [])
                return
        if self.deferredGenerates or deferFor != 0:
            if len(self.deferredGenerates) == 0:
                taskMgr.doMethodLater(self.deferInterval, self.doDeferredGenerate, 'deferredGenerate')
            while len(self.deferredGenerates) < deferFor+1:
                self.deferredGenerates.append([])
            self.deferredGenerates[deferFor].append((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
            dg = Datagram(di.getDatagram())
            di = DatagramIterator(dg, di.getCurrentIndex())
            self.deferredDoIds[doId] = ((parentId,
                                         zoneId,
                                         classId,
                                         doId,
                                         di),
                                        dg,
                                        [])
        else:
            self.doGenerate(parentId, zoneId, classId, doId, di)
        dclass.startGenerate()
        distObj = self.generateWithRequiredOtherFields(dclass, doId, di, parentId, zoneId)
        dclass.stopGenerate()

    def handleGenerateWithRequiredOtherOwner(self, di):
        doId = di.getUint32()

@@ -2123,215 +2037,3 @@ class OTPClientRepository(ClientRepositoryBase):

    def addTaggedInterest(self, parentId, zoneId, mainTag, desc, otherTags = [], event = None):
        return self.addInterest(parentId, zoneId, desc, event)

    def addInterest(self, parentId, zoneId, desc, event = None):
        handle = ClientRepositoryBase.addInterest(self, parentId, zoneId, desc, event)
        self.expectedInterests.append(handle.asInt())
        return handle

    # The functions below have been moved from CRBase into OTPCr so we don't need to
    # fuck with CRBase for deferred generates. They should be moved back eventually.
    def disableDoId(self, doId, ownerView=False):
        table, cache = self.getTables(ownerView)
        # Make sure the object exists
        if table.has_key(doId):
            # Look up the object
            distObj = table[doId]
            # Remove the object from the dictionary
            del table[doId]

            # Only cache the object if it is a "cacheable" type
            # object; this way we don't clutter up the caches with
            # trivial objects that don't benefit from caching.
            # Also don't try to cache an object that is delayDeleted.
            cached = False
            if distObj.getCacheable() and distObj.getDelayDeleteCount() <= 0:
                cached = cache.cache(distObj)
            if not cached:
                distObj.deleteOrDelay()
                if distObj.getDelayDeleteCount() <= 0:
                    # Make sure we're not leaking
                    distObj.detectLeaks()

        elif self.deferredDoIds.has_key(doId):
            # The object had been deferred. Great; we don't even have
            # to generate it now.
            del self.deferredDoIds[doId]
            for cycle, deferredGenerates in enumerate(self.deferredGenerates):
                try:
                    i = deferredGenerates.index((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
                    del self.deferredGenerates[cycle][i]
                except:
                    pass

        elif self.sortedDoIds.has_key(doId):
            del self.sortedDoIds[doId]
            for cycle, sortedGenerates in enumerate(self.sortedGenerates):
                try:
                    i = sortedGenerates.index((CLIENT_ENTER_OBJECT_REQUIRED_OTHER, doId))
                    del self.sortedGenerates[cycle][i]
                except:
                    pass

        else:
            self._logFailedDisable(doId, ownerView)

    # This function has an extra o_ because it is also overridden by OTPCr;
    # when moving it back to CRBase, it needs the o_ removed, and the reference
    # to it here needs to be changed.
    def o_replayDeferredGenerate(self, msgType, extra):
        """ Override this to do something appropriate with deferred
        "generate" messages when they are replayed().
        """
        if msgType == CLIENT_ENTER_OBJECT_REQUIRED_OTHER:
            # It's a generate message.
            doId = extra
            if doId in self.deferredDoIds:
                args, dg, updates = self.deferredDoIds[doId]
                del self.deferredDoIds[doId]
                self.doGenerate(*args)

                for dg, di in updates:
                    # Non-DC updates that need to be played back in order are
                    # stored as (msgType, (dg, di)).
                    if type(di) is types.TupleType:
                        msgType = dg
                        dg, di = di
                        self.replayDeferredGenerate(msgType, (dg, di))
                    else:
                        # ovUpdated is set to True since its OV
                        # is assumed to have occurred when the
                        # deferred update was originally received.
                        self.__doUpdate(doId, di, True)
            elif doId in self.sortedDoIds:
                args, dg, updates = self.sortedDoIds[doId]
                del self.sortedDoIds[doId]
                self.doGenerate(*args)

                for dg, di in updates:
                    # Non-DC updates that need to be played back in order are
                    # stored as (msgType, (dg, di)).
                    if type(di) is types.TupleType:
                        msgType = dg
                        dg, di = di
                        self.replayDeferredGenerate(msgType, (dg, di))
                    else:
                        # ovUpdated is set to True since its OV
                        # is assumed to have occurred when the
                        # deferred update was originally received.
                        self.__doUpdate(doId, di, True)

        else:
            self.notify.warning("Ignoring deferred message %s" % (msgType))
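
The update-replay loop above distinguishes two kinds of buffered entries by shape: a plain (dg, di) field update, or (msgType, (dg, di)) for non-DC messages that must be replayed in order. A small model of that dispatch; the original checks types.TupleType, Python 2's spelling of the same test:

# Model of the update-replay dispatch; callbacks stand in for the real handlers.

def replayUpdates(updates, applyUpdate, replayMessage):
    for first, second in updates:
        if isinstance(second, tuple):
            # (msgType, (dg, di)): a buffered non-DC message.
            msgType, (dg, di) = first, second
            replayMessage(msgType, dg, di)
        else:
            # (dg, di): a buffered field update.
            dg, di = first, second
            applyUpdate(dg, di)

replayUpdates(
    [('dg1', 'di1'), (99, ('dg2', 'di2'))],
    applyUpdate=lambda dg, di: print('field update', dg, di),
    replayMessage=lambda t, dg, di: print('replay msg', t, dg, di),
)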

    def doDeferredGenerate(self, task):
        """ This is the task that generates an object on the deferred
        queue. """
        now = globalClock.getFrameTime()
        if now - self.lastGenerate < self.deferInterval:
            # Come back later.
            return Task.again
        self.lastGenerate = globalClock.getFrameTime()
        deferredGenerates = self.deferredGenerates.pop(0)
        for deferredGenerate in deferredGenerates:
            # Generate the next deferred object.
            msgType, extra = deferredGenerate
            self.replayDeferredGenerate(msgType, extra)

        if len(self.deferredGenerates) == 0:
            # All objects are generated.
            return Task.done
        return Task.again
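
doDeferredGenerate above is throttled: if the defer interval has not elapsed it asks to be re-run, otherwise it drains exactly one bucket and stops once the queue is empty (Task.again / Task.done). A self-contained model of that cadence, with string return values standing in for the Task constants:

class DeferredDrainer:
    def __init__(self, deferInterval):
        self.deferInterval = deferInterval
        self.lastGenerate = 0.0
        self.deferredGenerates = [['a'], ['b', 'c']]

    def step(self, now):
        if now - self.lastGenerate < self.deferInterval:
            return 'again'            # come back later
        self.lastGenerate = now
        for doId in self.deferredGenerates.pop(0):
            print('generate', doId)
        return 'done' if not self.deferredGenerates else 'again'

d = DeferredDrainer(deferInterval=1.0)
print(d.step(now=1.0))   # generate a -> again
print(d.step(now=1.5))   # throttled  -> again
print(d.step(now=2.0))   # generate b, c -> done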

    def handleUpdateField(self, di):
        """
        This method is called when a CLIENT_OBJECT_UPDATE_FIELD
        message is received; it decodes the update, unpacks the
        arguments, and calls the corresponding method on the indicated
        DistributedObject.

        In fact, this method is exactly duplicated by the C++ method
        cConnectionRepository::handle_update_field(), which was
        written to optimize the message loop by handling all of the
        CLIENT_OBJECT_UPDATE_FIELD messages in C++. That means that
        nowadays, this Python method will probably never be called,
        since UPDATE_FIELD messages will not even be passed to the
        Python message handlers. But this method remains for
        documentation purposes, and also as a "just in case" handler
        in case we ever do come across a situation in the future in
        which Python might handle the UPDATE_FIELD message.
        """
        # Get the DO Id
        doId = di.getUint32()

        ovUpdated = self.__doUpdateOwner(doId, di)

        if doId in self.deferredDoIds:
            # This object hasn't really been generated yet. Sit on
            # the update.
            args, dg0, updates = self.deferredDoIds[doId]

            # Keep a copy of the datagram, and move the di to the copy
            dg = Datagram(di.getDatagram())
            di = DatagramIterator(dg, di.getCurrentIndex())

            updates.append((dg, di))
        elif doId in self.sortedDoIds:
            args, dg0, updates = self.sortedDoIds[doId]

            # Keep a copy of the datagram, and move the di to the copy
            dg = Datagram(di.getDatagram())
            di = DatagramIterator(dg, di.getCurrentIndex())

            updates.append((dg, di))
        else:
            # This object has been fully generated. It's OK to update.
            self.__doUpdate(doId, di, ovUpdated)

    # The following two functions are unmodified, but are needed for the above to work.
    def __doUpdate(self, doId, di, ovUpdated):
        # Find the DO
        do = self.doId2do.get(doId)
        if do is not None:
            # Let the dclass finish the job
            do.dclass.receiveUpdate(do, di)
        elif not ovUpdated:
            # This next bit is looking for avatar handles so that if you get an update
            # for an avatar that isn't in your doId2do table, but there is an
            # avatar handle for that object, then its messages will be forwarded to that
            # object. We are currently using that for whisper echoing.
            # If you need a more general-purpose system, consider registering proxy objects on
            # a dict and adding the avatar handles to that dict when they are created,
            # then change/remove the old method. I didn't do that because I couldn't think
            # of a use for it. -JML
            try:
                handle = self.identifyAvatar(doId)
                if handle:
                    dclass = self.dclassesByName[handle.dclassName]
                    dclass.receiveUpdate(handle, di)
                else:
                    self.notify.warning(
                        "Asked to update non-existent DistObj " + str(doId))
            except:
                self.notify.warning(
                    "Asked to update non-existent DistObj " + str(doId) + " and failed to find it")
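
The fallback path in __doUpdate routes updates for a doId with no generated object to an avatar handle when one exists (used for whisper echoing, per the comment above). A simplified model of that dispatch; identifyAvatar and the handle class here are stand-ins, not the real API:

doId2do = {}                      # generated objects (empty in this demo)

class AvatarHandle:
    def receive(self, update):
        print('forwarded to handle:', update)

def identifyAvatar(doId):
    # Stand-in: pretend doId 42 has a known avatar handle.
    return AvatarHandle() if doId == 42 else None

def doUpdate(doId, update):
    do = doId2do.get(doId)
    if do is not None:
        do.receive(update)
    else:
        handle = identifyAvatar(doId)
        if handle:
            handle.receive(update)
        else:
            print('Asked to update non-existent DistObj', doId)

doUpdate(42, 'setWhisper')   # -> forwarded to handle: setWhisper
doUpdate(7, 'setX')          # -> warning path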

    def __doUpdateOwner(self, doId, di):
        ovObj = self.doId2ownerView.get(doId)
        if ovObj:
            odg = Datagram(di.getDatagram())
            odi = DatagramIterator(odg, di.getCurrentIndex())
            ovObj.dclass.receiveUpdate(ovObj, odi)
            return True
        return False

    def flushGenerates(self):
        """ Forces all pending generates to be performed immediately. """
        for deferredGenerates in self.deferredGenerates:
            for deferredGenerate in deferredGenerates:
                msgType, extra = deferredGenerate
                self.replayDeferredGenerate(msgType, extra)
        self.deferredGenerates = []
        taskMgr.remove('deferredGenerate')
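
The remaining hunks all remove per-class tuning attributes. The repository discovered them reflectively, so a class opted in simply by declaring sortOrder or deferFor; getattr with a default of 0 meant undeclared classes generated immediately. A small illustration with hypothetical stub classes (not the real game classes):

class DistributedObjectStub:
    pass

class ElevatorStub(DistributedObjectStub):
    sortOrder = 1          # generate after order-0 objects

class DoorStub(DistributedObjectStub):
    deferFor = 1           # generate one defer cycle late

for cls in (DistributedObjectStub, ElevatorStub, DoorStub):
    sortOrder = getattr(cls, 'sortOrder', 0)
    deferFor = getattr(cls, 'deferFor', 0)
    print(cls.__name__, 'sortOrder=%d deferFor=%d' % (sortOrder, deferFor))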

@@ -32,7 +32,6 @@ class DistributedBattleBase(DistributedNode.DistributedNode, BattleBase):
    camMenuFov = ToontownBattleGlobals.BattleCamMenuFov
    camJoinPos = ToontownBattleGlobals.BattleCamJoinPos
    camJoinHpr = ToontownBattleGlobals.BattleCamJoinHpr
    sortOrder = 2

    def __init__(self, cr, townBattle):
        DistributedNode.DistributedNode.__init__(self, cr)

@@ -24,7 +24,6 @@ if (__debug__):
    import pdb

class DistributedDoor(DistributedObject.DistributedObject, DelayDeletable):
    deferFor = 1

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)

@@ -18,7 +18,6 @@ from toontown.building import BoardingGroupShow
class DistributedElevator(DistributedObject.DistributedObject):
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedElevator')
    JumpOutOffsets = JumpOutOffsets
    sortOrder = 1

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)

@@ -16,7 +16,6 @@ from otp.nametag.Nametag import Nametag
from otp.nametag.NametagConstants import *

class DistributedElevatorExt(DistributedElevator.DistributedElevator):
    sortOrder = 1

    def __init__(self, cr):
        DistributedElevator.DistributedElevator.__init__(self, cr)

@@ -20,7 +20,6 @@ from otp.speedchat import SpeedChatGlobals

class DistributedElectionEvent(DistributedObject, FSM):
    notify = DirectNotifyGlobal.directNotify.newCategory("DistributedElectionEvent")
    deferFor = 1

    def __init__(self, cr):
        DistributedObject.__init__(self, cr)

@@ -8,7 +8,6 @@ import webbrowser
import SafezoneInvasionGlobals

class DistributedSafezoneInvasion(DistributedObject):
    deferFor = 1

    def __init__(self, cr):
        DistributedObject.__init__(self, cr)

@@ -12,7 +12,6 @@ import HouseGlobals

class DistributedFurnitureItem(DistributedHouseItem.DistributedHouseItem, DistributedSmoothNode.DistributedSmoothNode):
    notify = directNotify.newCategory('DistributedFurnitureItem')
    deferFor = 1

    def __init__(self, cr):
        DistributedHouseItem.DistributedHouseItem.__init__(self, cr)

@@ -16,7 +16,6 @@ from toontown.parties.JellybeanRewardGui import JellybeanRewardGui
from toontown.parties.PartyUtils import getPartyActivityIcon, getCenterPosFromGridSize

class DistributedPartyActivity(DistributedObject.DistributedObject):
    deferFor = 1  # We need to defer the generation of activities 1 frame, as the party must generate first

    def __init__(self, cr, activityId, activityType, wantLever = False, wantRewardGui = False):
        DistributedObject.DistributedObject.__init__(self, cr)
        self.activityId = activityId

@@ -37,7 +37,6 @@ INITIAL_VELOCITY = 80.0
WHISTLE_SPEED = INITIAL_VELOCITY * 0.35

class DistributedPartyCannon(DistributedObject, Cannon):
    deferFor = 2
    notify = directNotify.newCategory('DistributedPartyCannon')
    LOCAL_CANNON_MOVE_TASK = 'localCannonMoveTask'

@@ -33,7 +33,6 @@ ALLOW_BATTLE_DETECT = 1
class DistributedSuit(DistributedSuitBase.DistributedSuitBase, DelayDeletable):
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedSuit')
    ENABLE_EXPANDED_NAME = 0
    sortOrder = 1

    def __init__(self, cr):
        try:

@@ -15,7 +15,6 @@ from direct.interval.IntervalGlobal import *
import random

class DistributedNPCToonBase(DistributedToon.DistributedToon):
    deferFor = 1

    def __init__(self, cr):
        try:

@@ -85,7 +85,6 @@ from ToonBaseGlobal import *
from direct.showbase.MessengerGlobal import *
from toontown.distributed import ToontownClientRepository
cr = ToontownClientRepository.ToontownClientRepository(serverVersion, launcher)
cr.setDeferInterval(1)
cr.music = music
del music
base.initNametagGlobals()