-    def _read_data(self, file):
-        import transaction
-
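-        # 1 << 16 = 64 KiB: the chunk size used to split large uploads.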
-        n = 1 << 16
-
-        if isinstance(file, str):
-            size = len(file)
-            if size < n: return file, size
-            # Big string: cut it into smaller chunks
-            file = StringIO(file)
-
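-        # A FileUpload with nothing behind it is falsy: the form field
-        # was left empty.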
-        if isinstance(file, FileUpload) and not file:
-            raise ValueError('File not specified')
-
-        if hasattr(file, '__class__') and file.__class__ is Pdata:
-            size = len(file)
-            return file, size
-
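-        # Bind the methods once; they are called repeatedly below.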
-        seek = file.seek
-        read = file.read
-
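-        # seek(0, 2) jumps to EOF, so tell() reports the total size.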
-        seek(0, 2)
-        size = end = file.tell()
-
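-        # Fast path: anything that fits in at most two chunks is stored
-        # whole, as a plain string or a single Pdata, with no linked list.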
-        if size <= 2 * n:
-            seek(0)
-            if size < n: return read(size), size
-            return Pdata(read(size)), size
-
-        # Make sure we have an _p_jar, even if we are a new object, by
-        # doing a sub-transaction commit. The optimistic savepoint also
-        # works for data managers that cannot roll back.
-        transaction.savepoint(optimistic=True)
-
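-        # The savepoint can still leave a brand-new, unattached object
-        # without a jar; fall back to an in-memory Pdata in that case.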
-        if self._p_jar is None:
-            # Ugh: still no jar; keep the whole thing in one Pdata.
-            seek(0)
-            return Pdata(read(size)), size
-
-        # Now we're going to build a linked list from back
-        # to front to minimize the number of database updates
-        # and to allow us to get things out of memory as soon as
-        # possible.
-        next = None
-        while end > 0:
-            pos = end - n
-            if pos < n:
-                pos = 0  # we always want at least n bytes
-            seek(pos)
-
-            # Create the object and assign it a next pointer
-            # in the same transaction, so that there is only
-            # a single database update for it.
-            data = Pdata(read(end - pos))
-            self._p_jar.add(data)
-            data.next = next
-
-            # Save the object so that we can release its memory.
-            transaction.savepoint(optimistic=True)
-            data._p_deactivate()
-            # The object should now have an oid and be a ghost
-            # (_p_state == -1 means GHOST: its state is out of memory).
-            assert data._p_oid is not None
-            assert data._p_state == -1
-
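-            # Move one chunk towards the front: the node just written
-            # becomes the successor of the chunk we read next.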
-            next = data
-            end = pos
-
-        return next, size
-
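For reference, a minimal consumer sketch (not part of the patch) showing how
the value returned by _read_data() can be streamed chunk by chunk. It assumes
Pdata nodes expose .data (the raw chunk) and .next, as in the hunk above;
iter_chunks is a hypothetical helper name.

    def iter_chunks(data):
        # Small files come back as a plain string.
        if isinstance(data, str):
            yield data
            return
        # Larger files are a linked list of Pdata nodes; following .next
        # loads (un-ghosts) only one ~64 KiB node at a time.
        while data is not None:
            yield data.data
            data = data.next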