# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
import logging

from google.appengine.ext import db
# Maximum number of DataEntry segments a single file may occupy.
MAX_DATA_ENTRY_PER_FILE = 10
# Maximum size in bytes of the data stored in one DataEntry segment.
# (~1 MB; presumably chosen to fit the datastore per-entity size limit —
# confirm against App Engine quota documentation.)
MAX_ENTRY_LEN = 1000 * 1000
class DataEntry(db.Model):
    """Datastore entry that stores one segment of file data
       (<= MAX_ENTRY_LEN bytes).
    """

    # Raw bytes of this segment of the file.
    data = db.BlobProperty()

    def get_data(self, key):
        """Return the entity stored under the given datastore key,
        or None if no entity exists for that key."""
        # NOTE(review): body reconstructed from a truncated source —
        # presumed to be a plain key lookup; confirm against callers.
        return db.get(key)
class DataStoreFile(db.Model):
    """This class stores a file in the datastore.
       If a file is oversize (> MAX_ENTRY_LEN bytes), the file is split into
       multiple segments and stored in multiple datastore entries.
    """

    name = db.StringProperty()
    # Keys of the DataEntry segments holding the file's current data.
    data_keys = db.ListProperty(db.Key)
    # Keys to the data store entries that can be reused for new data.
    # If it is empty, create new DataEntry.
    new_data_keys = db.ListProperty(db.Key)
    date = db.DateTimeProperty(auto_now_add=True)

    # In-memory copy of the file contents, populated by save_data/load_data.
    data = None

    def delete_data(self, keys=None):
        """Delete the DataEntry entities for the given keys.

        Args:
            keys: keys of the entries to delete; defaults to this file's
                data_keys when None/empty.
        """
        if not keys:
            keys = self.data_keys

        for key in keys:
            data_entry = DataEntry.get(key)
            # Entry may already be gone; only delete what actually exists.
            if data_entry:
                data_entry.delete()

    def save_data(self, data):
        """Split data into MAX_ENTRY_LEN-sized segments and store them.

        Args:
            data: the file contents to store.

        Returns:
            True on success; False when data is empty, exceeds the
            per-file limit, or a datastore write fails.
        """
        if not data:
            logging.warning("No data to save.")
            return False

        if len(data) > (MAX_DATA_ENTRY_PER_FILE * MAX_ENTRY_LEN):
            logging.error("File too big, can't save to datastore: %dK",
                len(data) / 1024)
            return False

        start = 0
        # Use the new_data_keys to store new data. If all new data are saved
        # successfully, swap new_data_keys and data_keys so we can reuse the
        # data_keys entries in next run. If unable to save new data for any
        # reason, only the data pointed by new_data_keys may be corrupted,
        # the existing data_keys data remains untouched. The corrupted data
        # in new_data_keys will be overwritten in next update.
        keys = self.new_data_keys
        self.new_data_keys = []
        while start < len(data):
            if keys:
                # Reuse an existing entry if one is available.
                key = keys[0]
                data_entry = DataEntry.get(key)
                if not data_entry:
                    logging.warning("Found key, but no data entry: %s", key)
                    data_entry = DataEntry()
            else:
                data_entry = DataEntry()

            data_entry.data = db.Blob(data[start: start + MAX_ENTRY_LEN])
            try:
                data_entry.put()
            except Exception as err:
                logging.error("Failed to save data store entry: %s", err)
                # Drop the remaining reusable entries; the partially written
                # new_data_keys will be overwritten on the next update.
                self.delete_data(keys)
                return False

            logging.info("Data saved: %s.", data_entry.key())
            self.new_data_keys.append(data_entry.key())
            if keys:
                keys.pop(0)

            start = start + MAX_ENTRY_LEN

        # Any reusable entries left over were not needed for this (smaller)
        # file; remove them so they don't leak.
        if keys:
            self.delete_data(keys)

        # Swap the key lists: the previous data_keys entries become the
        # reusable pool for the next save.
        temp_keys = self.data_keys
        self.data_keys = self.new_data_keys
        self.new_data_keys = temp_keys
        self.data = data

        return True

    def load_data(self):
        """Reassemble the file contents from its DataEntry segments.

        Returns:
            The file data as a single string, or None when there is
            nothing to load or any segment is missing.
        """
        if not self.data_keys:
            logging.warning("No data to load.")
            return None

        data = []
        for key in self.data_keys:
            logging.info("Loading data for key: %s.", key)
            data_entry = DataEntry.get(key)
            if not data_entry:
                logging.error("No data found for key: %s.", key)
                return None
            data.append(data_entry.data)

        self.data = "".join(data)

        return self.data