#!/usr/bin/env python
#
# Copyright 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains the admin request handlers for the app (those that require
administrative access).
"""

import csv
import logging
import os
import urllib
import uuid

from base_handler import BaseHandler
import categories
import config
import docs
import errors
import models
import stores
import utils

from google.appengine.api import users
from google.appengine.ext.deferred import defer
from google.appengine.ext import ndb
from google.appengine.api import search


def reinitAll(sample_data=True):
  """
  Deletes all product entities and documents, essentially resetting the app
  state, then loads in static sample data if requested. Hardwired for the
  expected product types in the sample data.
  This function is intended to be run 'offline' (e.g., via a Task Queue task).
  As an extension to this functionality, the channel ID could be used to notify
  when done."""

  # delete all the product and review entities
  review_keys = models.Review.query().fetch(keys_only=True)
  ndb.delete_multi(review_keys)
  prod_keys = models.Product.query().fetch(keys_only=True)
  ndb.delete_multi(prod_keys)
  # delete all the associated product documents in the doc and
  # store indexes
  docs.Product.deleteAllInProductIndex()
  docs.Store.deleteAllInIndex()
  # load in sample data if indicated
  if sample_data:
    logging.info('Loading product sample data')
    # Load from csv sample files.
    # The following are hardwired to the format of the sample data files
    # for the two example product types ('books' and 'hd televisions')-- see
    # categories.py
    datafile = os.path.join('data', config.SAMPLE_DATA_BOOKS)
    # books
    reader = csv.DictReader(
        open(datafile, 'r'),
        ['pid', 'name', 'category', 'price',
         'publisher', 'title', 'pages', 'author',
         'description', 'isbn'])
    importData(reader)
    datafile = os.path.join('data', config.SAMPLE_DATA_TVS)
    # tvs
    reader = csv.DictReader(
        open(datafile, 'r'),
        ['pid', 'name', 'category', 'price',
         'size', 'brand', 'tv_type',
         'description'])
    importData(reader)

    # next create docs from store location info
    loadStoreLocationData()

  logging.info('Re-initialization complete.')
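
# reinitAll() is intended to run outside of a user-facing request. The admin
# handler below schedules it as a deferred task, e.g.:
#
#   defer(reinitAll)                     # reset and reload the sample data
#   defer(reinitAll, sample_data=False)  # just clear the existing data
#
# (Only the no-argument form is actually used in this sample; the
# sample_data=False variant simply illustrates the function's signature.)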


def loadStoreLocationData():
  # create documents from store location info;
  # currently logs but otherwise swallows search errors.
  slocs = stores.stores
  for s in slocs:
    logging.info("s: %s", s)
    geopoint = search.GeoPoint(s[3][0], s[3][1])
    fields = [search.TextField(name='storename', value=s[1]),
              search.TextField(name='address', value=s[2]),
              search.GeoField(name='store_location', value=geopoint)]
    d = search.Document(doc_id=s[0], fields=fields)
    try:
      add_result = search.Index(config.STORE_INDEX_NAME).add(d)
    except search.Error:
      logging.exception("Error adding document:")
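

# The store_location GeoField indexed above is what the geosearch
# demonstration queries against. The helper below is an illustrative sketch
# only -- it is not part of the original sample, its name is made up, and the
# default radius is arbitrary -- showing how the Search API's distance()
# query function can match documents near a given point.
def exampleNearbyStores(lat, lon, radius_meters=40000):
  """Illustrative only: search for stores within radius_meters of (lat, lon)."""
  query_string = 'distance(store_location, geopoint(%f, %f)) < %d' % (
      lat, lon, radius_meters)
  return search.Index(config.STORE_INDEX_NAME).search(query_string)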


def importData(reader):
  """Import via the csv reader iterator using the specified batch size as set in
  the config file. We want to ensure the batch is not too large-- we allow 100
  rows/products max per batch."""
  MAX_BATCH_SIZE = 100
  rows = []
  # index in batches
  # ensure the batch size in the config file is not over the max or < 1.
  batchsize = utils.intClamp(config.IMPORT_BATCH_SIZE, 1, MAX_BATCH_SIZE)
  logging.debug('batchsize: %s', batchsize)
  for row in reader:
    if len(rows) == batchsize:
      docs.Product.buildProductBatch(rows)
      rows = [row]
    else:
      rows.append(row)
  if rows:
    docs.Product.buildProductBatch(rows)
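
# For example, assuming utils.intClamp(value, low, high) pins a value into the
# given range, a config.IMPORT_BATCH_SIZE of 500 would be clamped down to
# MAX_BATCH_SIZE (100) and a value of 0 would be raised to 1, so each call to
# docs.Product.buildProductBatch above indexes between 1 and 100 rows.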


class AdminHandler(BaseHandler):
  """Displays the admin page."""

  def buildAdminPage(self, notification=None):
    # If necessary, build the app's product categories now. This is done only
    # if there are no Category entities in the datastore.
    models.Category.buildAllCategories()
    tdict = {
        'sampleb': config.SAMPLE_DATA_BOOKS,
        'samplet': config.SAMPLE_DATA_TVS,
        'update_sample': config.DEMO_UPDATE_BOOKS_DATA}
    if notification:
      tdict['notification'] = notification
    self.render_template('admin.html', tdict)

  @BaseHandler.logged_in
  def get(self):
    action = self.request.get('action')
    if action == 'reinit':
      # reinitialise the app data to the sample data
      defer(reinitAll)
      self.buildAdminPage(notification="Reinitialization performed.")
    elif action == 'demo_update':
      # update the sample data, from (hardwired) book update
      # data. Demonstrates updating some existing products, and adding some new
      # ones.
      logging.info('Loading product sample update data')
      # The following is hardwired to the known format of the sample data file
      datafile = os.path.join('data', config.DEMO_UPDATE_BOOKS_DATA)
      reader = csv.DictReader(
          open(datafile, 'r'),
          ['pid', 'name', 'category', 'price',
           'publisher', 'title', 'pages', 'author',
           'description', 'isbn'])
      for row in reader:
        docs.Product.buildProduct(row)
      self.buildAdminPage(notification="Demo update performed.")
    elif action == 'update_ratings':
      self.update_ratings()
      self.buildAdminPage(notification="Ratings update performed.")
    else:
      self.buildAdminPage()

  def update_ratings(self):
    """Find the products that have had an average ratings change, and need their
    associated documents updated (re-indexed) to reflect that change; and
    re-index those docs in batch. There will only
    be such products if config.BATCH_RATINGS_UPDATE is True; otherwise the
    associated documents will be updated right away."""
    # get the pids of the products that need review info updated in their
    # associated documents.
    pkeys = models.Product.query(
        models.Product.needs_review_reindex == True).fetch(keys_only=True)
    # re-index these docs in batch
    models.Product.updateProdDocsWithNewRating(pkeys)
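    # Note: this batch pass only finds work to do when
    # config.BATCH_RATINGS_UPDATE is True; in that mode, as the docstring
    # above describes, rating changes are presumably just flagged (via
    # needs_review_reindex) when a review is written, and the affected
    # documents are re-indexed together here rather than one at a time.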


class DeleteProductHandler(BaseHandler):
  """Remove data for the product with the given pid, including that product's
  reviews and its associated indexed document."""

  @BaseHandler.logged_in
  def post(self):
    pid = self.request.get('pid')
    if not pid:  # this should not be reached
      msg = 'There was a problem: no product id given.'
      logging.error(msg)
      url = '/'
      linktext = 'Go to product search page.'
      self.render_template(
          'notification.html',
          {'title': 'Error', 'msg': msg,
           'goto_url': url, 'linktext': linktext})
      return

    # Delete the product entity within a transaction, and define transactional
    # tasks for deleting the product's reviews and its associated document.
    # These tasks will only be run if the transaction successfully commits.
    def _tx():
      prod = models.Product.get_by_id(pid)
      if prod:
        prod.key.delete()
        defer(models.Review.deleteReviews, prod.key.id(), _transactional=True)
        defer(
            docs.Product.removeProductDocByPid,
            prod.key.id(), _transactional=True)

    ndb.transaction(_tx)
    # indicate success
    msg = (
        'The product with product id %s has been ' +
        'successfully removed.') % (pid,)
    url = '/'
    linktext = 'Go to product search page.'
    self.render_template(
        'notification.html',
        {'title': 'Product Removed', 'msg': msg,
         'goto_url': url, 'linktext': linktext})


class CreateProductHandler(BaseHandler):
  """Handler to create a new product: this constitutes both a product entity
  and its associated indexed document."""

  def parseParams(self):
    """Filter the param set to the expected params."""

    pid = self.request.get('pid')
    doc = docs.Product.getDocFromPid(pid)
    params = {}
    if doc:  # populate default params from the doc
      fields = doc.fields
      for f in fields:
        params[f.name] = f.value
    else:
      # start with the 'core' fields
      params = {
          'pid': uuid.uuid4().hex,  # auto-generate default UID
          'name': '',
          'description': '',
          'category': '',
          'price': ''}
      pf = categories.product_dict
      # add the fields specific to the categories
      for _, cdict in pf.iteritems():
        temp = {}
        for elt in cdict.keys():
          temp[elt] = ''
        params.update(temp)

    for k, v in params.iteritems():
      # Process the request params. Possibly replace default values.
      params[k] = self.request.get(k, v)
    return params

  @BaseHandler.logged_in
  def get(self):
    params = self.parseParams()
    self.render_template('create_product.html', params)

  @BaseHandler.logged_in
  def post(self):
    self.createProduct(self.parseParams())

  def createProduct(self, params):
    """Create a product entity and associated document from the given params
    dict."""

    try:
      product = docs.Product.buildProduct(params)
      self.redirect(
          '/product?' + urllib.urlencode(
              {'pid': product.pid, 'pname': params['name'],
               'category': product.category}))
    except errors.Error as e:
      logging.exception('Error:')
      params['error_message'] = e.error_message
      self.render_template('create_product.html', params)