def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
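# --- Illustrative usage (added sketch; not part of the original corpus) ---
# dist returns the *squared* Euclidean distance between two equal-length
# sequences; note that zip() silently truncates to the shorter input.
def _dist_example():
    assert dist((0, 0), (3, 4)) == 25       # 3**2 + 4**2
    assert dist([1, 2, 3], [1, 2, 3]) == 0  # identical points
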
def edit_category(category_id):
    category = Category.query.filter_by(id=category_id).first_or_404()

    form = CategoryForm(obj=category)
    if form.validate_on_submit():
        form.populate_obj(category)
        flash(_("Category updated."), "success")
        category.save()

    return render_template("management/category_form.html", form=form,
                           title=_("Edit Category"))

def delete_category(category_id):
    category = Category.query.filter_by(id=category_id).first_or_404()

    involved_users = User.query.filter(Forum.category_id == category.id,
                                       Topic.forum_id == Forum.id,
                                       Post.user_id == User.id).all()

    category.delete(involved_users)
    flash(_("Category with all associated forums deleted."), "success")
    return redirect(url_for("management.forums"))

def plugins():
    plugins = get_all_plugins()
    return render_template("management/plugins.html", plugins=plugins)

def enable_plugin(plugin):
    plugin = get_plugin_from_all(plugin)
    if plugin.enabled:
        flash(_("Plugin %(plugin)s is already enabled.",
                plugin=plugin.name), "info")
        return redirect(url_for("management.plugins"))

    try:
        plugin.enable()
        flash(_("Plugin %(plugin)s enabled. Please restart FlaskBB now.",
                plugin=plugin.name), "success")
    except OSError:
        flash(_("It seems that FlaskBB does not have enough filesystem "
                "permissions. Try removing the 'DISABLED' file by "
                "yourself instead."), "danger")

    return redirect(url_for("management.plugins"))

def disable_plugin(plugin):
    try:
        plugin = get_plugin(plugin)
    except KeyError:
        # The lookup failed, so ``plugin`` is still the plain name string
        # here; it has no ``.name`` attribute to read.
        flash(_("Plugin %(plugin)s not found.", plugin=plugin), "danger")
        return redirect(url_for("management.plugins"))

    try:
        plugin.disable()
        flash(_("Plugin %(plugin)s disabled. Please restart FlaskBB now.",
                plugin=plugin.name), "success")
    except OSError:
        flash(_("It seems that FlaskBB does not have enough filesystem "
                "permissions. Try creating the 'DISABLED' file by "
                "yourself instead."), "danger")

    return redirect(url_for("management.plugins"))

def uninstall_plugin(plugin):
    plugin = get_plugin_from_all(plugin)
    if plugin.uninstallable:
        plugin.uninstall()
        Setting.invalidate_cache()
        flash(_("Plugin has been uninstalled."), "success")
    else:
        flash(_("Cannot uninstall plugin."), "danger")

    return redirect(url_for("management.plugins"))

def plugin(self): return plugins.get(self.plugin_name)
def setUpModule():
    base.enabledPlugins.append('provenance')
    base.startServer()

def __repr__(self): return "Authorization(id={id})".format(id=self.id)
def tearDownModule(): base.stopServer()
def setUp(self):
    base.TestCase.setUp(self)
    # Create some test documents with an item
    admin = {
        'email': '[email protected]',
        'login': 'adminlogin',
        'firstName': 'Admin',
        'lastName': 'Last',
        'password': 'adminpassword',
        'admin': True
    }
    self.admin = self.model('user').createUser(**admin)

    user = {
        'email': '[email protected]',
        'login': 'goodlogin',
        'firstName': 'First',
        'lastName': 'Last',
        'password': 'goodpassword',
        'admin': False
    }
    self.user = self.model('user').createUser(**user)

    # Track folder, item, and setting provenance initially
    self.model('setting').set(
        constants.PluginSettings.PROVENANCE_RESOURCES, 'folder,setting')

    coll1 = {
        'name': 'Test Collection',
        'description': 'test coll',
        'public': True,
        'creator': self.admin
    }
    self.coll1 = self.model('collection').createCollection(**coll1)

    folder1 = {
        'parent': self.coll1,
        'parentType': 'collection',
        'name': 'Public test folder',
        'creator': self.admin
    }
    self.folder1 = self.model('folder').createFolder(**folder1)
    self.model('folder').setUserAccess(
        self.folder1, self.user, level=AccessType.WRITE, save=False)
    self.model('folder').setPublic(self.folder1, True, save=True)

    item1 = {
        'name': 'Public object',
        'creator': self.admin,
        'folder': self.folder1
    }
    self.item1 = self.model('item').createItem(**item1)

def _checkProvenance(self, resp, item, version, user, eventType,
                     matches=None, fileInfo=None, resource='item'):
    if resp is None:
        resp = self._getProvenance(item, user, resource=resource)
    self.assertStatusOk(resp)
    itemProvenance = resp.json
    self.assertEqual(itemProvenance['resourceId'], str(item['_id']))
    provenance = itemProvenance['provenance']
    self.assertEqual(provenance['eventType'], eventType)
    self.assertEqual(provenance['version'], version)
    self.assertEqual(str(provenance['eventUser']), str(user['_id']))
    if matches:
        for key in matches:
            self.assertEqual(provenance[key], matches[key])
    if fileInfo:
        for key in fileInfo:
            if isinstance(fileInfo[key], dict):
                for subkey in fileInfo[key]:
                    self.assertEqual(provenance['file'][0][key][subkey],
                                     fileInfo[key][subkey])
            else:
                self.assertEqual(provenance['file'][0][key], fileInfo[key])

def _getProvenance(self, item, user, version=None, resource='item',
                   checkOk=True):
    params = {}
    if version is not None:
        params = {'version': version}
    resp = self.request(
        path='/%s/%s/provenance' % (resource, item['_id']),
        method='GET', user=user, type='application/json', params=params)
    if checkOk:
        self.assertStatusOk(resp)
    return resp

def _getProvenanceAfterMetadata(self, item, meta, user):
    resp = self.request(path='/item/%s/metadata' % item['_id'],
                        method='PUT', user=user, body=json.dumps(meta),
                        type='application/json')
    self.assertStatusOk(resp)
    return self._getProvenance(item, user)

def testProvenanceItemMetadata(self):
    """
    Test item provenance endpoint with metadata and basic changes
    """
    item = self.item1
    user = self.user
    admin = self.admin

    # check that the first version of the item exists
    # ensure version 1, created by admin user, with creation event
    self._checkProvenance(None, item, 1, admin, 'creation')

    # update meta to {x:y}
    metadata1 = {'x': 'y'}
    resp = self._getProvenanceAfterMetadata(item, metadata1, admin)
    # ensure version 2, updated by admin user, with update event, and meta
    # in provenance matches
    self._checkProvenance(resp, item, 2, admin, 'update',
                          {'new': {'meta': metadata1}})

    # update meta to {} by regular user, we have to send in the key to
    # remove it but check the saved metadata against {}
    metadata2 = {'x': None}
    resp = self._getProvenanceAfterMetadata(item, metadata2, user)
    # ensure version 3, updated by regular user, with update event, and
    # meta in provenance matches
    self._checkProvenance(resp, item, 3, user, 'update',
                          {'old': {'meta': metadata1},
                           'new': {'meta': {}}})

    # update meta to {x:y} by regular user
    metadata3 = {'x': 'y'}
    resp = self._getProvenanceAfterMetadata(item, metadata3, user)
    # ensure version 4, updated by regular user, with update event, and
    # meta in provenance matches
    self._checkProvenance(resp, item, 4, user, 'update',
                          {'old': {'meta': {}},
                           'new': {'meta': metadata3}})

    # update meta to {x:z} by regular user
    metadata4 = {'x': 'z'}
    resp = self._getProvenanceAfterMetadata(item, metadata4, user)
    # ensure version 5, updated by regular user, with update event, and
    # meta in provenance matches
    self._checkProvenance(resp, item, 5, user, 'update',
                          {'old': {'meta': metadata3},
                           'new': {'meta': metadata4}})

    # update meta to {x:z, q:u} by regular user
    metadata5 = {'x': 'z', 'q': 'u'}
    resp = self._getProvenanceAfterMetadata(item, metadata5, user)
    # ensure version 6, updated by regular user, with update event, and
    # meta in provenance matches
    self._checkProvenance(resp, item, 6, user, 'update',
                          {'old': {'meta': metadata4},
                           'new': {'meta': metadata5}})

    # update meta to {q:a} by regular user
    metadata6 = {'x': None, 'q': 'a'}
    resp = self._getProvenanceAfterMetadata(item, metadata6, user)
    # ensure version 7, updated by regular user, with update event, and
    # meta in provenance matches
    self._checkProvenance(resp, item, 7, user, 'update',
                          {'old': {'meta': metadata5},
                           'new': {'meta': {'q': 'a'}}})

    # Change the item name and description
    params = {'name': 'Renamed object', 'description': 'New description'}
    resp = self.request(path='/item/%s' % item['_id'], method='PUT',
                        user=admin, params=params)
    self.assertStatusOk(resp)
    params['lowerName'] = params['name'].lower()
    self._checkProvenance(None, item, 8, admin, 'update', {'new': params})

    # Copy the item and check that we marked it as copied
    params = {'name': 'Copied object'}
    resp = self.request(path='/item/%s/copy' % item['_id'],
                        method='POST', user=admin, params=params)
    self.assertStatusOk(resp)
    newItem = resp.json
    self._checkProvenance(None, newItem, 9, admin, 'copy',
                          {'originalId': str(item['_id'])})

def testProvenanceItemFiles(self):
    """
    Test item provenance when adding, modifying, and deleting files.
    """
    item = self.item1
    admin = self.admin

    # Test adding a new file to an existing item
    fileData1 = 'Hello world'
    fileData2 = 'Hello world, again'
    fileName1 = 'helloWorld.txt'
    fileName2 = 'helloWorldEdit.txt'
    resp = self.request(
        path='/file', method='POST', user=admin, params={
            'parentType': 'item',
            'parentId': item['_id'],
            'name': fileName1,
            'size': len(fileData1),
            'mimeType': 'text/plain'
        })
    self.assertStatusOk(resp)
    uploadId = resp.json['_id']
    fields = [('offset', 0), ('uploadId', uploadId)]
    files = [('chunk', fileName1, fileData1)]
    resp = self.multipartRequest(
        path='/file/chunk', user=admin, fields=fields, files=files)
    self.assertStatusOk(resp)
    file1 = resp.json
    self._checkProvenance(None, item, 2, admin, 'fileAdded',
                          fileInfo={'fileId': str(file1['_id']),
                                    'new': {'mimeType': 'text/plain',
                                            'size': len(fileData1),
                                            'name': fileName1}})

    # Edit the file name
    resp = self.request(path='/file/%s' % file1['_id'], method='PUT',
                        user=admin, params={'name': fileName2})
    self.assertStatusOk(resp)
    self._checkProvenance(None, item, 3, admin, 'fileUpdate',
                          fileInfo={'fileId': str(file1['_id']),
                                    'old': {'name': fileName1},
                                    'new': {'name': fileName2}})

    # Reupload the file
    resp = self.request(path='/file/%s/contents' % file1['_id'],
                        method='PUT', user=admin,
                        params={'size': len(fileData2)})
    self.assertStatusOk(resp)
    uploadId = resp.json['_id']
    fields = [('offset', 0), ('uploadId', uploadId)]
    files = [('chunk', fileName1, fileData2)]
    resp = self.multipartRequest(
        path='/file/chunk', user=admin, fields=fields, files=files)
    self.assertStatusOk(resp)
    self.assertEqual(file1['_id'], resp.json['_id'])
    self._checkProvenance(None, item, 4, admin, 'fileUpdate',
                          fileInfo={'fileId': str(file1['_id']),
                                    'old': {'size': len(fileData1)},
                                    'new': {'size': len(fileData2)}})

    # Delete the file
    resp = self.request(path='/file/%s' % file1['_id'],
                        method='DELETE', user=admin)
    self.assertStatusOk(resp)
    self._checkProvenance(None, item, 5, admin, 'fileRemoved',
                          fileInfo={'fileId': str(file1['_id']),
                                    'old': {'size': len(fileData2),
                                            'name': fileName2}})

def testProvenanceFolder(self):
    """
    Test folder provenance, including turning off and on the provenance
    handling of folders.
    """
    folder1 = self.folder1
    user = self.admin

    # check that the first version of the folder provenance exists
    self._checkProvenance(None, folder1, 1, user, 'creation',
                          resource='folder')

    # Edit the folder and check again
    params1 = {'name': 'Renamed folder', 'description': 'New description'}
    resp = self.request(path='/folder/%s' % folder1['_id'], method='PUT',
                        user=user, params=params1)
    self.assertStatusOk(resp)
    params1['lowerName'] = params1['name'].lower()
    self._checkProvenance(None, folder1, 2, user, 'update',
                          {'new': params1}, resource='folder')

    # Turn off folder provenance and make sure asking for it fails
    self.model('setting').set(
        constants.PluginSettings.PROVENANCE_RESOURCES, 'setting')
    resp = self._getProvenance(folder1, user, resource='folder',
                               checkOk=False)
    self.assertStatus(resp, 400)

    # While folder provenance is off, create a second folder and edit the
    # first folder
    params2 = {'name': 'Renamed Again', 'description': 'Description 2'}
    resp = self.request(path='/folder/%s' % folder1['_id'], method='PUT',
                        user=user, params=params2)
    self.assertStatusOk(resp)
    params2['lowerName'] = params2['name'].lower()
    folder2 = {
        'parent': self.coll1,
        'parentType': 'collection',
        'name': 'Private test folder',
        'creator': self.admin
    }
    folder2 = self.model('folder').createFolder(**folder2)

    # Turn back on folder provenance and check that it didn't record the
    # changes we made.
    self.model('setting').set(
        constants.PluginSettings.PROVENANCE_RESOURCES, 'folder,setting')
    self._checkProvenance(None, folder1, 2, user, 'update',
                          {'new': params1}, resource='folder')

    # Changing folder1 again should now show this change, and the old
    # value should show the gap in the data
    params3 = {'name': 'Renamed C', 'description': 'Description 3'}
    resp = self.request(path='/folder/%s' % folder1['_id'], method='PUT',
                        user=user, params=params3)
    self.assertStatusOk(resp)
    params3['lowerName'] = params3['name'].lower()
    self._checkProvenance(None, folder1, 3, user, 'update',
                          {'old': params2, 'new': params3},
                          resource='folder')

    # The new folder should have no provenance
    resp = self._getProvenance(folder2, user, resource='folder')
    self.assertEqual(resp.json['resourceId'], str(folder2['_id']))
    self.assertIsNone(resp.json['provenance'])

    # Edit the new folder; it should show the unknown history followed by
    # the edit
    params4 = {'description': 'Folder 2 Description'}
    resp = self.request(path='/folder/%s' % folder2['_id'], method='PUT',
                        user=user, params=params4)
    self.assertStatusOk(resp)
    resp = self._getProvenance(folder2, user, 1, resource='folder')
    self._checkProvenance(resp, folder2, 1, user, 'unknownHistory',
                          resource='folder')
    self._checkProvenance(None, folder2, 2, user, 'update',
                          {'new': params4}, resource='folder')

    # We should also see the initial history using negative indexing
    resp = self._getProvenance(folder2, user, -2, resource='folder')
    self._checkProvenance(resp, folder2, 1, user, 'unknownHistory',
                          resource='folder')

    # We should be able to get the entire history using 'all'
    resp = self._getProvenance(folder2, user, 'all', resource='folder')
    self.assertEqual(resp.json['resourceId'], str(folder2['_id']))
    self.assertEqual(len(resp.json['provenance']), 2)
    self.assertEqual(resp.json['provenance'][0]['eventType'],
                     'unknownHistory')
    self.assertEqual(resp.json['provenance'][1]['eventType'], 'update')

    # We should get an error if we ask for a nonsense version
    resp = self._getProvenance(folder2, user, 'not_a_version',
                               resource='folder', checkOk=False)
    self.assertStatus(resp, 400)

def _rmsprop_update_numpy(self, var, g, mg, rms, mom, lr, decay, momentum,
                          centered):
    rms_t = rms * decay + (1 - decay) * g * g
    if centered:
        mg_t = mg * decay + (1 - decay) * g
        denom_t = rms_t - mg_t * mg_t
    else:
        mg_t = mg
        denom_t = rms_t
    mom_t = momentum * mom + lr * g / np.sqrt(denom_t, dtype=denom_t.dtype)
    var_t = var - mom_t
    return var_t, mg_t, rms_t, mom_t

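# --- Illustrative check (added sketch; assumes numpy as np, as above) ---
# One uncentered step with decay=0.9, lr=2.0, momentum=0 and rms seeded at
# epsilon=1.0 reproduces the constants asserted in the tests below:
# rms_t = 0.9 * 1.0 + 0.1 * 0.1**2 = 0.901, step = lr * g / sqrt(rms_t).
def _rmsprop_reference_example(self):
    var = np.array([1.0])
    g = np.array([0.1])
    zero = np.zeros(1)
    var_t, _, rms_t, _ = self._rmsprop_update_numpy(
        var, g, zero, np.array([1.0]), zero,
        lr=2.0, decay=0.9, momentum=0.0, centered=False)
    assert np.allclose(rms_t, 0.901)
    assert np.allclose(var_t, 1.0 - 2.0 * 0.1 / np.sqrt(0.901))
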
def _sparse_rmsprop_update_numpy(self, var, gindexs, gvalues, mg, rms, mom,
                                 lr, decay, momentum, centered):
    mg_t = copy.deepcopy(mg)
    rms_t = copy.deepcopy(rms)
    mom_t = copy.deepcopy(mom)
    var_t = copy.deepcopy(var)
    for i in range(len(gindexs)):
        gindex = gindexs[i]
        gvalue = gvalues[i]
        rms_t[gindex] = rms[gindex] * decay + (1 - decay) * gvalue * gvalue
        denom_t = rms_t[gindex]
        if centered:
            mg_t[gindex] = mg_t[gindex] * decay + (1 - decay) * gvalue
            denom_t -= mg_t[gindex] * mg_t[gindex]
        mom_t[gindex] = momentum * mom[gindex] + lr * gvalue / np.sqrt(denom_t)
        var_t[gindex] = var[gindex] - mom_t[gindex]
    return var_t, mg_t, rms_t, mom_t

def testDense(self, dtype, param_value):
    (learning_rate, decay, momentum, epsilon, centered,
     use_resource) = tuple(param_value)
    with self.session(use_gpu=True):
        # Initialize variables for numpy implementation.
        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
        grads0_np = np.array([0.1, 0.2], dtype=dtype.as_numpy_dtype)
        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
        grads1_np = np.array([0.01, 0.2], dtype=dtype.as_numpy_dtype)

        if use_resource:
            var0 = resource_variable_ops.ResourceVariable(var0_np)
            var1 = resource_variable_ops.ResourceVariable(var1_np)
        else:
            var0 = variables.Variable(var0_np)
            var1 = variables.Variable(var1_np)
        grads0 = constant_op.constant(grads0_np)
        grads1 = constant_op.constant(grads1_np)
        opt = rmsprop.RMSPropOptimizer(
            learning_rate=learning_rate, decay=decay, momentum=momentum,
            epsilon=epsilon, centered=centered)

        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()

        mg0 = opt.get_slot(var0, "mg")
        self.assertEqual(mg0 is not None, centered)
        mg1 = opt.get_slot(var1, "mg")
        self.assertEqual(mg1 is not None, centered)
        rms0 = opt.get_slot(var0, "rms")
        self.assertIsNotNone(rms0)
        rms1 = opt.get_slot(var1, "rms")
        self.assertIsNotNone(rms1)
        mom0 = opt.get_slot(var0, "momentum")
        self.assertIsNotNone(mom0)
        mom1 = opt.get_slot(var1, "momentum")
        self.assertIsNotNone(mom1)

        mg0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        mg1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        rms0_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype)
        rms1_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype)
        mom0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        mom1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)

        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())

        # Run 4 steps of RMSProp
        for _ in range(4):
            update.run()

            var0_np, mg0_np, rms0_np, mom0_np = self._rmsprop_update_numpy(
                var0_np, grads0_np, mg0_np, rms0_np, mom0_np, learning_rate,
                decay, momentum, centered)
            var1_np, mg1_np, rms1_np, mom1_np = self._rmsprop_update_numpy(
                var1_np, grads1_np, mg1_np, rms1_np, mom1_np, learning_rate,
                decay, momentum, centered)

            # Validate updated params
            if centered:
                self.assertAllCloseAccordingToType(mg0_np, mg0.eval())
                self.assertAllCloseAccordingToType(mg1_np, mg1.eval())
            self.assertAllCloseAccordingToType(rms0_np, rms0.eval())
            self.assertAllCloseAccordingToType(rms1_np, rms1.eval())
            self.assertAllCloseAccordingToType(mom0_np, mom0.eval())
            self.assertAllCloseAccordingToType(mom1_np, mom1.eval())

            # TODO(b/117393988): Reduce tolerances for float16.
            self.assertAllCloseAccordingToType(
                var0_np, var0.eval(), half_rtol=3e-3, half_atol=3e-3)
            self.assertAllCloseAccordingToType(
                var1_np, var1.eval(), half_rtol=3e-3, half_atol=3e-3)

def testMinimizeSparseResourceVariable(self, dtype):
    with self.cached_session():
        var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]],
                                                      dtype=dtype)
        x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
        pred = math_ops.matmul(
            embedding_ops.embedding_lookup([var0], [0]), x)
        loss = pred * pred
        sgd_op = rmsprop.RMSPropOptimizer(
            learning_rate=1.0, decay=0.0, momentum=0.0, epsilon=0.0,
            centered=False).minimize(loss)
        variables.global_variables_initializer().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([[1.0, 2.0]], var0.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType([[0., 1.]], var0.eval(),
                                           atol=0.01)

def testMinimizeSparseResourceVariableCentered(self, dtype):
    with self.cached_session():
        var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]],
                                                      dtype=dtype)
        x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
        pred = math_ops.matmul(
            embedding_ops.embedding_lookup([var0], [0]), x)
        loss = pred * pred
        sgd_op = rmsprop.RMSPropOptimizer(
            learning_rate=1.0, decay=0.1, momentum=0.0, epsilon=1.0,
            centered=True).minimize(loss)
        variables.global_variables_initializer().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([[1.0, 2.0]], var0.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType([[-7 / 3.0, -4 / 3.0]],
                                           var0.eval(), atol=0.01)

def testSparse(self, dtype, param_value):
    (learning_rate, decay, momentum, epsilon, centered,
     _) = tuple(param_value)
    with self.session(use_gpu=True):
        # Initialize variables for numpy implementation.
        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
        grads0_np = np.array([0.1], dtype=dtype.as_numpy_dtype)
        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
        grads1_np = np.array([0.01], dtype=dtype.as_numpy_dtype)

        var0 = variables.Variable(var0_np)
        var1 = variables.Variable(var1_np)
        grads0_np_indices = np.array([0], dtype=np.int32)
        grads0 = ops.IndexedSlices(
            constant_op.constant(grads0_np),
            constant_op.constant(grads0_np_indices),
            constant_op.constant([1]))
        grads1_np_indices = np.array([1], dtype=np.int32)
        grads1 = ops.IndexedSlices(
            constant_op.constant(grads1_np),
            constant_op.constant(grads1_np_indices),
            constant_op.constant([1]))
        opt = rmsprop.RMSPropOptimizer(
            learning_rate=learning_rate, decay=decay, momentum=momentum,
            epsilon=epsilon, centered=centered)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()

        mg0 = opt.get_slot(var0, "mg")
        self.assertEqual(mg0 is not None, centered)
        mg1 = opt.get_slot(var1, "mg")
        self.assertEqual(mg1 is not None, centered)
        rms0 = opt.get_slot(var0, "rms")
        self.assertIsNotNone(rms0)
        rms1 = opt.get_slot(var1, "rms")
        self.assertIsNotNone(rms1)
        mom0 = opt.get_slot(var0, "momentum")
        self.assertIsNotNone(mom0)
        mom1 = opt.get_slot(var1, "momentum")
        self.assertIsNotNone(mom1)

        mg0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        mg1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        rms0_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype)
        rms1_np = np.array([epsilon, epsilon], dtype=dtype.as_numpy_dtype)
        mom0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
        mom1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)

        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())

        # Run 4 steps of RMSProp
        for _ in range(4):
            update.run()

            var0_np, mg0_np, rms0_np, mom0_np = (
                self._sparse_rmsprop_update_numpy(
                    var0_np, grads0_np_indices, grads0_np, mg0_np, rms0_np,
                    mom0_np, learning_rate, decay, momentum, centered))
            var1_np, mg1_np, rms1_np, mom1_np = (
                self._sparse_rmsprop_update_numpy(
                    var1_np, grads1_np_indices, grads1_np, mg1_np, rms1_np,
                    mom1_np, learning_rate, decay, momentum, centered))

            # Validate updated params
            if centered:
                self.assertAllCloseAccordingToType(mg0_np, mg0.eval())
                self.assertAllCloseAccordingToType(mg1_np, mg1.eval())
            self.assertAllCloseAccordingToType(rms0_np, rms0.eval())
            self.assertAllCloseAccordingToType(rms1_np, rms1.eval())
            self.assertAllCloseAccordingToType(mom0_np, mom0.eval())
            self.assertAllCloseAccordingToType(mom1_np, mom1.eval())
            self.assertAllCloseAccordingToType(var0_np, var0.eval())
            self.assertAllCloseAccordingToType(var1_np, var1.eval())

def testWithoutMomentum(self, dtype):
    with self.session(use_gpu=True):
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([3.0, 4.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.1], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.01], dtype=dtype)
        opt = rmsprop.RMSPropOptimizer(
            learning_rate=2.0, decay=0.9, momentum=0.0, epsilon=1.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()

        rms0 = opt.get_slot(var0, "rms")
        self.assertIsNotNone(rms0)
        rms1 = opt.get_slot(var1, "rms")
        self.assertIsNotNone(rms1)
        mom0 = opt.get_slot(var0, "momentum")
        self.assertIsNotNone(mom0)
        mom1 = opt.get_slot(var1, "momentum")
        self.assertIsNotNone(mom1)

        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())

        # Step 1: the rms accumulators were 1. So we should see a normal
        # update: v -= grad * learning_rate
        update.run()
        # Check the root mean square accumulators.
        self.assertAllCloseAccordingToType(
            np.array([0.901, 0.901]), rms0.eval())
        self.assertAllCloseAccordingToType(
            np.array([0.90001, 0.90001]), rms1.eval())
        # Check the parameters.
        self.assertAllCloseAccordingToType(
            np.array([
                1.0 - (0.1 * 2.0 / math.sqrt(0.901)),
                2.0 - (0.1 * 2.0 / math.sqrt(0.901))
            ]), var0.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                3.0 - (0.01 * 2.0 / math.sqrt(0.90001)),
                4.0 - (0.01 * 2.0 / math.sqrt(0.90001))
            ]), var1.eval())

        # Step 2: the root mean square accumulators contain the previous
        # update.
        update.run()
        # Check the rms accumulators.
        self.assertAllCloseAccordingToType(
            np.array([0.901 * 0.9 + 0.001, 0.901 * 0.9 + 0.001]),
            rms0.eval())
        self.assertAllCloseAccordingToType(
            np.array([0.90001 * 0.9 + 1e-5, 0.90001 * 0.9 + 1e-5]),
            rms1.eval())
        # Check the parameters.
        self.assertAllCloseAccordingToType(
            np.array([
                1.0 - (0.1 * 2.0 / math.sqrt(0.901)) -
                (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)),
                2.0 - (0.1 * 2.0 / math.sqrt(0.901)) -
                (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001))
            ]), var0.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                3.0 - (0.01 * 2.0 / math.sqrt(0.90001)) -
                (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)),
                4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) -
                (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5))
            ]), var1.eval())

def testWithMomentum(self, dtype):
    with self.session(use_gpu=True):
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([3.0, 4.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.1], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.01], dtype=dtype)

        opt = rmsprop.RMSPropOptimizer(
            learning_rate=2.0, decay=0.9, momentum=0.5, epsilon=1.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()

        rms0 = opt.get_slot(var0, "rms")
        self.assertIsNotNone(rms0)
        rms1 = opt.get_slot(var1, "rms")
        self.assertIsNotNone(rms1)
        mom0 = opt.get_slot(var0, "momentum")
        self.assertIsNotNone(mom0)
        mom1 = opt.get_slot(var1, "momentum")
        self.assertIsNotNone(mom1)

        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())

        # Step 1: rms = 1, mom = 0. So we should see a normal
        # update: v -= grad * learning_rate
        update.run()
        # Check the root mean square accumulators.
        self.assertAllCloseAccordingToType(
            np.array([0.901, 0.901]), rms0.eval())
        self.assertAllCloseAccordingToType(
            np.array([0.90001, 0.90001]), rms1.eval())
        # Check the momentum accumulators
        self.assertAllCloseAccordingToType(
            np.array([(0.1 * 2.0 / math.sqrt(0.901)),
                      (0.1 * 2.0 / math.sqrt(0.901))]), mom0.eval())
        self.assertAllCloseAccordingToType(
            np.array([(0.01 * 2.0 / math.sqrt(0.90001)),
                      (0.01 * 2.0 / math.sqrt(0.90001))]), mom1.eval())

        # Check the parameters.
        self.assertAllCloseAccordingToType(
            np.array([
                1.0 - (0.1 * 2.0 / math.sqrt(0.901)),
                2.0 - (0.1 * 2.0 / math.sqrt(0.901))
            ]), var0.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                3.0 - (0.01 * 2.0 / math.sqrt(0.90001)),
                4.0 - (0.01 * 2.0 / math.sqrt(0.90001))
            ]), var1.eval())

        # Step 2: the root mean square accumulators contain the previous
        # update.
        update.run()
        # Check the rms accumulators.
        self.assertAllCloseAccordingToType(
            np.array([0.901 * 0.9 + 0.001, 0.901 * 0.9 + 0.001]),
            rms0.eval())
        self.assertAllCloseAccordingToType(
            np.array([0.90001 * 0.9 + 1e-5, 0.90001 * 0.9 + 1e-5]),
            rms1.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                0.5 * (0.1 * 2.0 / math.sqrt(0.901)) +
                (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)),
                0.5 * (0.1 * 2.0 / math.sqrt(0.901)) +
                (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001))
            ]), mom0.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) +
                (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)),
                0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) +
                (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5))
            ]), mom1.eval())

        # Check the parameters.
        self.assertAllCloseAccordingToType(
            np.array([
                1.0 - (0.1 * 2.0 / math.sqrt(0.901)) -
                (0.5 * (0.1 * 2.0 / math.sqrt(0.901)) +
                 (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001))),
                2.0 - (0.1 * 2.0 / math.sqrt(0.901)) -
                (0.5 * (0.1 * 2.0 / math.sqrt(0.901)) +
                 (0.1 * 2.0 / math.sqrt(0.901 * 0.9 + 0.001)))
            ]), var0.eval())
        self.assertAllCloseAccordingToType(
            np.array([
                3.0 - (0.01 * 2.0 / math.sqrt(0.90001)) -
                (0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) +
                 (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5))),
                4.0 - (0.01 * 2.0 / math.sqrt(0.90001)) -
                (0.5 * (0.01 * 2.0 / math.sqrt(0.90001)) +
                 (0.01 * 2.0 / math.sqrt(0.90001 * 0.9 + 1e-5)))
            ]), var1.eval())

def __init__(self): self.mainFrame = gui.mainFrame.MainFrame.getInstance()
def display(self, callingWindow, srcContext, mainItem):
    if srcContext not in ("marketItemGroup", "marketItemMisc") or \
            self.mainFrame.getActiveFit() is None:
        return False

    if mainItem is None:
        return False

    for attr in ("emDamage", "thermalDamage", "explosiveDamage",
                 "kineticDamage"):
        if mainItem.getAttribute(attr) is not None:
            return True

    return False

def getText(self, callingWindow, itmContext, mainItem): return "Set {} as Damage Pattern".format(itmContext if itmContext is not None else "Item")
def activate(self, callingWindow, fullContext, mainItem, i):
    fitID = self.mainFrame.getActiveFit()
    Fit.getInstance().setAsPattern(fitID, mainItem)
    wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))

def getBitmap(self, callingWindow, context, mainItem): return None
def testProcessPriorTo24(self):
    """Tests the Process function on a Firefox History database file."""
    # This is probably version 23 but potentially an older version.
    plugin = firefox_history.FirefoxHistoryPlugin()
    storage_writer = self._ParseDatabaseFileWithPlugin(
        ['places.sqlite'], plugin)

    # The places.sqlite file contains 205 events (1 page visit,
    # 2 x 91 bookmark records, 2 x 3 bookmark annotations,
    # 2 x 8 bookmark folders).
    # However there are three events that do not have a timestamp
    # so the test file will show 202 extracted events.
    number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
    self.assertEqual(number_of_events, 202)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'extraction_warning')
    self.assertEqual(number_of_warnings, 0)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'recovery_warning')
    self.assertEqual(number_of_warnings, 0)

    events = list(storage_writer.GetEvents())

    # Check the first page visited event.
    expected_event_values = {
        'data_type': 'firefox:places:page_visited',
        'date_time': '2011-07-01 11:16:21.371935',
        'host': 'news.google.com',
        'timestamp_desc': definitions.TIME_DESCRIPTION_LAST_VISITED,
        'title': 'Google News',
        'url': 'http://news.google.com/',
        'visit_count': 1,
        'visit_type': 2}

    self.CheckEventValues(storage_writer, events[0], expected_event_values)

    # Check the first bookmark event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark',
        'date_time': '2011-07-01 11:13:59.266344',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[1], expected_event_values)

    # Check the second bookmark event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark',
        'date_time': '2011-07-01 11:13:59.267198',
        'places_title': (
            'folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder=TOOLBAR&'
            'sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation=livemark'
            '%2FfeedURI&maxResults=10&queryType=1'),
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
        'title': 'Recently Bookmarked',
        'type': 'URL',
        'url': (
            'place:folder=BOOKMARKS_MENU&folder=UNFILED_BOOKMARKS&folder='
            'TOOLBAR&sort=12&excludeQueries=1&excludeItemIfParentHasAnnotation='
            'livemark%2FfeedURI&maxResults=10&queryType=1'),
        'visit_count': 0}

    self.CheckEventValues(storage_writer, events[2], expected_event_values)

    # Check the first bookmark annotation event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_annotation',
        'date_time': '2011-07-01 11:13:59.267146',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[183], expected_event_values)

    # Check another bookmark annotation event.
    expected_event_values = {
        'content': 'RecentTags',
        'data_type': 'firefox:places:bookmark_annotation',
        'date_time': '2011-07-01 11:13:59.267605',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED,
        'title': 'Recent Tags',
        'url': 'place:sort=14&type=6&maxResults=10&queryType=1'}

    self.CheckEventValues(storage_writer, events[184], expected_event_values)

    # Check the second last bookmark folder event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_folder',
        'date_time': '2011-03-21 10:05:01.553774',
        'timestamp_desc': definitions.TIME_DESCRIPTION_ADDED}

    self.CheckEventValues(storage_writer, events[200], expected_event_values)

    # Check the last bookmark folder event.
    expected_event_values = {
        'data_type': 'firefox:places:bookmark_folder',
        'date_time': '2011-07-01 11:14:11.766851',
        'timestamp_desc': definitions.TIME_DESCRIPTION_MODIFICATION,
        'title': 'Latest Headlines'}

    self.CheckEventValues(storage_writer, events[201], expected_event_values)

def testProcessVersion25(self):
    """Tests the Process function on a Firefox History database file v 25."""
    plugin = firefox_history.FirefoxHistoryPlugin()
    storage_writer = self._ParseDatabaseFileWithPlugin(
        ['places_new.sqlite'], plugin)

    # The places_new.sqlite file contains 84 events:
    #     34 page visits.
    #     28 bookmarks
    #     14 bookmark folders
    #     8 annotations
    number_of_events = storage_writer.GetNumberOfAttributeContainers('event')
    self.assertEqual(number_of_events, 84)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'extraction_warning')
    self.assertEqual(number_of_warnings, 0)

    number_of_warnings = storage_writer.GetNumberOfAttributeContainers(
        'recovery_warning')
    self.assertEqual(number_of_warnings, 0)

    events = list(storage_writer.GetEvents())

    counter = collections.Counter()
    for event in events:
        event_data = self._GetEventDataOfEvent(storage_writer, event)
        counter[event_data.data_type] += 1

    self.assertEqual(counter['firefox:places:bookmark'], 28)
    self.assertEqual(counter['firefox:places:page_visited'], 34)
    self.assertEqual(counter['firefox:places:bookmark_folder'], 14)
    self.assertEqual(counter['firefox:places:bookmark_annotation'], 8)

    expected_event_values = {
        'data_type': 'firefox:places:page_visited',
        'date_time': '2013-10-30 21:57:11.281942',
        'host': 'code.google.com',
        'url': 'http://code.google.com/p/plaso',
        'visit_count': 1,
        'visit_type': 2}

    self.CheckEventValues(storage_writer, events[10], expected_event_values)

def each(f):
    if f.body:
        f.hashes = []
        for hash_type, h in HashFile.extract_hashes(f.body.contents):
            hash_object = Hash.get_or_create(value=h.hexdigest())
            hash_object.add_source("analytics")
            hash_object.save()
            f.active_link_to(
                hash_object,
                "{} hash".format(hash_type.upper()),
                "HashFile",
                clean_old=False,
            )
            f.hashes.append({"hash": hash_type, "value": h.hexdigest()})
        f.save()

def __init__(self, queue, level, formatter):
    super(QueueingLogHandler, self).__init__()
    self._queue = queue
    self.setLevel(level)
    self.setFormatter(formatter)

def emit(self, record):
    msg = self.format(record)
    self._queue.put_nowait(msg)

def close(self):
    super(QueueingLogHandler, self).close()
    self._queue.put_nowait(None)

def emitted(self): return self._queue
def __init__(self): self._logging_handlers = set()
def test(self, logger_name, logger_level, message):
    logger = logging.getLogger(logger_name)
    getattr(logger, logger_level.lower())(message)

def available_loggers(self):
    """ List of initialized loggers """
    return logging.getLogger().manager.loggerDict.keys()

def close_log_streams(self):
    """ Closes all log_stream streams. """
    while self._logging_handlers:
        self._logging_handlers.pop().close()

def log_stream(self, logger_name, level_name, format_str):
    """ Attaches a log handler to the specified logger and sends emitted
    logs back as a stream. """
    if logger_name != "" and logger_name not in self.available_loggers():
        raise ValueError("logger {0} is not available".format(logger_name))

    level_name_upper = level_name.upper() if level_name else "NOTSET"
    try:
        level = getattr(logging, level_name_upper)
    except AttributeError:
        raise AttributeError(
            "log level {0} is not available".format(level_name_upper))

def create(params, env=None, headers=None): return request.send('post', request.uri_path("plans"), params, env, headers)
def update(id, params=None, env=None, headers=None):
    return request.send('post', request.uri_path("plans", id), params, env,
                        headers)

def list(params=None, env=None, headers=None): return request.send_list_request('get', request.uri_path("plans"), params, env, headers)
def retrieve(id, env=None, headers=None):
    return request.send('get', request.uri_path("plans", id), None, env,
                        headers)

def delete(id, env=None, headers=None):
    return request.send('post', request.uri_path("plans", id, "delete"),
                        None, env, headers)

def copy(params, env=None, headers=None):
    return request.send('post', request.uri_path("plans", "copy"), params,
                        env, headers)

# Source template: ``%(signature)s`` and ``%(funcargs)s`` are placeholders
# filled in by %-substitution before this text is compiled.
def wrapper%(signature)s:
    with ldap3mock:
        return func%(funcargs)s

def generateUUID():  # pylint: disable=invalid-name
    """ Utility function; generates UUIDs """
    return str(uuid.uuid4())

def _convert_objectGUID(item):
    item = uuid.UUID("{{{0!s}}}".format(item)).bytes_le
    item = escape_bytes(item)
    return item

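# --- Illustrative usage (added sketch; the GUID value is hypothetical) ---
# _convert_objectGUID yields the escaped little-endian byte form that
# Active Directory stores in objectGUID, ready to embed in an LDAP filter.
def _objectGUID_filter_example():
    guid = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"
    return "(objectGUID={0})".format(_convert_objectGUID(guid))
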
def refund_user(self, user_id):
    # Do logic here...
    pass

def __init__(self): self._calls = []
def with_status_check(obj, *args, **kwargs):
    # Inner wrapper of a status-guard decorator: ``func`` and
    # ``valid_start_statuses`` come from the enclosing scope.
    if obj.status not in valid_start_statuses:
        exception_msg = (
            u"Error calling {} {}: status is '{}', must be one of: {}"
        ).format(func, obj, obj.status, valid_start_statuses)
        raise VerificationException(exception_msg)
    return func(obj, *args, **kwargs)

def __iter__(self): return iter(self._calls)
def expiration_datetime(self):
    """Datetime that the verification will expire."""
    days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
    return self.created_at + timedelta(days=days_good_for)

def __len__(self): return len(self._calls)
def should_display_status_to_user(self):
    """Whether or not the status from this attempt should be displayed to
    the user."""
    return True

def __getitem__(self, idx): return self._calls[idx]
def active_at_datetime(self, deadline):
    """Check whether the verification was active at a particular datetime.

    Arguments:
        deadline (datetime): The date at which the verification must have
            been active (created before the deadline, and not yet expired
            as of now).

    Returns:
        bool
    """
    return (
        self.created_at < deadline and
        self.expiration_datetime > now()
    )

def setdata(self, request, response): self._calls.append(Call(request, response))
def __unicode__(self):
    return 'ManualIDVerification for {name}, status: {status}'.format(
        name=self.name,
        status=self.status,
    )

def reset(self): self._calls = []
def should_display_status_to_user(self):
    """ Whether or not the status should be displayed to the user. """
    return False

def __init__(self, connection): self.connection = connection
def __unicode__(self):
    return 'SSOIDVerification for {name}, status: {status}'.format(
        name=self.name,
        status=self.status,
    )

def paged_search(self, **kwargs):
    self.connection.search(search_base=kwargs.get("search_base"),
                           search_scope=kwargs.get("search_scope"),
                           search_filter=kwargs.get("search_filter"),
                           attributes=kwargs.get("attributes"),
                           paged_size=kwargs.get("page_size"),
                           size_limit=kwargs.get("size_limit"),
                           paged_cookie=None)
    result = self.connection.response
    if kwargs.get("generator", False):
        # If ``generator=True`` is passed, ``paged_search`` should return
        # an iterator.
        result = iter(result)
    return result

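# --- Illustrative usage (added sketch; argument values are assumptions) ---
# paged_search pulls its parameters out of **kwargs; with generator=True it
# hands back an iterator over the mock connection's response entries.
def _paged_search_example(extend_standard):
    entries = extend_standard.paged_search(
        search_base="dc=example,dc=com",
        search_scope="SUBTREE",
        search_filter="(objectClass=person)",
        attributes=["cn", "mail"],
        page_size=5,
        size_limit=0,
        generator=True)
    for entry in entries:
        print(entry.get("dn"))
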
def __init__(self, connection): self.standard = self.Standard(connection)
def parsed_error_msg(self):
    """
    Sometimes, the error message we've received needs to be parsed into
    something more human readable

    The default behavior is to return the current error message as is.
    """
    return self.error_msg

def __init__(self, directory=None):
    if directory is None:
        directory = []
    import copy
    self.directory = copy.deepcopy(directory)
    self.bound = False
    self.start_tls_called = False
    self.extend = self.Extend(self)

    self.operation = {
        "!": self._search_not,
        "&": self._search_and,
        "|": self._search_or,
    }

def upload_face_image(self, img): raise NotImplementedError
def set_directory(self, directory): self.directory = directory
def upload_photo_id_image(self, img): raise NotImplementedError
def _find_user(self, dn): return next(i for (i, d) in enumerate(self.directory) if d["dn"] == dn)
def mark_ready(self):
    """
    Mark that the user data in this attempt is correct. In order to
    succeed, the user must have uploaded the necessary images
    (`face_image_url`, `photo_id_image_url`). This method will also copy
    their name from their user profile. Prior to marking it ready, we read
    this value directly from their profile, since they're free to change
    it. This often happens because people put in less formal versions of
    their name on signup, but realize they want something different to go
    on a formal document.

    Valid attempt statuses when calling this method:
        `created`

    Status after method completes: `ready`

    Other fields that will be set by this method:
        `name`

    State Transitions:

    `created` → `ready`
        This is what happens when the user confirms to us that the
        pictures they uploaded are good. Note that we don't actually do a
        submission anywhere yet.
    """
    # At any point prior to this, they can change their names via their
    # student dashboard. But at this point, we lock the value into the
    # attempt.
    self.name = self.user.profile.name
    self.status = "ready"
    self.save()

def approve(self, user_id=None, service=""):
    """
    Approve this attempt. `user_id`

    Valid attempt statuses when calling this method:
        `submitted`, `approved`, `denied`

    Status after method completes: `approved`

    Other fields that will be set by this method:
        `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`

    State Transitions:

    `submitted` → `approved`
        This is the usual flow, whether initiated by a staff user or an
        external validation service.

    `approved` → `approved`
        No-op. First one to approve it wins.

    `denied` → `approved`
        This might happen if a staff member wants to override a decision
        made by an external service or another staff member (say, in
        response to a support request). In this case, the previous values
        of `reviewed_by_user_id` and `reviewed_by_service` will be changed
        to whoever is doing the approving, and `error_msg` will be reset.
        The only record that this record was ever denied would be in our
        logs. This should be a relatively rare occurrence.
    """
    # If someone approves an outdated version of this, the first one wins
    if self.status == "approved":
        return

    log.info(u"Verification for user '{user_id}' approved by '{reviewer}'.".format(
        user_id=self.user, reviewer=user_id))
    self.error_msg = ""   # reset, in case this attempt was denied before
    self.error_code = ""  # reset, in case this attempt was denied before
    self.reviewing_user = user_id
    self.reviewing_service = service
    self.status = "approved"
    self.save()
    # Emit signal to find and generate eligible certificates
    LEARNER_NOW_VERIFIED.send_robust(
        sender=PhotoVerification,
        user=self.user
    )

def bind(self, read_server_info=True): return self.bound
def deny(self, error_msg, error_code="", reviewing_user=None,
         reviewing_service=""):
    """
    Deny this attempt.

    Valid attempt statuses when calling this method:
        `submitted`, `approved`, `denied`

    Status after method completes: `denied`

    Other fields that will be set by this method:
        `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`,
        `error_code`

    State Transitions:

    `submitted` → `denied`
        This is the usual flow, whether initiated by a staff user or an
        external validation service.

    `approved` → `denied`
        This might happen if a staff member wants to override a decision
        made by an external service or another staff member, or just
        correct a mistake made during the approval process. In this case,
        the previous values of `reviewed_by_user_id` and
        `reviewed_by_service` will be changed to whoever is doing the
        denying. The only record that this record was ever approved would
        be in our logs. This should be a relatively rare occurrence.

    `denied` → `denied`
        Update the error message and reviewing_user/reviewing_service.
        Just lets you amend the error message in case there were
        additional details to be made.
    """
    log.info(u"Verification for user '{user_id}' denied by '{reviewer}'.".format(
        user_id=self.user, reviewer=reviewing_user))
    self.error_msg = error_msg
    self.error_code = error_code
    self.reviewing_user = reviewing_user
    self.reviewing_service = reviewing_service
    self.status = "denied"
    self.save()

def start_tls(self, read_server_info=True): self.start_tls_called = True
def system_error(self, error_msg, error_code="", reviewing_user=None,
                 reviewing_service=""):
    """
    Mark that this attempt could not be completed because of a system
    error. Status should be moved to `must_retry`. For example, if
    Software Secure reported to us that they couldn't process our
    submission because they couldn't decrypt the image we sent.
    """
    if self.status in ["approved", "denied"]:
        return  # If we were already approved or denied, just leave it.

    self.error_msg = error_msg
    self.error_code = error_code
    self.reviewing_user = reviewing_user
    self.reviewing_service = reviewing_service
    self.status = "must_retry"
    self.save()

def add(self, dn, object_class=None, attributes=None):
    self.result = {
        'dn': '',
        'referrals': None,
        'description': 'success',
        'result': 0,
        'message': '',
        'type': 'addResponse'
    }

    # Check to see if the user exists in the directory
    try:
        index = self._find_user(dn)
    except StopIteration:
        # If we get here the user doesn't exist so continue
        # Create an entry object for the new user
        entry = {}
        entry['dn'] = dn
        entry['attributes'] = attributes
        if object_class is not None:
            entry['attributes'].update({'objectClass': object_class})
    else:
        # User already exists
        self.result["description"] = "failure"
        self.result["result"] = 68
        self.result["message"] = \
            "Error entryAlreadyExists for {0}".format(dn)
        return False

    # Add the user entry to the directory
    self.directory.append(entry)

    # Attempt to write changes to disk
    with open(DIRECTORY, 'w+') as f:
        f.write(str(self.directory))

    return True

def retire_user(cls, user_id):
    """
    Retire user as part of GDPR Phase I
    Returns 'True' if records found

    :param user_id: int
    :return: bool
    """
    try:
        user_obj = User.objects.get(id=user_id)
    except User.DoesNotExist:
        return False

    photo_objects = cls.objects.filter(
        user=user_obj
    ).update(
        name='',
        face_image_url='',
        photo_id_image_url='',
        photo_id_key=''
    )
    return photo_objects > 0

def delete(self, dn, controls=None):
    self.result = {
        'dn': '',
        'referrals': None,
        'description': 'success',
        'result': 0,
        'message': '',
        'type': 'delResponse'
    }

    # Check to see if the user exists in the directory
    try:
        index = self._find_user(dn)
    except StopIteration:
        # If we get here the user doesn't exist
        self.result["description"] = "failure"
        self.result["result"] = 32
        self.result["message"] = "Error no such object: {0}".format(dn)
        return False

    # Delete the entry object for the user
    self.directory.pop(index)

    # Attempt to write changes to disk
    with open(DIRECTORY, 'w+') as f:
        f.write(str(self.directory))

    return True

def approve(self, user_id=None, service=""):
    """
    Approve the verification attempt for user

    Valid attempt statuses when calling this method:
        `submitted`, `approved`, `denied`

    After method completes:
        status is set to `approved`
        expiry_date is set to one year from now
    """
    self.expiry_date = now() + timedelta(
        days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"])
    super(SoftwareSecurePhotoVerification, self).approve(user_id, service)

def modify(self, dn, changes, controls=None):
    self.result = {
        'dn': '',
        'referrals': None,
        'description': 'success',
        'result': 0,
        'message': '',
        'type': 'modifyResponse'
    }

    # Check to see if the user exists in the directory
    try:
        index = self._find_user(dn)
    except StopIteration:
        # If we get here the user doesn't exist
        self.result["description"] = "failure"
        self.result["result"] = 32
        self.result["message"] = "Error no such object: {0!s}".format(dn)
        return False

    # extract the hash we are interested in
    entry = self.directory[index].get("attributes")

    # Loop over the changes hash and apply them
    for k, v in changes.items():
        if v[0] == "MODIFY_DELETE":
            entry.pop(k)
        elif v[0] == "MODIFY_REPLACE" or v[0] == "MODIFY_ADD":
            entry[k] = v[1][0]
        else:
            self.result["result"] = 2
            self.result["message"] = ("Error bad/missing/not implemented "
                                      "modify operation: %s" % v[0])

    # Place the attributes back into the directory hash
    self.directory[index]["attributes"] = entry

    # Attempt to write changes to disk
    with open(DIRECTORY, 'w+') as f:
        f.write(str(self.directory))

    return True

def get_initial_verification(cls, user, earliest_allowed_date=None):
    """Get initial verification for a user with the 'photo_id_key'.

    Arguments:
        user(User): user object
        earliest_allowed_date(datetime): override expiration date for
            initial verification

    Return:
        SoftwareSecurePhotoVerification (object) or None
    """
    init_verification = cls.objects.filter(
        user=user,
        status__in=["submitted", "approved"],
        created_at__gte=(
            earliest_allowed_date or earliest_allowed_verification_date()
        )
    ).exclude(photo_id_key='')

    return init_verification.latest('created_at') \
        if init_verification.exists() else None

def _match_greater_than_or_equal(search_base, attribute, value, candidates):
    matches = list()
    for entry in candidates:
        dn = entry.get("dn")
        if not dn.endswith(search_base):
            continue

        value_from_directory = entry.get("attributes").get(attribute)
        if str(value_from_directory) >= str(value):
            entry["type"] = "searchResEntry"
            matches.append(entry)
    return matches

def upload_face_image(self, img_data):
    """
    Upload an image of the user's face. `img_data` should be a raw
    bytestream of a PNG image. This method will take the data, encrypt it
    using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
    storage backend.

    Yes, encoding it to base64 adds compute and disk usage without much
    real benefit, but that's what the other end of this API is expecting
    to get.
    """
    # Skip this whole thing if we're running acceptance tests or if we're
    # developing and aren't interested in working on student identity
    # verification functionality. If you do want to work on it, you have
    # to explicitly enable these in your private settings.
    if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
        return

    aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
    aes_key = aes_key_str.decode("hex")

    path = self._get_path("face")
    buff = ContentFile(encrypt_and_encode(img_data, aes_key))
    self._storage.save(path, buff)

def _match_greater_than(search_base, attribute, value, candidates):
    matches = list()
    for entry in candidates:
        dn = entry.get("dn")
        if not dn.endswith(search_base):
            continue

        value_from_directory = entry.get("attributes").get(attribute)
        if str(value_from_directory) > str(value):
            entry["type"] = "searchResEntry"
            matches.append(entry)
    return matches

def upload_photo_id_image(self, img_data):
    """
    Upload the user's photo ID image. `img_data` should be a raw
    bytestream of a PNG image. This method will take the data, encrypt it
    using a randomly generated AES key, encode it with base64 and save it
    to the storage backend. The random key is also encrypted using
    Software Secure's public RSA key and stored in our `photo_id_key`
    field.

    Yes, encoding it to base64 adds compute and disk usage without much
    real benefit, but that's what the other end of this API is expecting
    to get.
    """
    # Skip this whole thing if we're running acceptance tests or if we're
    # developing and aren't interested in working on student identity
    # verification functionality. If you do want to work on it, you have
    # to explicitly enable these in your private settings.
    if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
        # fake photo id key is set only for initial verification
        self.photo_id_key = 'fake-photo-id-key'
        self.save()
        return

    aes_key = random_aes_key()
    rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
    rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)

    # Save this to the storage backend
    path = self._get_path("photo_id")
    buff = ContentFile(encrypt_and_encode(img_data, aes_key))
    self._storage.save(path, buff)

    # Update our record fields
    self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
    self.save()

def _match_less_than_or_equal(search_base, attribute, value, candidates):
    matches = list()
    for entry in candidates:
        dn = entry.get("dn")
        if not dn.endswith(search_base):
            continue

        value_from_directory = entry.get("attributes").get(attribute)
        if str(value_from_directory) <= str(value):
            entry["type"] = "searchResEntry"
            matches.append(entry)
    return matches

def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def submit(self, copy_id_photo_from=None):
    """
    Submit our verification attempt to Software Secure for validation.

    This will set our status to "submitted" if the post is successful,
    and "must_retry" if the post fails.

    Keyword Arguments:
        copy_id_photo_from (SoftwareSecurePhotoVerification): If provided,
            re-send the ID photo data from this attempt. This is used for
            reverification, in which new face photos are sent with
            previously-submitted ID photos.
    """
    try:
        response = self.send_request(copy_id_photo_from=copy_id_photo_from)
        if response.ok:
            self.submitted_at = now()
            self.status = "submitted"
            self.save()
        else:
            self.status = "must_retry"
            self.error_msg = response.text
            self.save()
    except Exception:  # pylint: disable=broad-except
        log.exception(
            u'Software Secure submission failed for user %s, setting status to must_retry',
            self.user.username
        )
        self.status = "must_retry"
        self.save()
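Tying the upload and submit methods together, the reverification flow described in the docstring looks roughly like this (initial_attempt and new_attempt are hypothetical SoftwareSecurePhotoVerification instances):

# First-time verification: upload both photos, then submit.
initial_attempt.upload_face_image(face_png_bytes)
initial_attempt.upload_photo_id_image(id_png_bytes)
initial_attempt.submit()

# Reverification: only a new face photo is captured; the previously
# submitted ID photo is re-sent by reference to the earlier attempt.
new_attempt.upload_face_image(new_face_png_bytes)
new_attempt.submit(copy_id_photo_from=initial_attempt)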
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _match_less_than(search_base, attribute, value, candidates):
    matches = list()
    for entry in candidates:
        dn = entry.get("dn")
        if not dn.endswith(search_base):
            continue

        value_from_directory = entry.get("attributes").get(attribute)
        if str(value_from_directory) < str(value):
            entry["type"] = "searchResEntry"
            matches.append(entry)
    return matches
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def parsed_error_msg(self):
    """
    Parse the error messages we receive from SoftwareSecure

    Error messages are written in the form:

        `[{"photoIdReasons": ["Not provided"]}]`

    Returns:
        str[]: List of error messages.
    """
    parsed_errors = []
    error_map = {
        'EdX name not provided': 'name_mismatch',
        'Name mismatch': 'name_mismatch',
        'Photo/ID Photo mismatch': 'photos_mismatched',
        'ID name not provided': 'id_image_missing_name',
        'Invalid Id': 'id_invalid',
        'No text': 'id_invalid',
        'Not provided': 'id_image_missing',
        'Photo hidden/No photo': 'id_image_not_clear',
        'Text not clear': 'id_image_not_clear',
        'Face out of view': 'user_image_not_clear',
        'Image not clear': 'user_image_not_clear',
        'Photo not provided': 'user_image_missing',
    }

    try:
        messages = set()
        message_groups = json.loads(self.error_msg)

        for message_group in message_groups:
            messages = messages.union(set(*six.itervalues(message_group)))

        for message in messages:
            parsed_error = error_map.get(message)

            if parsed_error:
                parsed_errors.append(parsed_error)
            else:
                log.debug(u'Ignoring photo verification error message: %s', message)
    except Exception:  # pylint: disable=broad-except
        log.exception(u'Failed to parse error message for SoftwareSecurePhotoVerification %d', self.pk)

    return parsed_errors
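A round trip of the parsing above, assuming error_msg holds JSON in the documented shape (attempt is a hypothetical instance):

attempt.error_msg = '[{"photoIdReasons": ["Not provided"]}, {"generalReasons": ["Name mismatch"]}]'
print(attempt.parsed_error_msg())
# -> ['id_image_missing', 'name_mismatch'] (order may vary; messages is a set)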
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _match_equal_to(search_base, attribute, value, candidates):
    matches = list()
    match_using_regex = False

    if "*" in value:
        match_using_regex = True
        #regex = check_escape(value)
        regex = value.replace('*', '.*')
        regex = "^{0}$".format(regex)

    for entry in candidates:
        dn = to_unicode(entry.get("dn"))

        if attribute not in entry.get("attributes") or not dn.endswith(search_base):
            continue

        values_from_directory = entry.get("attributes").get(attribute)
        if isinstance(values_from_directory, list):
            for item in values_from_directory:
                if attribute == "objectGUID":
                    item = _convert_objectGUID(item)

                if match_using_regex:
                    m = re.match(regex, str(item), re.I)
                    if m:
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
                else:
                    if item == value:
                        entry["type"] = "searchResEntry"
                        matches.append(entry)
        else:
            if attribute == "objectGUID":
                values_from_directory = _convert_objectGUID(values_from_directory)

            if match_using_regex:
                m = re.match(regex, str(values_from_directory), re.I)
                if m:
                    entry["type"] = "searchResEntry"
                    matches.append(entry)
            else:
                # The value we compare against is unicode, so convert
                # values_from_directory to unicode rather than str.
                if isinstance(values_from_directory, bytes):
                    values_from_directory = values_from_directory.decode("utf-8")
                elif type(values_from_directory) == int:
                    values_from_directory = u"{0!s}".format(values_from_directory)

                if value == values_from_directory:
                    entry["type"] = "searchResEntry"
                    matches.append(entry)

    return matches
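The wildcard handling above simply rewrites LDAP * to the regex .*, anchors the pattern, and matches case-insensitively against the whole attribute value. A small illustration with hypothetical entries:

candidates = [
    {"dn": "cn=admin,o=test", "attributes": {"cn": "admin"}},
    {"dn": "cn=alice,o=test", "attributes": {"cn": "alice"}},
]
# "adm*" becomes the anchored regex ^adm.*$, matched with re.I.
matches = _match_equal_to("o=test", "cn", "adm*", candidates)
print([e["dn"] for e in matches])  # ['cn=admin,o=test']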
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def image_url(self, name, override_receipt_id=None):
    """
    We generate this dynamically, since we want the expiration clock to
    start when the message is created, not when the record is created.

    Arguments:
        name (str): Name of the image (e.g. "photo_id" or "face")

    Keyword Arguments:
        override_receipt_id (str): If provided, use this receipt ID
            instead of the ID for this attempt. This is useful for
            reverification where we need to construct a URL to a
            previously-submitted photo ID image.

    Returns:
        string: The expiring URL for the image.
    """
    path = self._get_path(name, override_receipt_id=override_receipt_id)
    return self._storage.url(path)
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _match_notequal_to(search_base, attribute, value, candidates):
    matches = list()
    match_using_regex = False

    if "*" in value:
        match_using_regex = True
        #regex = check_escape(value)
        regex = value.replace('*', '.*')
        regex = "^{0}$".format(regex)

    for entry in candidates:
        found = False
        dn = entry.get("dn")

        if not dn.endswith(search_base):
            continue

        values_from_directory = entry.get("attributes").get(attribute)
        if isinstance(values_from_directory, list):
            for item in values_from_directory:
                if attribute == "objectGUID":
                    item = _convert_objectGUID(item)

                if match_using_regex:
                    m = re.match(regex, str(item), re.I)
                    if m:
                        found = True
                else:
                    if item == value:
                        found = True
            if found is False:
                entry["type"] = "searchResEntry"
                matches.append(entry)
        else:
            if attribute == "objectGUID":
                values_from_directory = _convert_objectGUID(values_from_directory)

            if match_using_regex:
                m = re.match(regex, str(values_from_directory), re.I)
                if not m:
                    entry["type"] = "searchResEntry"
                    matches.append(entry)
            else:
                if str(value) != str(values_from_directory):
                    entry["type"] = "searchResEntry"
                    matches.append(entry)

    return matches
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _storage(self):
    """
    Return the configured django storage backend.
    """
    config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]

    # Default to the S3 backend for backward compatibility
    storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
    storage_kwargs = config.get("STORAGE_KWARGS", {})

    # Map old settings to the parameters expected by the storage backend
    if "AWS_ACCESS_KEY" in config:
        storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
    if "AWS_SECRET_KEY" in config:
        storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
    if "S3_BUCKET" in config:
        storage_kwargs["bucket"] = config["S3_BUCKET"]
        storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION

    return get_storage(storage_class, **storage_kwargs)
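A hypothetical settings fragment showing both shapes the mapping above reconciles; the keys inside STORAGE_KWARGS depend on the chosen backend:

VERIFY_STUDENT = {
    "SOFTWARE_SECURE": {
        # Newer style: explicit backend class plus its constructor kwargs.
        "STORAGE_CLASS": "storages.backends.s3boto.S3BotoStorage",
        "STORAGE_KWARGS": {"bucket": "verification-photos"},
        # Legacy style, still honored by the mapping above:
        # "AWS_ACCESS_KEY": "...",
        # "AWS_SECRET_KEY": "...",
        # "S3_BUCKET": "verification-photos",
    },
}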
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _parse_filter():
    op = pyparsing.oneOf('! & |')
    lpar = pyparsing.Literal('(').suppress()
    rpar = pyparsing.Literal(')').suppress()

    k = pyparsing.Word(pyparsing.alphanums)
    # NOTE: We may need to expand on this list, but as this is not a real
    # LDAP server we should be OK.
    # Value may contain: numbers, upper/lower case letters, asterisk, at
    # symbol, minus, full stop, backslash or a space
    v = pyparsing.Word(pyparsing.alphanums + "-*@.\\ äöü")

    rel = pyparsing.oneOf("= ~= >= <=")

    expr = pyparsing.Forward()
    atom = pyparsing.Group(lpar + op + expr + rpar) \
        | pyparsing.Combine(lpar + k + rel + v + rpar)
    expr << atom + pyparsing.ZeroOrMore(expr)

    return expr
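A quick sanity check of the grammar: Combine collapses each (key rel value) leaf to a flat string with the suppressed parentheses dropped, while Group nests each operator expression, so parsing a conjunction should give a nested list like this:

expr = _parse_filter()
result = expr.parseString("(&(cn=adm*)(objectClass=person))")
print(result.asList())
# -> [['&', 'cn=adm*', 'objectClass=person']]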
def dist(a, b): return sum((i-j)**2 for i, j in zip(a, b))
def _get_path(self, prefix, override_receipt_id=None):
    """
    Returns the path to a resource with this instance's `receipt_id`.

    If `override_receipt_id` is given, the path to that resource will be
    retrieved instead. This allows us to retrieve images submitted in
    previous attempts (used for reverification, where we send a new face
    photo with the same photo ID from a previous attempt).
    """
    receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
    return os.path.join(prefix, receipt_id)