Dataset schema (per-record fields; string length ranges and class counts as
reported by the dataset viewer):

  repo_name : string, length 6-112
  path      : string, length 4-204
  copies    : string, length 1-3
  size      : string, length 4-6
  content   : string, length 714-810k
  license   : string, 15 classes
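A minimal consumption sketch for records with these fields. This assumes the
dump is materialized as JSON Lines with one record per line; the storage
format and the file name "dump.jsonl" are hypothetical, not specified by the
dump itself:

# Hypothetical loader: assumes a JSON Lines materialization of this dataset.
import json

def iter_records(path):
    """Yield dicts with keys repo_name, path, copies, size, content, license."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            yield json.loads(line)

# Example: tally records per license class (15 classes per the schema above).
counts = {}
for rec in iter_records("dump.jsonl"):
    counts[rec["license"]] = counts.get(rec["license"], 0) + 1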
repo_name: kvh/ramp
path: ramp/tests/test_modeling.py
copies: 1
size: 6228
content:
import os
import sys
sys.path.append('../..')

import unittest

import numpy as np
import pandas as pd
from pandas import DataFrame, Series, Index
from pandas.util.testing import assert_almost_equal

from ramp.estimators.base import Probabilities
from ramp.features.base import F, Map
from ramp.features.trained import Predictions
from ramp.model_definition import ModelDefinition
from ramp import modeling
from ramp.modeling import (fit_model, cross_validate, build_and_package_model,
                           generate_train, generate_test)
from ramp.tests.test_features import make_data


class DummyEstimator(object):
    def __init__(self):
        pass

    def fit(self, x, y):
        self.fitx = x
        self.fity = y

    def predict(self, x):
        self.predictx = x
        p = np.zeros(len(x))
        return p


class DummyCVEstimator(object):
    def __init__(self):
        self.fitx = []
        self.fity = []
        self.predictx = []

    def fit(self, x, y):
        self.fitx.append(x)
        self.fity.append(y)

    def predict(self, x):
        self.predictx.append(x)
        p = np.zeros(len(x))
        return p


class DummyProbEstimator(object):
    def __init__(self, n_clses):
        self.n_clses = n_clses

    def fit(self, x, y):
        pass

    def predict_proba(self, x):
        return np.zeros((len(x), self.n_clses))


class TestBasicModeling(unittest.TestCase):
    def setUp(self):
        self.data = make_data(10)

    def make_model_def_basic(self):
        features = [F(10), F('a')]
        target = F('b')
        estimator = DummyEstimator()
        model_def = ModelDefinition(features=features,
                                    estimator=estimator,
                                    target=target)
        return model_def

    def test_generate_train(self):
        model_def = self.make_model_def_basic()
        train_index = self.data.index[:5]
        x_train, y_train, ff, x = generate_train(model_def, self.data,
                                                 train_index=train_index)
        assert_almost_equal(x_train.index, train_index)

    def test_generate_test(self):
        model_def = self.make_model_def_basic()
        x, y, fitted_model = fit_model(model_def, self.data)
        test_index = self.data.index[:5]
        x_test, y_test = generate_test(model_def, self.data.loc[test_index],
                                       fitted_model)
        assert_almost_equal(x_test.index, test_index)

    def test_fit_model(self):
        model_def = self.make_model_def_basic()
        x, y, fitted_model = fit_model(model_def, self.data)
        fe = fitted_model.fitted_estimator
        self.assertEqual(fe.fitx.shape, x.shape)
        self.assertEqual(fe.fity.shape, y.shape)

    def test_predict(self):
        model_def = self.make_model_def_basic()
        x, y, fitted_model = fit_model(model_def, self.data)
        x, y_true = modeling.generate_test(model_def, self.data[:3],
                                           fitted_model)
        y_preds = fitted_model.fitted_estimator.predict(x)
        self.assertEqual(len(x), 3)
        self.assertEqual(len(y_true), 3)
        self.assertEqual(len(y_preds), 3)
        y_preds2 = modeling.predict_with_model(fitted_model, self.data[:3])
        assert_almost_equal(y_preds, y_preds2.values)

    def test_cross_validate(self):
        model_def = self.make_model_def_basic()
        results = cross_validate(model_def, self.data, folds=3)
        self.assertEqual(len(results), 3)

    def test_build_and_package_model(self):
        model_def = self.make_model_def_basic()
        desc = "State-of-the-Art Model"
        pkg = build_and_package_model(model_def, self.data, desc,
                                      train_index=self.data.index[:3])
        self.assertEqual(pkg.data_description, desc)
        self.assertTrue(pkg.fitted_model)

        # and evaluate
        pkg = build_and_package_model(model_def, self.data, desc,
                                      evaluate=True,
                                      train_index=self.data.index[:3])
        self.assertEqual(pkg.data_description, desc)
        self.assertTrue(pkg.fitted_model)

    def test_cross_validate_with_alternative_predictions(self):
        features = [F(10), F('a')]
        target = Map('b', np.log)
        estimator = DummyEstimator()
        model_def = ModelDefinition(features=features,
                                    estimator=estimator,
                                    target=target,
                                    evaluation_transformation=Map('__predictions', np.exp),
                                    evaluation_target=F('b'))
        results = cross_validate(model_def, self.data, folds=3)
        self.assertEqual(len(results), 3)

        # assert we've transformed predictions correctly
        yt = results[0].y_test
        assert_almost_equal(yt.values,
                            self.data['b'].reindex(yt.index).values)
        assert_almost_equal(estimator.fity,
                            np.log(self.data['b'].reindex(results[-1].y_train.index)).values)


class TestNestedModeling(unittest.TestCase):
    def setUp(self):
        self.data = make_data(10)

    def test_predictions_nest(self):
        inner_estimator = DummyEstimator()
        inner_model = ModelDefinition(features=[F('a')],
                                      estimator=inner_estimator,
                                      target=F('b'))
        features = [F('c'), Predictions(inner_model)]
        target = F('b')
        estimator = DummyEstimator()
        model_def = ModelDefinition(features=features,
                                    estimator=estimator,
                                    target=target)
        train_index = self.data.index[:5]
        x, y, fitted_model = fit_model(model_def, self.data,
                                       train_index=train_index)
        self.assertEqual(
            fitted_model.fitted_features[1].trained_data.fitted_estimator.fitx.shape,
            (5, 1))
        self.assertEqual(x.shape, (len(train_index), 2))
        x, y_true = modeling.generate_test(model_def, self.data[:3],
                                           fitted_model)
        assert_almost_equal(x[x.columns[1]].values, np.zeros(3))


if __name__ == '__main__':
    unittest.main()
license: mit
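A sketch of the end-to-end flow the tests above exercise, reusing the test
module's own helpers; this mirrors the test code rather than documenting
ramp's API beyond what the tests themselves use:

# Sketch only: mirrors the flow in TestBasicModeling above.
from ramp import modeling
from ramp.features.base import F
from ramp.model_definition import ModelDefinition
from ramp.modeling import fit_model
from ramp.tests.test_features import make_data
from ramp.tests.test_modeling import DummyEstimator  # the stub defined above

data = make_data(10)
model_def = ModelDefinition(features=[F(10), F('a')],
                            estimator=DummyEstimator(),  # any fit/predict object
                            target=F('b'))
x, y, fitted_model = fit_model(model_def, data)
preds = modeling.predict_with_model(fitted_model, data[:3])  # 3 predictions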
repo_name: dunovank/jupyter-themes
path: setup.py
copies: 1
size: 2939
content:
import os
from glob import glob
from setuptools import setup
from itertools import chain

pkgname = 'jupyterthemes'
major = 0
minor = 20
patch = 2
version = '.'.join([str(v) for v in [major, minor, patch]])
url = 'https://github.com/dunovank/jupyter-themes'
download_url = '/'.join([url, 'tarball', 'v' + version])

# get readme content after screenshots for pypi site
README = os.path.join(os.path.dirname(__file__), 'README.md')
with open(README) as read_me:
    longdescr = ''
    startReading = False
    for line in read_me:
        if "Travis" in line.strip():
            startReading = True
        if "Monospace Fonts" in line.strip():
            break
        if startReading:
            longdescr += line

# add layout, .less styles, and compiled .css files to pkg data
layout = os.path.join(pkgname, 'layout')
styles = os.path.join(pkgname, 'styles')
stylesCompiled = os.path.join(styles, 'compiled')
datafiles = {pkgname: []}
for subdir in ['defaults', 'layout', 'styles', 'styles/compiled']:
    filetypes = '*.*ss'
    if subdir == 'defaults':
        filetypes = '*.*s'
    files = glob(os.path.join(pkgname, subdir, filetypes))
    filesLocalPath = [os.sep.join(f.split(os.sep)[1:]) for f in files]
    datafiles[pkgname].extend(filesLocalPath)

# recursively point to all included font directories
fontfams = ['monospace', 'sans-serif', 'serif']
fsubdirs = [os.path.join(pkgname, 'fonts', subdir) for subdir in fontfams]
fontsdata = chain.from_iterable(
    [[os.sep.join(f.split(os.sep)[1:])
      for f in glob(os.path.join(fsub, '*', '*'))] for fsub in fsubdirs])
datafiles[pkgname].extend(list(fontsdata))

install_requires = ['jupyter_core', 'notebook>=5.6.0', 'ipython>=5.4.1',
                    'matplotlib>=1.4.3', 'lesscpy>=0.11.2']

setup(
    name='jupyterthemes',
    version=version,
    packages=['jupyterthemes'],
    include_package_data=True,
    package_data=datafiles,
    description='Select and install a Jupyter notebook theme',
    long_description=longdescr,
    license='MIT',
    url=url,
    download_url=download_url,
    author='dunovank',
    author_email='[email protected]',
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
    ],
    install_requires=install_requires,
    keywords=['jupyter', 'python', 'ipython', 'notebook', 'theme',
              'less', 'css'],
    entry_points={
        'console_scripts': [
            'jupyter-theme = jupyterthemes:main',
            'jt = jupyterthemes:main',
        ],
    })
license: mit
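One non-obvious step in the setup.py above is how glob hits are converted to
package-relative paths for package_data; a standalone illustration (the file
name below is invented for the example):

# Illustration of the path relativization used in setup.py above:
# package_data paths must be relative to the package directory, so the
# leading 'jupyterthemes' segment is dropped from each glob hit.
import os

hit = os.path.join('jupyterthemes', 'styles', 'compiled', 'theme.css')
relative = os.sep.join(hit.split(os.sep)[1:])
print(relative)  # -> styles/compiled/theme.css (with platform separators)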
repo_name: mjudsp/Tsallis
path: sklearn/ensemble/forest.py
copies: 1
size: 64666
content:
"""Forest of trees-based ensemble methods Those methods include random forests and extremely randomized trees. The module structure is the following: - The ``BaseForest`` base class implements a common ``fit`` method for all the estimators in the module. The ``fit`` method of the base ``Forest`` class calls the ``fit`` method of each sub-estimator on random samples (with replacement, a.k.a. bootstrap) of the training set. The init of the sub-estimator is further delegated to the ``BaseEnsemble`` constructor. - The ``ForestClassifier`` and ``ForestRegressor`` base classes further implement the prediction logic by computing an average of the predicted outcomes of the sub-estimators. - The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived classes provide the user with concrete implementations of the forest ensemble method using classical, deterministic ``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as sub-estimator implementations. - The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived classes provide the user with concrete implementations of the forest ensemble method using the extremely randomized trees ``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as sub-estimator implementations. Single and multi-output problems are both handled. """ # Authors: Gilles Louppe <[email protected]> # Brian Holt <[email protected]> # Joly Arnaud <[email protected]> # Fares Hedayati <[email protected]> # # License: BSD 3 clause from __future__ import division import warnings from warnings import warn from abc import ABCMeta, abstractmethod import numpy as np from scipy.sparse import issparse from scipy.sparse import hstack as sparse_hstack from ..base import ClassifierMixin, RegressorMixin from ..externals.joblib import Parallel, delayed from ..externals import six from ..feature_selection.from_model import _LearntSelectorMixin from ..metrics import r2_score from ..preprocessing import OneHotEncoder from ..tree import (DecisionTreeClassifier, DecisionTreeRegressor, ExtraTreeClassifier, ExtraTreeRegressor) from ..tree._tree import DTYPE, DOUBLE from ..utils import check_random_state, check_array, compute_sample_weight from ..exceptions import DataConversionWarning, NotFittedError from .base import BaseEnsemble, _partition_estimators from ..utils.fixes import bincount, parallel_helper from ..utils.multiclass import check_classification_targets __all__ = ["RandomForestClassifier", "RandomForestRegressor", "ExtraTreesClassifier", "ExtraTreesRegressor", "RandomTreesEmbedding"] MAX_INT = np.iinfo(np.int32).max def _generate_sample_indices(random_state, n_samples): """Private function used to _parallel_build_trees function.""" random_instance = check_random_state(random_state) sample_indices = random_instance.randint(0, n_samples, n_samples) return sample_indices def _generate_unsampled_indices(random_state, n_samples): """Private function used to forest._set_oob_score function.""" sample_indices = _generate_sample_indices(random_state, n_samples) sample_counts = bincount(sample_indices, minlength=n_samples) unsampled_mask = sample_counts == 0 indices_range = np.arange(n_samples) unsampled_indices = indices_range[unsampled_mask] return unsampled_indices def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees, verbose=0, class_weight=None): """Private function used to fit a single tree in parallel.""" if verbose > 1: print("building tree %d of %d" % (tree_idx + 1, n_trees)) if forest.bootstrap: n_samples = X.shape[0] if sample_weight is None: 
            curr_sample_weight = np.ones((n_samples,), dtype=np.float64)
        else:
            curr_sample_weight = sample_weight.copy()

        indices = _generate_sample_indices(tree.random_state, n_samples)
        sample_counts = bincount(indices, minlength=n_samples)
        curr_sample_weight *= sample_counts

        if class_weight == 'subsample':
            with warnings.catch_warnings():
                warnings.simplefilter('ignore', DeprecationWarning)
                curr_sample_weight *= compute_sample_weight('auto', y, indices)
        elif class_weight == 'balanced_subsample':
            curr_sample_weight *= compute_sample_weight('balanced', y, indices)

        tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False)
    else:
        tree.fit(X, y, sample_weight=sample_weight, check_input=False)

    return tree


class BaseForest(six.with_metaclass(ABCMeta, BaseEnsemble,
                                    _LearntSelectorMixin)):
    """Base class for forests of trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(BaseForest, self).__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)

        self.bootstrap = bootstrap
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        self.random_state = random_state
        self.verbose = verbose
        self.warm_start = warm_start
        self.class_weight = class_weight

    def apply(self, X):
        """Apply trees in the forest to X, return leaf indices.

        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        X_leaves : array_like, shape = [n_samples, n_estimators]
            For each datapoint x in X and for each tree in the forest,
            return the index of the leaf x ends up in.
        """
        X = self._validate_X_predict(X)
        results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                           backend="threading")(
            delayed(parallel_helper)(tree, 'apply', X, check_input=False)
            for tree in self.estimators_)

        return np.array(results).T

    def decision_path(self, X):
        """Return the decision path in the forest

        Parameters
        ----------
        X : array-like or sparse matrix, shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        indicator : sparse csr array, shape = [n_samples, n_nodes]
            Return a node indicator matrix where nonzero elements indicate
            that the samples go through the nodes.

        n_nodes_ptr : array of size (n_estimators + 1, )
            The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]]
            gives the indicator value for the i-th estimator.
        """
        X = self._validate_X_predict(X)
        indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                              backend="threading")(
            delayed(parallel_helper)(tree, 'decision_path', X,
                                     check_input=False)
            for tree in self.estimators_)

        n_nodes = [0]
        n_nodes.extend([i.shape[1] for i in indicators])
        n_nodes_ptr = np.array(n_nodes).cumsum()

        return sparse_hstack(indicators).tocsr(), n_nodes_ptr

    def fit(self, X, y, sample_weight=None):
        """Build a forest of trees from the training set (X, y).

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The training input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csc_matrix``.
        y : array-like, shape = [n_samples] or [n_samples, n_outputs]
            The target values (class labels in classification, real numbers in
            regression).

        sample_weight : array-like, shape = [n_samples] or None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.

        Returns
        -------
        self : object
            Returns self.
        """
        # Validate or convert input data
        X = check_array(X, accept_sparse="csc", dtype=DTYPE)
        y = check_array(y, accept_sparse='csc', ensure_2d=False, dtype=None)
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()

        # Remap output
        n_samples, self.n_features_ = X.shape

        y = np.atleast_1d(y)
        if y.ndim == 2 and y.shape[1] == 1:
            warn("A column-vector y was passed when a 1d array was"
                 " expected. Please change the shape of y to "
                 "(n_samples,), for example using ravel().",
                 DataConversionWarning, stacklevel=2)

        if y.ndim == 1:
            # reshape is necessary to preserve the data contiguity;
            # y[:, np.newaxis] would not.
            y = np.reshape(y, (-1, 1))

        self.n_outputs_ = y.shape[1]

        y, expanded_class_weight = self._validate_y_class_weight(y)

        if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
            y = np.ascontiguousarray(y, dtype=DOUBLE)

        if expanded_class_weight is not None:
            if sample_weight is not None:
                sample_weight = sample_weight * expanded_class_weight
            else:
                sample_weight = expanded_class_weight

        # Check parameters
        self._validate_estimator()

        if not self.bootstrap and self.oob_score:
            raise ValueError("Out of bag estimation only available"
                             " if bootstrap=True")

        random_state = check_random_state(self.random_state)

        if not self.warm_start:
            # Free allocated memory, if any
            self.estimators_ = []

        n_more_estimators = self.n_estimators - len(self.estimators_)

        if n_more_estimators < 0:
            raise ValueError('n_estimators=%d must be larger or equal to '
                             'len(estimators_)=%d when warm_start==True'
                             % (self.n_estimators, len(self.estimators_)))

        elif n_more_estimators == 0:
            warn("Warm-start fitting without increasing n_estimators does not "
                 "fit new trees.")
        else:
            if self.warm_start and len(self.estimators_) > 0:
                # We draw from the random state to get the random state we
                # would have got if we hadn't used a warm_start.
                random_state.randint(MAX_INT, size=len(self.estimators_))

            trees = []
            for i in range(n_more_estimators):
                tree = self._make_estimator(append=False)
                tree.set_params(random_state=random_state.randint(MAX_INT))
                trees.append(tree)

            # Parallel loop: we use the threading backend as the Cython code
            # for fitting the trees is internally releasing the Python GIL
            # making threading always more efficient than multiprocessing in
            # that case.
            trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                             backend="threading")(
                delayed(_parallel_build_trees)(
                    t, self, X, y, sample_weight, i, len(trees),
                    verbose=self.verbose, class_weight=self.class_weight)
                for i, t in enumerate(trees))

            # Collect newly grown trees
            self.estimators_.extend(trees)

        if self.oob_score:
            self._set_oob_score(X, y)

        # Decapsulate classes_ attributes
        if hasattr(self, "classes_") and self.n_outputs_ == 1:
            self.n_classes_ = self.n_classes_[0]
            self.classes_ = self.classes_[0]

        return self

    @abstractmethod
    def _set_oob_score(self, X, y):
        """Calculate out of bag predictions and score."""

    def _validate_y_class_weight(self, y):
        # Default implementation
        return y, None

    def _validate_X_predict(self, X):
        """Validate X whenever one tries to predict, apply, predict_proba"""
        if self.estimators_ is None or len(self.estimators_) == 0:
            raise NotFittedError("Estimator not fitted, "
                                 "call `fit` before exploiting the model.")

        return self.estimators_[0]._validate_X_predict(X, check_input=True)

    @property
    def feature_importances_(self):
        """Return the feature importances (the higher, the more important the
           feature).

        Returns
        -------
        feature_importances_ : array, shape = [n_features]
        """
        if self.estimators_ is None or len(self.estimators_) == 0:
            raise NotFittedError("Estimator not fitted, "
                                 "call `fit` before `feature_importances_`.")

        all_importances = Parallel(n_jobs=self.n_jobs,
                                   backend="threading")(
            delayed(getattr)(tree, 'feature_importances_')
            for tree in self.estimators_)

        return sum(all_importances) / len(self.estimators_)


class ForestClassifier(six.with_metaclass(ABCMeta, BaseForest,
                                          ClassifierMixin)):
    """Base class for forest of trees-based classifiers.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(ForestClassifier, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)

    def _set_oob_score(self, X, y):
        """Compute out-of-bag score"""
        X = check_array(X, dtype=DTYPE, accept_sparse='csr')

        n_classes_ = self.n_classes_
        n_samples = y.shape[0]

        oob_decision_function = []
        oob_score = 0.0
        predictions = []

        for k in range(self.n_outputs_):
            predictions.append(np.zeros((n_samples, n_classes_[k])))

        for estimator in self.estimators_:
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples)
            p_estimator = estimator.predict_proba(X[unsampled_indices, :],
                                                  check_input=False)

            if self.n_outputs_ == 1:
                p_estimator = [p_estimator]

            for k in range(self.n_outputs_):
                predictions[k][unsampled_indices, :] += p_estimator[k]

        for k in range(self.n_outputs_):
            if (predictions[k].sum(axis=1) == 0).any():
                warn("Some inputs do not have OOB scores. "
" "This probably means too few trees were used " "to compute any reliable oob estimates.") decision = (predictions[k] / predictions[k].sum(axis=1)[:, np.newaxis]) oob_decision_function.append(decision) oob_score += np.mean(y[:, k] == np.argmax(predictions[k], axis=1), axis=0) if self.n_outputs_ == 1: self.oob_decision_function_ = oob_decision_function[0] else: self.oob_decision_function_ = oob_decision_function self.oob_score_ = oob_score / self.n_outputs_ def _validate_y_class_weight(self, y): check_classification_targets(y) y = np.copy(y) expanded_class_weight = None if self.class_weight is not None: y_original = np.copy(y) self.classes_ = [] self.n_classes_ = [] y_store_unique_indices = np.zeros(y.shape, dtype=np.int) for k in range(self.n_outputs_): classes_k, y_store_unique_indices[:, k] = np.unique(y[:, k], return_inverse=True) self.classes_.append(classes_k) self.n_classes_.append(classes_k.shape[0]) y = y_store_unique_indices if self.class_weight is not None: valid_presets = ('auto', 'balanced', 'subsample', 'balanced_subsample') if isinstance(self.class_weight, six.string_types): if self.class_weight not in valid_presets: raise ValueError('Valid presets for class_weight include ' '"balanced" and "balanced_subsample". Given "%s".' % self.class_weight) if self.class_weight == "subsample": warn("class_weight='subsample' is deprecated in 0.17 and" "will be removed in 0.19. It was replaced by " "class_weight='balanced_subsample' using the balanced" "strategy.", DeprecationWarning) if self.warm_start: warn('class_weight presets "balanced" or "balanced_subsample" are ' 'not recommended for warm_start if the fitted data ' 'differs from the full dataset. In order to use ' '"balanced" weights, use compute_class_weight("balanced", ' 'classes, y). In place of y you can use a large ' 'enough sample of the full training set target to ' 'properly estimate the class frequency ' 'distributions. Pass the resulting weights as the ' 'class_weight parameter.') if (self.class_weight not in ['subsample', 'balanced_subsample'] or not self.bootstrap): if self.class_weight == 'subsample': class_weight = 'auto' elif self.class_weight == "balanced_subsample": class_weight = "balanced" else: class_weight = self.class_weight with warnings.catch_warnings(): if class_weight == "auto": warnings.simplefilter('ignore', DeprecationWarning) expanded_class_weight = compute_sample_weight(class_weight, y_original) return y, expanded_class_weight def predict(self, X): """Predict class for X. The predicted class of an input sample is a vote by the trees in the forest, weighted by their probability estimates. That is, the predicted class is the one with highest mean probability estimate across the trees. Parameters ---------- X : array-like or sparse matrix of shape = [n_samples, n_features] The input samples. Internally, it will be converted to ``dtype=np.float32`` and if a sparse matrix is provided to a sparse ``csr_matrix``. Returns ------- y : array of shape = [n_samples] or [n_samples, n_outputs] The predicted classes. """ proba = self.predict_proba(X) if self.n_outputs_ == 1: return self.classes_.take(np.argmax(proba, axis=1), axis=0) else: n_samples = proba[0].shape[0] predictions = np.zeros((n_samples, self.n_outputs_)) for k in range(self.n_outputs_): predictions[:, k] = self.classes_[k].take(np.argmax(proba[k], axis=1), axis=0) return predictions def predict_proba(self, X): """Predict class probabilities for X. 
        The predicted class probabilities of an input sample are computed as
        the mean predicted class probabilities of the trees in the forest. The
        class probability of a single tree is the fraction of samples of the
        same class in a leaf.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        # Check data
        X = self._validate_X_predict(X)

        # Assign chunk of trees to jobs
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)

        # Parallel loop
        all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose,
                             backend="threading")(
            delayed(parallel_helper)(e, 'predict_proba', X,
                                     check_input=False)
            for e in self.estimators_)

        # Reduce
        proba = all_proba[0]

        if self.n_outputs_ == 1:
            for j in range(1, len(all_proba)):
                proba += all_proba[j]

            proba /= len(self.estimators_)

        else:
            for j in range(1, len(all_proba)):
                for k in range(self.n_outputs_):
                    proba[k] += all_proba[j][k]

            for k in range(self.n_outputs_):
                proba[k] /= self.n_estimators

        return proba

    def predict_log_proba(self, X):
        """Predict class log-probabilities for X.

        The predicted class log-probabilities of an input sample are computed
        as the log of the mean predicted class probabilities of the trees in
        the forest.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        p : array of shape = [n_samples, n_classes], or a list of n_outputs
            such arrays if n_outputs > 1.
            The class probabilities of the input samples. The order of the
            classes corresponds to that in the attribute `classes_`.
        """
        proba = self.predict_proba(X)

        if self.n_outputs_ == 1:
            return np.log(proba)

        else:
            for k in range(self.n_outputs_):
                proba[k] = np.log(proba[k])

            return proba


class ForestRegressor(six.with_metaclass(ABCMeta, BaseForest, RegressorMixin)):
    """Base class for forest of trees-based regressors.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=10,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(ForestRegressor, self).__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

    def predict(self, X):
        """Predict regression target for X.

        The predicted regression target of an input sample is computed as the
        mean predicted regression targets of the trees in the forest.

        Parameters
        ----------
        X : array-like or sparse matrix of shape = [n_samples, n_features]
            The input samples. Internally, it will be converted to
            ``dtype=np.float32`` and if a sparse matrix is provided
            to a sparse ``csr_matrix``.

        Returns
        -------
        y : array of shape = [n_samples] or [n_samples, n_outputs]
            The predicted values.
""" # Check data X = self._validate_X_predict(X) # Assign chunk of trees to jobs n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs) # Parallel loop all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.verbose, backend="threading")( delayed(parallel_helper)(e, 'predict', X, check_input=False) for e in self.estimators_) # Reduce y_hat = sum(all_y_hat) / len(self.estimators_) return y_hat def _set_oob_score(self, X, y): """Compute out-of-bag scores""" X = check_array(X, dtype=DTYPE, accept_sparse='csr') n_samples = y.shape[0] predictions = np.zeros((n_samples, self.n_outputs_)) n_predictions = np.zeros((n_samples, self.n_outputs_)) for estimator in self.estimators_: unsampled_indices = _generate_unsampled_indices( estimator.random_state, n_samples) p_estimator = estimator.predict( X[unsampled_indices, :], check_input=False) if self.n_outputs_ == 1: p_estimator = p_estimator[:, np.newaxis] predictions[unsampled_indices, :] += p_estimator n_predictions[unsampled_indices, :] += 1 if (n_predictions == 0).any(): warn("Some inputs do not have OOB scores. " "This probably means too few trees were used " "to compute any reliable oob estimates.") n_predictions[n_predictions == 0] = 1 predictions /= n_predictions self.oob_prediction_ = predictions if self.n_outputs_ == 1: self.oob_prediction_ = \ self.oob_prediction_.reshape((n_samples, )) self.oob_score_ = 0.0 for k in range(self.n_outputs_): self.oob_score_ += r2_score(y[:, k], predictions[:, k]) self.oob_score_ /= self.n_outputs_ class RandomForestClassifier(ForestClassifier): """A random forest classifier. A random forest is a meta estimator that fits a number of decision tree classifiers on various sub-samples of the dataset and use averaging to improve the predictive accuracy and control over-fitting. The sub-sample size is always the same as the original input sample size but the samples are drawn with replacement if `bootstrap=True` (default). Read more in the :ref:`User Guide <forest>`. Parameters ---------- n_estimators : integer, optional (default=10) The number of trees in the forest. criterion : string, optional (default="gini") The function to measure the quality of a split. Supported criteria are "gini" for the Gini impurity and "entropy" for the information gain. Note: this parameter is tree-specific. max_features : int, float, string or None, optional (default="auto") The number of features to consider when looking for the best split: - If int, then consider `max_features` features at each split. - If float, then `max_features` is a percentage and `int(max_features * n_features)` features are considered at each split. - If "auto", then `max_features=sqrt(n_features)`. - If "sqrt", then `max_features=sqrt(n_features)` (same as "auto"). - If "log2", then `max_features=log2(n_features)`. - If None, then `max_features=n_features`. Note: the search for a split does not stop until at least one valid partition of the node samples is found, even if it requires to effectively inspect more than ``max_features`` features. max_depth : integer or None, optional (default=None) The maximum depth of the tree. If None, then nodes are expanded until all leaves are pure or until all leaves contain less than min_samples_split samples. Ignored if ``max_leaf_nodes`` is not None. min_samples_split : int, float, optional (default=2) The minimum number of samples required to split an internal node: - If int, then consider `min_samples_split` as the minimum number. 
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.

    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
        If not None then ``max_depth`` will be ignored.

    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.

    oob_score : bool
        Whether to use out-of-bag samples to estimate
        the generalization accuracy.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.

    class_weight : dict, list of dicts, "balanced", "balanced_subsample" or \
        None, optional
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``

        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.

        For multi-output, the weights of each column of y will be multiplied.

        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    classes_ : array of shape = [n_classes] or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).

    n_classes_ : int or list
        The number of classes (single output problem), or a list containing
        the number of classes for each output (multi-output problem).

    n_features_ : int
        The number of features when ``fit`` is performed.

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
    oob_decision_function_ : array of shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="gini",
                 pval=[0.0, 0.5],  # fork-specific parameter; not stored or
                                   # forwarded anywhere in this module
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(RandomForestClassifier, self).__init__(
            base_estimator=DecisionTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "random_state"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)

        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes


class RandomForestRegressor(ForestRegressor):
    """A random forest regressor.

    A random forest is a meta estimator that fits a number of decision tree
    regressors on various sub-samples of the dataset and uses averaging to
    improve the predictive accuracy and control over-fitting.
    The sub-sample size is always the same as the original
    input sample size but the samples are drawn with replacement if
    `bootstrap=True` (default).

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="mse")
        The function to measure the quality of a split. The only supported
        criterion is "mse" for the mean squared error.

    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Ignored if ``max_leaf_nodes`` is not None.

    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.

    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
        If not None then ``max_depth`` will be ignored.

    bootstrap : boolean, optional (default=True)
        Whether bootstrap samples are used when building trees.

    oob_score : bool, optional (default=False)
        Whether to use out-of-bag samples to estimate the R^2 on unseen data.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.

    Attributes
    ----------
    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    n_features_ : int
        The number of features when ``fit`` is performed.

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.

    oob_prediction_ : array of shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    See also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(RandomForestRegressor, self).__init__(
            base_estimator=DecisionTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "random_state"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes


class ExtraTreesClassifier(ForestClassifier):
    """An extra-trees classifier.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a.
    extra-trees) on various sub-samples of the dataset and uses averaging
    to improve the predictive accuracy and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="gini")
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.

    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Ignored if ``max_leaf_nodes`` is not None.

    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.

    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
        If not None then ``max_depth`` will be ignored.

    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.

    oob_score : bool, optional (default=False)
        Whether to use out-of-bag samples to estimate
        the generalization accuracy.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.
    class_weight : dict, list of dicts, "balanced", "balanced_subsample" or \
        None, optional
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``

        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.

        For multi-output, the weights of each column of y will be multiplied.

        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    classes_ : array of shape = [n_classes] or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).

    n_classes_ : int or list
        The number of classes (single output problem), or a list containing
        the number of classes for each output (multi-output problem).

    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    n_features_ : int
        The number of features when ``fit`` is performed.

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.

    oob_decision_function_ : array of shape = [n_samples, n_classes]
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble Classifier based on trees with optimal
        splits.
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None):
        super(ExtraTreesClassifier, self).__init__(
            base_estimator=ExtraTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "random_state"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight)

        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes


class ExtraTreesRegressor(ForestRegressor):
    """An extra-trees regressor.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.
    Parameters
    ----------
    n_estimators : integer, optional (default=10)
        The number of trees in the forest.

    criterion : string, optional (default="mse")
        The function to measure the quality of a split. The only supported
        criterion is "mse" for the mean squared error.

    max_features : int, float, string or None, optional (default="auto")
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a percentage and
          `int(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_depth : integer or None, optional (default=None)
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Ignored if ``max_leaf_nodes`` is not None.

    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.

    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
        If not None then ``max_depth`` will be ignored.

    bootstrap : boolean, optional (default=False)
        Whether bootstrap samples are used when building trees.

    oob_score : bool, optional (default=False)
        Whether to use out-of-bag samples to estimate the R^2 on unseen data.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.

    Attributes
    ----------
    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    feature_importances_ : array of shape = [n_features]
        The feature importances (the higher, the more important the feature).

    n_features_ : int
        The number of features.

    n_outputs_ : int
        The number of outputs.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.

    oob_prediction_ : array of shape = [n_samples]
        Prediction computed with out-of-bag estimate on the training set.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    See also
    --------
    sklearn.tree.ExtraTreeRegressor : Base estimator for this ensemble.
    RandomForestRegressor : Ensemble regressor using trees with optimal
        splits.
    """
    def __init__(self,
                 n_estimators=10,
                 criterion="mse",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(ExtraTreesRegressor, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "random_state"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes


class RandomTreesEmbedding(BaseForest):
    """An ensemble of totally random trees.

    An unsupervised transformation of a dataset to a high-dimensional
    sparse representation. A datapoint is coded according to which leaf of
    each tree it is sorted into. Using a one-hot encoding of the leaves,
    this leads to a binary coding with as many ones as there are trees in
    the forest.

    The dimensionality of the resulting representation is
    ``n_out <= n_estimators * max_leaf_nodes``. If ``max_leaf_nodes == None``,
    the number of leaf nodes is at most ``n_estimators * 2 ** max_depth``.

    Read more in the :ref:`User Guide <random_trees_embedding>`.

    Parameters
    ----------
    n_estimators : int
        Number of trees in the forest.

    max_depth : int
        The maximum depth of each tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
        Ignored if ``max_leaf_nodes`` is not None.

    min_samples_split : int, float, optional (default=2)
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a percentage and
          `ceil(min_samples_split * n_samples)` is the minimum
          number of samples for each split.

    min_samples_leaf : int, float, optional (default=1)
        The minimum number of samples required to be at a leaf node:

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a percentage and
          `ceil(min_samples_leaf * n_samples)` is the minimum
          number of samples for each node.

    min_weight_fraction_leaf : float, optional (default=0.)
        The minimum weighted fraction of the input samples required to be at a
        leaf node.

    max_leaf_nodes : int or None, optional (default=None)
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
        If not None then ``max_depth`` will be ignored.
    sparse_output : bool, optional (default=True)
        Whether or not to return a sparse CSR matrix, as default behavior,
        or to return a dense array compatible with dense pipeline operators.

    n_jobs : integer, optional (default=1)
        The number of jobs to run in parallel for both `fit` and `predict`.
        If -1, then the number of jobs is set to the number of cores.

    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.

    verbose : int, optional (default=0)
        Controls the verbosity of the tree building process.

    warm_start : bool, optional (default=False)
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest.

    Attributes
    ----------
    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.
    .. [2] Moosmann, F. and Triggs, B. and Jurie, F. "Fast discriminative
           visual codebooks using randomized clustering forests"
           NIPS 2007
    """
    def __init__(self,
                 n_estimators=10,
                 max_depth=5,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_leaf_nodes=None,
                 sparse_output=True,
                 n_jobs=1,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super(RandomTreesEmbedding, self).__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "random_state"),
            bootstrap=False,
            oob_score=False,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start)

        self.criterion = 'mse'
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = 1
        self.max_leaf_nodes = max_leaf_nodes
        self.sparse_output = sparse_output

    def _set_oob_score(self, X, y):
        raise NotImplementedError("OOB score not supported by tree embedding")

    def fit(self, X, y=None, sample_weight=None):
        """Fit estimator.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            The input samples. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csc_matrix`` for maximum efficiency.

        Returns
        -------
        self : object
            Returns self.

        """
        self.fit_transform(X, y, sample_weight=sample_weight)
        return self

    def fit_transform(self, X, y=None, sample_weight=None):
        """Fit estimator and transform dataset.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            Input data used to build forests. Use ``dtype=np.float32`` for
            maximum efficiency.

        Returns
        -------
        X_transformed : sparse matrix, shape=(n_samples, n_out)
            Transformed dataset.
        """
        # ensure_2d=False because there are actually unit tests checking that
        # we fail for 1d.
        X = check_array(X, accept_sparse=['csc'], ensure_2d=False)
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
X.sort_indices() rnd = check_random_state(self.random_state) y = rnd.uniform(size=X.shape[0]) super(RandomTreesEmbedding, self).fit(X, y, sample_weight=sample_weight) self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output) return self.one_hot_encoder_.fit_transform(self.apply(X)) def transform(self, X): """Transform dataset. Parameters ---------- X : array-like or sparse matrix, shape=(n_samples, n_features) Input data to be transformed. Use ``dtype=np.float32`` for maximum efficiency. Sparse matrices are also supported, use sparse ``csr_matrix`` for maximum efficiency. Returns ------- X_transformed : sparse matrix, shape=(n_samples, n_out) Transformed dataset. """ return self.one_hot_encoder_.transform(self.apply(X))
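A minimal usage sketch for the class above (an illustrative addition, not part of the scikit-learn source; the data and sizes are arbitrary). Because each tree contributes exactly one leaf per sample, every transformed row has exactly ``n_estimators`` nonzero entries:

import numpy as np
from sklearn.ensemble import RandomTreesEmbedding

X = np.random.RandomState(0).rand(100, 4)
embedder = RandomTreesEmbedding(n_estimators=10, max_depth=3, random_state=0)
X_sparse = embedder.fit_transform(X)  # sparse CSR matrix, shape (100, n_out)
assert X_sparse.nnz == 100 * 10       # one active leaf per (sample, tree)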
bsd-3-clause
lancezlin/ml_template_py
lib/python2.7/site-packages/mpl_toolkits/tests/test_mplot3d.py
4
11088
import sys import nose from nose.tools import assert_raises from mpl_toolkits.mplot3d import Axes3D, axes3d from matplotlib import cm from matplotlib.testing.decorators import image_comparison, cleanup import matplotlib.pyplot as plt import numpy as np @image_comparison(baseline_images=['bar3d'], remove_text=True) def test_bar3d(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') for c, z in zip(['r', 'g', 'b', 'y'], [30, 20, 10, 0]): xs = np.arange(20) ys = np.arange(20) cs = [c] * len(xs) cs[0] = 'c' ax.bar(xs, ys, zs=z, zdir='y', color=cs, alpha=0.8) @image_comparison(baseline_images=['contour3d'], remove_text=True) def test_contour3d(): fig = plt.figure() ax = fig.gca(projection='3d') X, Y, Z = axes3d.get_test_data(0.05) cset = ax.contour(X, Y, Z, zdir='z', offset=-100, cmap=cm.coolwarm) cset = ax.contour(X, Y, Z, zdir='x', offset=-40, cmap=cm.coolwarm) cset = ax.contour(X, Y, Z, zdir='y', offset=40, cmap=cm.coolwarm) ax.set_xlim(-40, 40) ax.set_ylim(-40, 40) ax.set_zlim(-100, 100) @image_comparison(baseline_images=['contourf3d'], remove_text=True) def test_contourf3d(): fig = plt.figure() ax = fig.gca(projection='3d') X, Y, Z = axes3d.get_test_data(0.05) cset = ax.contourf(X, Y, Z, zdir='z', offset=-100, cmap=cm.coolwarm) cset = ax.contourf(X, Y, Z, zdir='x', offset=-40, cmap=cm.coolwarm) cset = ax.contourf(X, Y, Z, zdir='y', offset=40, cmap=cm.coolwarm) ax.set_xlim(-40, 40) ax.set_ylim(-40, 40) ax.set_zlim(-100, 100) @image_comparison(baseline_images=['contourf3d_fill'], remove_text=True) def test_contourf3d_fill(): fig = plt.figure() ax = fig.gca(projection='3d') X, Y = np.meshgrid(np.arange(-2, 2, 0.25), np.arange(-2, 2, 0.25)) Z = X.clip(0, 0) # This produces holes in the z=0 surface that causes rendering errors if # the Poly3DCollection is not aware of path code information (issue #4784) Z[::5, ::5] = 0.1 cset = ax.contourf(X, Y, Z, offset=0, levels=[-0.1, 0], cmap=cm.coolwarm) ax.set_xlim(-2, 2) ax.set_ylim(-2, 2) ax.set_zlim(-1, 1) @image_comparison(baseline_images=['lines3d'], remove_text=True) def test_lines3d(): fig = plt.figure() ax = fig.gca(projection='3d') theta = np.linspace(-4 * np.pi, 4 * np.pi, 100) z = np.linspace(-2, 2, 100) r = z ** 2 + 1 x = r * np.sin(theta) y = r * np.cos(theta) ax.plot(x, y, z) @image_comparison(baseline_images=['mixedsubplot'], remove_text=True) def test_mixedsubplots(): def f(t): s1 = np.cos(2*np.pi*t) e1 = np.exp(-t) return np.multiply(s1, e1) t1 = np.arange(0.0, 5.0, 0.1) t2 = np.arange(0.0, 5.0, 0.02) fig = plt.figure(figsize=plt.figaspect(2.)) ax = fig.add_subplot(2, 1, 1) l = ax.plot(t1, f(t1), 'bo', t2, f(t2), 'k--', markerfacecolor='green') ax.grid(True) ax = fig.add_subplot(2, 1, 2, projection='3d') X, Y = np.meshgrid(np.arange(-5, 5, 0.25), np.arange(-5, 5, 0.25)) R = np.sqrt(X ** 2 + Y ** 2) Z = np.sin(R) surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, linewidth=0, antialiased=False) ax.set_zlim3d(-1, 1) @image_comparison(baseline_images=['scatter3d'], remove_text=True) def test_scatter3d(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.scatter(np.arange(10), np.arange(10), np.arange(10), c='r', marker='o') ax.scatter(np.arange(10, 20), np.arange(10, 20), np.arange(10, 20), c='b', marker='^') @image_comparison(baseline_images=['scatter3d_color'], remove_text=True, extensions=['png']) def test_scatter3d_color(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.scatter(np.arange(10), np.arange(10), np.arange(10), color='r', marker='o') ax.scatter(np.arange(10, 20), np.arange(10, 
20), np.arange(10, 20), color='b', marker='s') @image_comparison(baseline_images=['surface3d'], remove_text=True) def test_surface3d(): fig = plt.figure() ax = fig.gca(projection='3d') X = np.arange(-5, 5, 0.25) Y = np.arange(-5, 5, 0.25) X, Y = np.meshgrid(X, Y) R = np.sqrt(X ** 2 + Y ** 2) Z = np.sin(R) surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm, lw=0, antialiased=False) ax.set_zlim(-1.01, 1.01) fig.colorbar(surf, shrink=0.5, aspect=5) @image_comparison(baseline_images=['text3d']) def test_text3d(): fig = plt.figure() ax = fig.gca(projection='3d') zdirs = (None, 'x', 'y', 'z', (1, 1, 0), (1, 1, 1)) xs = (2, 6, 4, 9, 7, 2) ys = (6, 4, 8, 7, 2, 2) zs = (4, 2, 5, 6, 1, 7) for zdir, x, y, z in zip(zdirs, xs, ys, zs): label = '(%d, %d, %d), dir=%s' % (x, y, z, zdir) ax.text(x, y, z, label, zdir) ax.text(1, 1, 1, "red", color='red') ax.text2D(0.05, 0.95, "2D Text", transform=ax.transAxes) ax.set_xlim3d(0, 10) ax.set_ylim3d(0, 10) ax.set_zlim3d(0, 10) ax.set_xlabel('X axis') ax.set_ylabel('Y axis') ax.set_zlabel('Z axis') @image_comparison(baseline_images=['trisurf3d'], remove_text=True) def test_trisurf3d(): n_angles = 36 n_radii = 8 radii = np.linspace(0.125, 1.0, n_radii) angles = np.linspace(0, 2*np.pi, n_angles, endpoint=False) angles = np.repeat(angles[..., np.newaxis], n_radii, axis=1) angles[:, 1::2] += np.pi/n_angles x = np.append(0, (radii*np.cos(angles)).flatten()) y = np.append(0, (radii*np.sin(angles)).flatten()) z = np.sin(-x*y) fig = plt.figure() ax = fig.gca(projection='3d') ax.plot_trisurf(x, y, z, cmap=cm.jet, linewidth=0.2) @image_comparison(baseline_images=['wireframe3d'], remove_text=True) def test_wireframe3d(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') X, Y, Z = axes3d.get_test_data(0.05) ax.plot_wireframe(X, Y, Z, rstride=10, cstride=10) @image_comparison(baseline_images=['wireframe3dzerocstride'], remove_text=True, extensions=['png']) def test_wireframe3dzerocstride(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') X, Y, Z = axes3d.get_test_data(0.05) ax.plot_wireframe(X, Y, Z, rstride=10, cstride=0) @image_comparison(baseline_images=['wireframe3dzerorstride'], remove_text=True, extensions=['png']) def test_wireframe3dzerorstride(): fig = plt.figure() ax = fig.add_subplot(111, projection='3d') X, Y, Z = axes3d.get_test_data(0.05) ax.plot_wireframe(X, Y, Z, rstride=0, cstride=10) @cleanup def test_wireframe3dzerostrideraises(): if sys.version_info[:2] < (2, 7): raise nose.SkipTest("assert_raises as context manager " "not supported with Python < 2.7") fig = plt.figure() ax = fig.add_subplot(111, projection='3d') X, Y, Z = axes3d.get_test_data(0.05) with assert_raises(ValueError): ax.plot_wireframe(X, Y, Z, rstride=0, cstride=0) @image_comparison(baseline_images=['quiver3d'], remove_text=True) def test_quiver3d(): fig = plt.figure() ax = fig.gca(projection='3d') x, y, z = np.ogrid[-1:0.8:10j, -1:0.8:10j, -1:0.6:3j] u = np.sin(np.pi * x) * np.cos(np.pi * y) * np.cos(np.pi * z) v = -np.cos(np.pi * x) * np.sin(np.pi * y) * np.cos(np.pi * z) w = (np.sqrt(2.0 / 3.0) * np.cos(np.pi * x) * np.cos(np.pi * y) * np.sin(np.pi * z)) ax.quiver(x, y, z, u, v, w, length=0.1) @image_comparison(baseline_images=['quiver3d_empty'], remove_text=True) def test_quiver3d_empty(): fig = plt.figure() ax = fig.gca(projection='3d') x, y, z = np.ogrid[-1:0.8:0j, -1:0.8:0j, -1:0.6:0j] u = np.sin(np.pi * x) * np.cos(np.pi * y) * np.cos(np.pi * z) v = -np.cos(np.pi * x) * np.sin(np.pi * y) * np.cos(np.pi * z) w = (np.sqrt(2.0 / 3.0) * 
np.cos(np.pi * x) * np.cos(np.pi * y) * np.sin(np.pi * z)) ax.quiver(x, y, z, u, v, w, length=0.1) @image_comparison(baseline_images=['quiver3d_masked'], remove_text=True) def test_quiver3d_masked(): fig = plt.figure() ax = fig.gca(projection='3d') # Using mgrid here instead of ogrid because masked_where doesn't # seem to like broadcasting very much... x, y, z = np.mgrid[-1:0.8:10j, -1:0.8:10j, -1:0.6:3j] u = np.sin(np.pi * x) * np.cos(np.pi * y) * np.cos(np.pi * z) v = -np.cos(np.pi * x) * np.sin(np.pi * y) * np.cos(np.pi * z) w = (np.sqrt(2.0 / 3.0) * np.cos(np.pi * x) * np.cos(np.pi * y) * np.sin(np.pi * z)) u = np.ma.masked_where((-0.4 < x) & (x < 0.1), u, copy=False) v = np.ma.masked_where((0.1 < y) & (y < 0.7), v, copy=False) ax.quiver(x, y, z, u, v, w, length=0.1) @image_comparison(baseline_images=['quiver3d_pivot_middle'], remove_text=True, extensions=['png']) def test_quiver3d_pivot_middle(): fig = plt.figure() ax = fig.gca(projection='3d') x, y, z = np.ogrid[-1:0.8:10j, -1:0.8:10j, -1:0.6:3j] u = np.sin(np.pi * x) * np.cos(np.pi * y) * np.cos(np.pi * z) v = -np.cos(np.pi * x) * np.sin(np.pi * y) * np.cos(np.pi * z) w = (np.sqrt(2.0 / 3.0) * np.cos(np.pi * x) * np.cos(np.pi * y) * np.sin(np.pi * z)) ax.quiver(x, y, z, u, v, w, length=0.1, pivot='middle') @image_comparison(baseline_images=['quiver3d_pivot_tail'], remove_text=True, extensions=['png']) def test_quiver3d_pivot_tail(): fig = plt.figure() ax = fig.gca(projection='3d') x, y, z = np.ogrid[-1:0.8:10j, -1:0.8:10j, -1:0.6:3j] u = np.sin(np.pi * x) * np.cos(np.pi * y) * np.cos(np.pi * z) v = -np.cos(np.pi * x) * np.sin(np.pi * y) * np.cos(np.pi * z) w = (np.sqrt(2.0 / 3.0) * np.cos(np.pi * x) * np.cos(np.pi * y) * np.sin(np.pi * z)) ax.quiver(x, y, z, u, v, w, length=0.1, pivot='tail') @image_comparison(baseline_images=['axes3d_labelpad'], extensions=['png']) def test_axes3d_labelpad(): from nose.tools import assert_equal from matplotlib import rcParams fig = plt.figure() ax = Axes3D(fig) # labelpad respects rcParams assert_equal(ax.xaxis.labelpad, rcParams['axes.labelpad']) # labelpad can be set in set_label ax.set_xlabel('X LABEL', labelpad=10) assert_equal(ax.xaxis.labelpad, 10) ax.set_ylabel('Y LABEL') ax.set_zlabel('Z LABEL') # or manually ax.yaxis.labelpad = 20 ax.zaxis.labelpad = -40 # Tick labels also respect tick.pad (also from rcParams) for i, tick in enumerate(ax.yaxis.get_major_ticks()): tick.set_pad(tick.get_pad() - i * 5) @image_comparison(baseline_images=['axes3d_cla'], extensions=['png']) def test_axes3d_cla(): # fixed in pull request 4553 fig = plt.figure() ax = fig.add_subplot(1,1,1, projection='3d') ax.set_axis_off() ax.cla() # make sure the axis displayed is 3D (not 2D) if __name__ == '__main__': import nose nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
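The decorator pattern above extends naturally to new cases. A hedged sketch of one more comparison test, assuming a baseline image named 'lines3d_simple' had been generated and checked in (the baseline name and plot data are hypothetical):

@image_comparison(baseline_images=['lines3d_simple'], remove_text=True,
                  extensions=['png'])
def test_lines3d_simple():
    # minimal 3D line plot compared against a stored baseline image
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.plot([0, 1, 2], [0, 1, 4], [0, 1, 8])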
mit
Petr-Kovalev/nupic-win32
examples/opf/tools/sp_plotter.py
1
15766
#! /usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have purchased from # Numenta, Inc. a separate commercial license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import sys import os import time import copy import csv import numpy as np from nupic.research import FDRCSpatial2 from nupic.bindings.math import GetNTAReal import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt realDType = GetNTAReal() ############################################################################ def generatePlot(outputs, origData): """ Generates a table where each cell represent a frequency of pairs as described below. x coordinate is the % difference between input records (origData list), y coordinate is the % difference between corresponding output records. """ PLOT_PRECISION = 100 distribMatrix = np.zeros((PLOT_PRECISION+1,PLOT_PRECISION+1)) outputSize = len(outputs) for i in range(0,outputSize): for j in range(i+1,outputSize): in1 = outputs[i] in2 = outputs[j] dist = (abs(in1-in2) > 0.1) intDist = int(dist.sum()/2+0.1) orig1 = origData[i] orig2 = origData[j] origDist = (abs(orig1-orig2) > 0.1) intOrigDist = int(origDist.sum()/2+0.1) if intDist < 2 and intOrigDist > 10: print 'Elements %d,%d has very small SP distance: %d' % (i, j, intDist) print 'Input elements distance is %d' % intOrigDist x = int(PLOT_PRECISION*intDist/40.0) y = int(PLOT_PRECISION*intOrigDist/42.0) if distribMatrix[x, y] < 0.1: distribMatrix[x, y] = 3 else: if distribMatrix[x, y] < 10: distribMatrix[x, y] += 1 # Add some elements for the scale drawing distribMatrix[4, 50] = 3 distribMatrix[4, 52] = 4 distribMatrix[4, 54] = 5 distribMatrix[4, 56] = 6 distribMatrix[4, 58] = 7 distribMatrix[4, 60] = 8 distribMatrix[4, 62] = 9 distribMatrix[4, 64] = 10 return distribMatrix ############################################################################ def generateRandomInput(numRecords, elemSize = 400, numSet = 42): """ Generates a set of input record Params: numRecords - how many records to generate elemSize - the size of each record (num 0s or 1s) numSet - how many 1s in each record Returns: a list of inputs """ inputs = [] for _ in xrange(numRecords): input = np.zeros(elemSize, dtype=realDType) for _ in range(0,numSet): ind = np.random.random_integers(0, elemSize-1, 1)[0] input[ind] = 1 while abs(input.sum() - numSet) > 0.1: ind = np.random.random_integers(0, elemSize-1, 1)[0] input[ind] = 1 inputs.append(input) return inputs ############################################################################ def appendInputWithSimilarValues(inputs): """ Creates an 'one-off' record for each record in the inputs. Appends new records to the same inputs list. 
""" numInputs = len(inputs) for i in xrange(numInputs): input = inputs[i] for j in xrange(len(input)-1): if input[j] == 1 and input[j+1] == 0: newInput = copy.deepcopy(input) newInput[j] = 0 newInput[j+1] = 1 inputs.append(newInput) break ############################################################################ def appendInputWithNSimilarValues(inputs, numNear = 10): """ Creates a neighboring record for each record in the inputs and adds new records at the end of the inputs list """ numInputs = len(inputs) skipOne = False for i in xrange(numInputs): input = inputs[i] numChanged = 0 newInput = copy.deepcopy(input) for j in xrange(len(input)-1): if skipOne: skipOne = False continue if input[j] == 1 and input[j+1] == 0: newInput[j] = 0 newInput[j+1] = 1 inputs.append(newInput) newInput = copy.deepcopy(newInput) #print input #print newInput numChanged += 1 skipOne = True if numChanged == numNear: break ############################################################################ def modifyBits(inputVal, maxChanges): """ Modifies up to maxChanges number of bits in the inputVal """ changes = np.random.random_integers(0, maxChanges, 1)[0] if changes == 0: return inputVal inputWidth = len(inputVal) whatToChange = np.random.random_integers(0, 41, changes) runningIndex = -1 numModsDone = 0 for i in xrange(inputWidth): if numModsDone >= changes: break if inputVal[i] == 1: runningIndex += 1 if runningIndex in whatToChange: if i != 0 and inputVal[i-1] == 0: inputVal[i-1] = 1 inputVal[i] = 0 return inputVal ############################################################################ def getRandomWithMods(inputSpace, maxChanges): """ Returns a random selection from the inputSpace with randomly modified up to maxChanges number of bits. """ size = len(inputSpace) ind = np.random.random_integers(0, size-1, 1)[0] value = copy.deepcopy(inputSpace[ind]) if maxChanges == 0: return value return modifyBits(value, maxChanges) ############################################################################ def testSP(): """ Run a SP test """ elemSize = 400 numSet = 42 addNear = True numRecords = 2 wantPlot = True poolPct = 0.5 itr = 1 doLearn = True while numRecords < 3: # Setup a SP sp = FDRCSpatial2.FDRCSpatial2( coincidencesShape=(2048, 1), inputShape = (1, elemSize), inputBorder = elemSize/2-1, coincInputRadius = elemSize/2, numActivePerInhArea = 40, spVerbosity = 0, stimulusThreshold = 0, seed = 1, coincInputPoolPct = poolPct, globalInhibition = True ) # Generate inputs using rand() inputs = generateRandomInput(numRecords, elemSize, numSet) if addNear: # Append similar entries (distance of 1) appendInputWithNSimilarValues(inputs, 42) inputSize = len(inputs) print 'Num random records = %d, inputs to process %d' % (numRecords, inputSize) # Run a number of iterations, with learning on or off, # retrieve results from the last iteration only outputs = np.zeros((inputSize,2048)) numIter = 1 if doLearn: numIter = itr for iter in xrange(numIter): for i in xrange(inputSize): time.sleep(0.001) if iter == numIter - 1: outputs[i] = sp.compute(inputs[i], learn=doLearn, infer=False) #print outputs[i].sum(), outputs[i] else: sp.compute(inputs[i], learn=doLearn, infer=False) # Build a plot from the generated input and output and display it distribMatrix = generatePlot(outputs, inputs) # If we don't want a plot, just continue if wantPlot: plt.imshow(distribMatrix, origin='lower', interpolation = "nearest") plt.ylabel('SP (2048/40) distance in %') plt.xlabel('Input (400/42) distance in %') title = 'SP distribution' if doLearn: 
title += ', leaning ON' else: title += ', learning OFF' title += ', inputs = %d' % len(inputs) title += ', iterations = %d' % numIter title += ', poolPct =%f' % poolPct plt.suptitle(title, fontsize=12) plt.show() #plt.savefig(os.path.join('~/Desktop/ExperimentResults/videos5', '%s' % numRecords)) #plt.clf() numRecords += 1 return ############################################################################ def testSPNew(): """ New version of the test""" elemSize = 400 numSet = 42 addNear = True numRecords = 1000 wantPlot = False poolPct = 0.5 itr = 5 pattern = [60, 1000] doLearn = True start = 1 learnIter = 0 noLearnIter = 0 numLearns = 0 numTests = 0 numIter = 1 numGroups = 1000 PLOT_PRECISION = 100.0 distribMatrix = np.zeros((PLOT_PRECISION+1,PLOT_PRECISION+1)) inputs = generateRandomInput(numGroups, elemSize, numSet) # Setup a SP sp = FDRCSpatial2.FDRCSpatial2( coincidencesShape=(2048, 1), inputShape = (1, elemSize), inputBorder = elemSize/2-1, coincInputRadius = elemSize/2, numActivePerInhArea = 40, spVerbosity = 0, stimulusThreshold = 0, synPermConnected = 0.12, seed = 1, coincInputPoolPct = poolPct, globalInhibition = True ) cleanPlot = False for i in xrange(numRecords): input1 = getRandomWithMods(inputs, 4) if i % 2 == 0: input2 = getRandomWithMods(inputs, 4) else: input2 = input1.copy() input2 = modifyBits(input2, 21) inDist = (abs(input1-input2) > 0.1) intInDist = int(inDist.sum()/2+0.1) #print intInDist if start == 0: doLearn = True learnIter += 1 if learnIter == pattern[start]: numLearns += 1 start = 1 noLearnIter = 0 elif start == 1: doLearn = False noLearnIter += 1 if noLearnIter == pattern[start]: numTests += 1 start = 0 learnIter = 0 cleanPlot = True output1 = sp.compute(input1, learn=doLearn, infer=False).copy() output2 = sp.compute(input2, learn=doLearn, infer=False).copy() time.sleep(0.001) outDist = (abs(output1-output2) > 0.1) intOutDist = int(outDist.sum()/2+0.1) if not doLearn and intOutDist < 2 and intInDist > 10: """ sp.spVerbosity = 10 sp.compute(input1, learn=doLearn, infer=False) sp.compute(input2, learn=doLearn, infer=False) sp.spVerbosity = 0 print 'Elements has very small SP distance: %d' % intOutDist print output1.nonzero() print output2.nonzero() print sp._firingBoostFactors[output1.nonzero()[0]] print sp._synPermBoostFactors[output1.nonzero()[0]] print 'Input elements distance is %d' % intInDist print input1.nonzero() print input2.nonzero() sys.stdin.readline() """ if not doLearn: x = int(PLOT_PRECISION*intOutDist/40.0) y = int(PLOT_PRECISION*intInDist/42.0) if distribMatrix[x, y] < 0.1: distribMatrix[x, y] = 3 else: if distribMatrix[x, y] < 10: distribMatrix[x, y] += 1 #print i # If we don't want a plot, just continue if wantPlot and cleanPlot: plt.imshow(distribMatrix, origin='lower', interpolation = "nearest") plt.ylabel('SP (2048/40) distance in %') plt.xlabel('Input (400/42) distance in %') title = 'SP distribution' #if doLearn: # title += ', leaning ON' #else: # title += ', learning OFF' title += ', learn sets = %d' % numLearns title += ', test sets = %d' % numTests title += ', iter = %d' % numIter title += ', groups = %d' % numGroups title += ', Pct =%f' % poolPct plt.suptitle(title, fontsize=12) #plt.show() plt.savefig(os.path.join('~/Desktop/ExperimentResults/videosNew', '%s' % i)) plt.clf() distribMatrix = np.zeros((PLOT_PRECISION+1,PLOT_PRECISION+1)) cleanPlot = False ############################################################################ def testSPFile(): """ Run test on the data file - the file has records previously encoded. 
""" spSize = 2048 spSet = 40 poolPct = 0.5 pattern = [50, 1000] doLearn = True PLOT_PRECISION = 100.0 distribMatrix = np.zeros((PLOT_PRECISION+1,PLOT_PRECISION+1)) inputs = [] #file = open('~/Desktop/ExperimentResults/sampleArtificial.csv', 'rb') #elemSize = 400 #numSet = 42 #file = open('~/Desktop/ExperimentResults/sampleDataBasilOneField.csv', 'rb') #elemSize = 499 #numSet = 7 outdir = '~/Desktop/ExperimentResults/Basil100x21' inputFile = outdir+'.csv' file = open(inputFile, 'rb') elemSize = 100 numSet = 21 reader = csv.reader(file) for row in reader: input = np.array(map(float, row), dtype=realDType) if len(input.nonzero()[0]) != numSet: continue inputs.append(input.copy()) file.close() # Setup a SP sp = FDRCSpatial2.FDRCSpatial2( coincidencesShape=(spSize, 1), inputShape = (1, elemSize), inputBorder = (elemSize-1)/2, coincInputRadius = elemSize/2, numActivePerInhArea = spSet, spVerbosity = 0, stimulusThreshold = 0, synPermConnected = 0.10, seed = 1, coincInputPoolPct = poolPct, globalInhibition = True ) cleanPlot = False doLearn = False print 'Finished reading file, inputs/outputs to process =', len(inputs) size = len(inputs) for iter in xrange(100): print 'Iteration', iter # Learn if iter != 0: for learnRecs in xrange(pattern[0]): ind = np.random.random_integers(0, size-1, 1)[0] sp.compute(inputs[ind], learn=True, infer=False) # Test for _ in xrange(pattern[1]): rand1 = np.random.random_integers(0, size-1, 1)[0] rand2 = np.random.random_integers(0, size-1, 1)[0] output1 = sp.compute(inputs[rand1], learn=False, infer=True).copy() output2 = sp.compute(inputs[rand2], learn=False, infer=True).copy() outDist = (abs(output1-output2) > 0.1) intOutDist = int(outDist.sum()/2+0.1) inDist = (abs(inputs[rand1]-inputs[rand2]) > 0.1) intInDist = int(inDist.sum()/2+0.1) if intInDist != numSet or intOutDist != spSet: print rand1, rand2, '-', intInDist, intOutDist x = int(PLOT_PRECISION*intOutDist/spSet) y = int(PLOT_PRECISION*intInDist/numSet) if distribMatrix[x, y] < 0.1: distribMatrix[x, y] = 3 else: if distribMatrix[x, y] < 10: distribMatrix[x, y] += 1 if True: plt.imshow(distribMatrix, origin='lower', interpolation = "nearest") plt.ylabel('SP (%d/%d) distance in pct' % (spSize, spSet)) plt.xlabel('Input (%d/%d) distance in pct' % (elemSize, numSet)) title = 'SP distribution' title += ', iter = %d' % iter title += ', Pct =%f' % poolPct plt.suptitle(title, fontsize=12) #plt.savefig(os.path.join('~/Desktop/ExperimentResults/videosArtData', '%s' % iter)) plt.savefig(os.path.join(outdir, '%s' % iter)) plt.clf() distribMatrix = np.zeros((PLOT_PRECISION+1,PLOT_PRECISION+1)) ############################################################################ if __name__ == '__main__': np.random.seed(83) #testSP() #testSPNew() testSPFile()
gpl-3.0
cowlicks/odo
odo/backends/tests/test_hdfstore.py
4
4599
from __future__ import absolute_import, division, print_function

import os

from odo.backends.hdfstore import discover
from contextlib import contextmanager
from odo.utils import tmpfile
from odo.chunks import chunks
from odo import into, append, convert, resource, discover, odo

import datashape
import pandas as pd
from datetime import datetime
import numpy as np

try:
    f = pd.HDFStore('foo')
except (RuntimeError, ImportError) as e:
    import pytest
    pytest.skip('skipping test_hdfstore.py %s' % e)
else:
    f.close()
    os.remove('foo')


df = pd.DataFrame([['a', 1, 10., datetime(2000, 1, 1)],
                   ['ab', 2, 20., datetime(2000, 2, 2)],
                   ['abc', 3, 30., datetime(2000, 3, 3)],
                   ['abcd', 4, 40., datetime(2000, 4, 4)]],
                  columns=['name', 'a', 'b', 'time'])


@contextmanager
def file(df):
    with tmpfile('.hdf5') as fn:
        f = pd.HDFStore(fn)
        f.put('/data', df, format='table', append=True)
        try:
            yield fn, f, f.get_storer('/data')
        finally:
            f.close()


def test_discover():
    with file(df) as (fn, f, dset):
        assert str(discover(dset)) == str(discover(df))
        assert str(discover(f)) == str(discover({'data': df}))


# discover on a store containing nested groups; this test previously reused
# the name test_discover, which shadowed the test above so only one ever ran
def test_discover_nested():
    with tmpfile('hdf5') as fn:
        df.to_hdf(fn, '/a/b/data')
        df.to_hdf(fn, '/a/b/data2')
        df.to_hdf(fn, '/a/data')

        hdf = pd.HDFStore(fn)
        try:
            assert (discover(hdf) ==
                    discover({'a': {'b': {'data': df, 'data2': df},
                                    'data': df}}))
        finally:
            hdf.close()


def eq(a, b):
    if isinstance(a, pd.DataFrame):
        a = into(np.ndarray, a)
    if isinstance(b, pd.DataFrame):
        b = into(np.ndarray, b)
    c = a == b
    if isinstance(c, np.ndarray):
        c = c.all()
    return c


def test_chunks():
    with file(df) as (fn, f, dset):
        c = convert(chunks(pd.DataFrame), dset)
        assert eq(convert(np.ndarray, c), df)


def test_resource_no_info():
    with tmpfile('.hdf5') as fn:
        r = resource('hdfstore://' + fn)
        assert isinstance(r, pd.HDFStore)
        r.close()


def test_resource_of_dataset():
    with tmpfile('.hdf5') as fn:
        ds = datashape.dshape('{x: int32, y: 3 * int32}')
        r = resource('hdfstore://' + fn + '::/x', dshape=ds)
        assert r
        r.parent.close()


def test_append():
    with file(df) as (fn, f, dset):
        append(dset, df)
        append(dset, df)
        assert discover(dset).shape == (len(df) * 3,)


def test_into_resource():
    with tmpfile('.hdf5') as fn:
        d = into('hdfstore://' + fn + '::/x', df)
        assert discover(d) == discover(df)
        assert eq(into(pd.DataFrame, d), df)
        d.parent.close()


def test_convert_pandas():
    with file(df) as (fn, f, dset):
        assert eq(convert(pd.DataFrame, dset), df)


def test_convert_chunks():
    with file(df) as (fn, f, dset):
        c = convert(chunks(pd.DataFrame), dset, chunksize=len(df) / 2)
        assert len(list(c)) == 2
        assert eq(convert(pd.DataFrame, c), df)


def test_append_chunks():
    with file(df) as (fn, f, dset):
        append(dset, chunks(pd.DataFrame)([df, df]))
        assert discover(dset).shape[0] == len(df) * 3


def test_append_other():
    with tmpfile('.hdf5') as fn:
        x = into(np.ndarray, df)
        dset = into('hdfstore://' + fn + '::/data', x)
        assert discover(dset) == discover(df)
        dset.parent.close()


def test_fixed_shape():
    with tmpfile('.hdf5') as fn:
        df.to_hdf(fn, 'foo')
        r = resource('hdfstore://' + fn + '::/foo')
        assert isinstance(r.shape, list)
        assert discover(r).shape == (len(df),)
        r.parent.close()


def test_fixed_convert():
    with tmpfile('.hdf5') as fn:
        df.to_hdf(fn, 'foo')
        r = resource('hdfstore://' + fn + '::/foo')
        assert eq(convert(pd.DataFrame, r), df)
        r.parent.close()


def test_append_vs_write():
    import pandas.util.testing as tm
    with tmpfile('.hdf5') as fn:
        df.to_hdf(fn, 'foo', append=True)
        store = odo(df, 'hdfstore://%s::foo' % fn)
        try:
            newdf = odo(store, pd.DataFrame)
        finally:
            store.parent.close()
        tm.assert_frame_equal(newdf, pd.concat([df, df]))

    with tmpfile('.hdf5') as fn:
        store = odo(df, 'hdfstore://%s::foo' % fn, mode='w')
        try:
            newdf = odo(store, pd.DataFrame)
        finally:
            store.parent.close()
        tm.assert_frame_equal(newdf, df)
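All of these tests lean on the same URI convention: ``'hdfstore://<path>::<datapath>'`` addresses one node inside a pandas HDFStore. A minimal round-trip sketch following the pattern in test_append_vs_write (illustrative; the filename and node path are arbitrary):

import pandas as pd
from odo import odo

df2 = pd.DataFrame({'x': [1, 2, 3]})
store = odo(df2, 'hdfstore://example.h5::/x')  # write df2 into the store node
back = odo(store, pd.DataFrame)                # convert the node back to a frame
store.parent.close()                           # close the underlying HDFStore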
bsd-3-clause
leggitta/mne-python
examples/plot_compute_mne_inverse.py
21
1885
""" ================================================ Compute MNE-dSPM inverse solution on evoked data ================================================ Compute dSPM inverse solution on MNE evoked dataset and stores the solution in stc files for visualisation. """ # Author: Alexandre Gramfort <[email protected]> # # License: BSD (3-clause) import matplotlib.pyplot as plt from mne.datasets import sample from mne import read_evokeds from mne.minimum_norm import apply_inverse, read_inverse_operator print(__doc__) data_path = sample.data_path() fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif' fname_evoked = data_path + '/MEG/sample/sample_audvis-ave.fif' subjects_dir = data_path + '/subjects' snr = 3.0 lambda2 = 1.0 / snr ** 2 method = "dSPM" # use dSPM method (could also be MNE or sLORETA) # Load data evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0)) inverse_operator = read_inverse_operator(fname_inv) # Compute inverse solution stc = apply_inverse(evoked, inverse_operator, lambda2, method, pick_ori=None) # Save result in stc files stc.save('mne_%s_inverse' % method) ############################################################################### # View activation time-series plt.plot(1e3 * stc.times, stc.data[::100, :].T) plt.xlabel('time (ms)') plt.ylabel('%s value' % method) plt.show() # Plot brain in 3D with PySurfer if available brain = stc.plot(hemi='rh', subjects_dir=subjects_dir) brain.show_view('lateral') # use peak getter to move vizualization to the time point of the peak vertno_max, time_idx = stc.get_peak(hemi='rh', time_as_index=True) brain.set_data_time_index(time_idx) # draw marker at maximum peaking vertex brain.add_foci(vertno_max, coords_as_verts=True, hemi='rh', color='blue', scale_factor=0.6) brain.save_image('dSPM_map.png')
bsd-3-clause
pv/scikit-learn
sklearn/cluster/tests/test_dbscan.py
114
11393
""" Tests for DBSCAN clustering algorithm """ import pickle import numpy as np from scipy.spatial import distance from scipy import sparse from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_in from sklearn.utils.testing import assert_not_in from sklearn.cluster.dbscan_ import DBSCAN from sklearn.cluster.dbscan_ import dbscan from sklearn.cluster.tests.common import generate_clustered_data from sklearn.metrics.pairwise import pairwise_distances n_clusters = 3 X = generate_clustered_data(n_clusters=n_clusters) def test_dbscan_similarity(): # Tests the DBSCAN algorithm with a similarity array. # Parameters chosen specifically for this task. eps = 0.15 min_samples = 10 # Compute similarities D = distance.squareform(distance.pdist(X)) D /= np.max(D) # Compute DBSCAN core_samples, labels = dbscan(D, metric="precomputed", eps=eps, min_samples=min_samples) # number of clusters, ignoring noise if present n_clusters_1 = len(set(labels)) - (1 if -1 in labels else 0) assert_equal(n_clusters_1, n_clusters) db = DBSCAN(metric="precomputed", eps=eps, min_samples=min_samples) labels = db.fit(D).labels_ n_clusters_2 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_2, n_clusters) def test_dbscan_feature(): # Tests the DBSCAN algorithm with a feature vector array. # Parameters chosen specifically for this task. # Different eps to other test, because distance is not normalised. eps = 0.8 min_samples = 10 metric = 'euclidean' # Compute DBSCAN # parameters chosen for task core_samples, labels = dbscan(X, metric=metric, eps=eps, min_samples=min_samples) # number of clusters, ignoring noise if present n_clusters_1 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_1, n_clusters) db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples) labels = db.fit(X).labels_ n_clusters_2 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_2, n_clusters) def test_dbscan_sparse(): core_sparse, labels_sparse = dbscan(sparse.lil_matrix(X), eps=.8, min_samples=10) core_dense, labels_dense = dbscan(X, eps=.8, min_samples=10) assert_array_equal(core_dense, core_sparse) assert_array_equal(labels_dense, labels_sparse) def test_dbscan_no_core_samples(): rng = np.random.RandomState(0) X = rng.rand(40, 10) X[X < .8] = 0 for X_ in [X, sparse.csr_matrix(X)]: db = DBSCAN(min_samples=6).fit(X_) assert_array_equal(db.components_, np.empty((0, X_.shape[1]))) assert_array_equal(db.labels_, -1) assert_equal(db.core_sample_indices_.shape, (0,)) def test_dbscan_callable(): # Tests the DBSCAN algorithm with a callable metric. # Parameters chosen specifically for this task. # Different eps to other test, because distance is not normalised. eps = 0.8 min_samples = 10 # metric is the function reference, not the string key. metric = distance.euclidean # Compute DBSCAN # parameters chosen for task core_samples, labels = dbscan(X, metric=metric, eps=eps, min_samples=min_samples, algorithm='ball_tree') # number of clusters, ignoring noise if present n_clusters_1 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_1, n_clusters) db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples, algorithm='ball_tree') labels = db.fit(X).labels_ n_clusters_2 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_2, n_clusters) def test_dbscan_balltree(): # Tests the DBSCAN algorithm with balltree for neighbor calculation. 
eps = 0.8 min_samples = 10 D = pairwise_distances(X) core_samples, labels = dbscan(D, metric="precomputed", eps=eps, min_samples=min_samples) # number of clusters, ignoring noise if present n_clusters_1 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_1, n_clusters) db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='ball_tree') labels = db.fit(X).labels_ n_clusters_2 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_2, n_clusters) db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm='kd_tree') labels = db.fit(X).labels_ n_clusters_3 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_3, n_clusters) db = DBSCAN(p=1.0, eps=eps, min_samples=min_samples, algorithm='ball_tree') labels = db.fit(X).labels_ n_clusters_4 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_4, n_clusters) db = DBSCAN(leaf_size=20, eps=eps, min_samples=min_samples, algorithm='ball_tree') labels = db.fit(X).labels_ n_clusters_5 = len(set(labels)) - int(-1 in labels) assert_equal(n_clusters_5, n_clusters) def test_input_validation(): # DBSCAN.fit should accept a list of lists. X = [[1., 2.], [3., 4.]] DBSCAN().fit(X) # must not raise exception def test_dbscan_badargs(): # Test bad argument values: these should all raise ValueErrors assert_raises(ValueError, dbscan, X, eps=-1.0) assert_raises(ValueError, dbscan, X, algorithm='blah') assert_raises(ValueError, dbscan, X, metric='blah') assert_raises(ValueError, dbscan, X, leaf_size=-1) assert_raises(ValueError, dbscan, X, p=-1) def test_pickle(): obj = DBSCAN() s = pickle.dumps(obj) assert_equal(type(pickle.loads(s)), obj.__class__) def test_boundaries(): # ensure min_samples is inclusive of core point core, _ = dbscan([[0], [1]], eps=2, min_samples=2) assert_in(0, core) # ensure eps is inclusive of circumference core, _ = dbscan([[0], [1], [1]], eps=1, min_samples=2) assert_in(0, core) core, _ = dbscan([[0], [1], [1]], eps=.99, min_samples=2) assert_not_in(0, core) def test_weighted_dbscan(): # ensure sample_weight is validated assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2]) assert_raises(ValueError, dbscan, [[0], [1]], sample_weight=[2, 3, 4]) # ensure sample_weight has an effect assert_array_equal([], dbscan([[0], [1]], sample_weight=None, min_samples=6)[0]) assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 5], min_samples=6)[0]) assert_array_equal([0], dbscan([[0], [1]], sample_weight=[6, 5], min_samples=6)[0]) assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 6], min_samples=6)[0]) # points within eps of each other: assert_array_equal([0, 1], dbscan([[0], [1]], eps=1.5, sample_weight=[5, 1], min_samples=6)[0]) # and effect of non-positive and non-integer sample_weight: assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 0], eps=1.5, min_samples=6)[0]) assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[5.9, 0.1], eps=1.5, min_samples=6)[0]) assert_array_equal([0, 1], dbscan([[0], [1]], sample_weight=[6, 0], eps=1.5, min_samples=6)[0]) assert_array_equal([], dbscan([[0], [1]], sample_weight=[6, -1], eps=1.5, min_samples=6)[0]) # for non-negative sample_weight, cores should be identical to repetition rng = np.random.RandomState(42) sample_weight = rng.randint(0, 5, X.shape[0]) core1, label1 = dbscan(X, sample_weight=sample_weight) assert_equal(len(label1), len(X)) X_repeated = np.repeat(X, sample_weight, axis=0) core_repeated, label_repeated = dbscan(X_repeated) core_repeated_mask = np.zeros(X_repeated.shape[0], dtype=bool) 
core_repeated_mask[core_repeated] = True core_mask = np.zeros(X.shape[0], dtype=bool) core_mask[core1] = True assert_array_equal(np.repeat(core_mask, sample_weight), core_repeated_mask) # sample_weight should work with precomputed distance matrix D = pairwise_distances(X) core3, label3 = dbscan(D, sample_weight=sample_weight, metric='precomputed') assert_array_equal(core1, core3) assert_array_equal(label1, label3) # sample_weight should work with estimator est = DBSCAN().fit(X, sample_weight=sample_weight) core4 = est.core_sample_indices_ label4 = est.labels_ assert_array_equal(core1, core4) assert_array_equal(label1, label4) est = DBSCAN() label5 = est.fit_predict(X, sample_weight=sample_weight) core5 = est.core_sample_indices_ assert_array_equal(core1, core5) assert_array_equal(label1, label5) assert_array_equal(label1, est.labels_) def test_dbscan_core_samples_toy(): X = [[0], [2], [3], [4], [6], [8], [10]] n_samples = len(X) for algorithm in ['brute', 'kd_tree', 'ball_tree']: # Degenerate case: every sample is a core sample, either with its own # cluster or including other close core samples. core_samples, labels = dbscan(X, algorithm=algorithm, eps=1, min_samples=1) assert_array_equal(core_samples, np.arange(n_samples)) assert_array_equal(labels, [0, 1, 1, 1, 2, 3, 4]) # With eps=1 and min_samples=2 only the 3 samples from the denser area # are core samples. All other points are isolated and considered noise. core_samples, labels = dbscan(X, algorithm=algorithm, eps=1, min_samples=2) assert_array_equal(core_samples, [1, 2, 3]) assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1]) # Only the sample in the middle of the dense area is core. Its two # neighbors are edge samples. Remaining samples are noise. core_samples, labels = dbscan(X, algorithm=algorithm, eps=1, min_samples=3) assert_array_equal(core_samples, [2]) assert_array_equal(labels, [-1, 0, 0, 0, -1, -1, -1]) # It's no longer possible to extract core samples with eps=1: # everything is noise. core_samples, labels = dbscan(X, algorithm=algorithm, eps=1, min_samples=4) assert_array_equal(core_samples, []) assert_array_equal(labels, -np.ones(n_samples)) def test_dbscan_precomputed_metric_with_degenerate_input_arrays(): # see https://github.com/scikit-learn/scikit-learn/issues/4641 for # more details X = np.ones((10, 2)) labels = DBSCAN(eps=0.5, metric='precomputed').fit(X).labels_ assert_equal(len(set(labels)), 1) X = np.zeros((10, 2)) labels = DBSCAN(eps=0.5, metric='precomputed').fit(X).labels_ assert_equal(len(set(labels)), 1)
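A compact usage sketch of the convention these tests rely on (illustrative data, not from the test suite): ``labels_`` marks noise points with -1, so cluster counts subtract that label back out:

import numpy as np
from sklearn.cluster import DBSCAN

rng = np.random.RandomState(0)
X_demo = np.vstack([rng.randn(20, 2), rng.randn(20, 2) + 5])  # two blobs
labels = DBSCAN(eps=0.8, min_samples=5).fit(X_demo).labels_
n_found = len(set(labels)) - (1 if -1 in labels else 0)  # ignore noise label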
bsd-3-clause
statsmodels/statsmodels.github.io
v0.12.2/plots/graphics_gofplots_qqplot.py
10
1927
# -*- coding: utf-8 -*-
"""
Created on Sun May 06 05:32:15 2012

Author: Josef Perktold
edited by: Paul Hobson (2012-08-19)
"""

# example with the new ProbPlot class

from matplotlib import pyplot as plt
import numpy as np
from scipy import stats

import statsmodels.api as sm

# example from docstring
data = sm.datasets.longley.load(as_pandas=False)
data.exog = sm.add_constant(data.exog, prepend=True)
mod_fit = sm.OLS(data.endog, data.exog).fit()
res = mod_fit.resid

left = -1.8  # x coordinate for text insert

fig = plt.figure()

ax = fig.add_subplot(2, 2, 1)
sm.graphics.qqplot(res, ax=ax)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, 'no keywords', verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))

ax = fig.add_subplot(2, 2, 2)
sm.graphics.qqplot(res, line='s', ax=ax)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "line='s'", verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))

ax = fig.add_subplot(2, 2, 3)
sm.graphics.qqplot(res, line='45', fit=True, ax=ax)
ax.set_xlim(-2, 2)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "line='45', \nfit=True", verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))

ax = fig.add_subplot(2, 2, 4)
sm.graphics.qqplot(res, dist=stats.t, line='45', fit=True, ax=ax)
ax.set_xlim(-2, 2)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "dist=stats.t, \nline='45', \nfit=True",
              verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))

fig.tight_layout()

plt.gcf()

x = np.random.normal(loc=8.25, scale=3.5, size=37)
y = np.random.normal(loc=8.00, scale=3.25, size=37)
pp_x = sm.ProbPlot(x, fit=True)
pp_y = sm.ProbPlot(y, fit=True)

# probability of exceedance
fig2 = pp_x.probplot(exceed=True)

# compare x quantiles to y quantiles
fig3 = pp_x.qqplot(other=pp_y, line='45')

# same as above with probabilities/percentiles
fig4 = pp_x.ppplot(other=pp_y, line='45')
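An optional follow-up using the figure handles created above: persist the three ProbPlot comparisons to disk (the filenames are arbitrary):

fig2.savefig('probplot_exceedance.png')
fig3.savefig('qqplot_x_vs_y.png')
fig4.savefig('ppplot_x_vs_y.png')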
bsd-3-clause
McDermott-Group/LabRAD
LabRAD/TestScripts/fpgaTest/pyle/build/lib/pyle/dataking/automateDaily.py
2
41937
from datetime import datetime import itertools import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D import numpy as np from scipy.optimize import leastsq, fsolve import time import random import labrad from labrad.units import Unit ns, us, GHz, MHz = [Unit(s) for s in ('ns', 'us', 'GHz', 'MHz')] from scripts import GHz_DAC_bringup from pyle.dataking import measurement from pyle.dataking import multiqubit as mq from pyle.dataking import util as util from pyle.util import sweeptools as st from pyle.dataking import noon from pyle.dataking import hadamard as hadi from pyle.plotting import dstools as ds from pyle import tomo # TODO: Big dream here. Create a "report" or a dictionary of all the calibrations for each qubit # then print that to the screen/save it to registry/ save fits to dataset # TODO add fits to e.g. T1 and T2 # TODO separate datataking from analysis (so can rerun analysis on datasets directly from data vault) # TODO save all fitting params with datasets # TODO retune measure pulse amplitude for maximum visibility def smsNotify(cxn, msg, username): return cxn.telecomm_server.send_sms('automate daily', msg, username) def getBoardGroup(cxn, sample): """ Get the board group used by the experiment associated to sample""" fpgas = cxn.ghz_fpgas boardGroups = fpgas.list_board_groups() def getAnyBoard(): for dev in sample.values(): try: #Look in channels to see if we can find any FPGA board return dict(dev['channels'])['uwave'][1][0] except (KeyError,TypeError): #Don't do anything, just try the next one pass board = getAnyBoard() if board is None: return board for bg in boardGroups: if board in [name[1] for name in fpgas.list_devices(bg)]: return bg return None def getReadoutType(sample, device): pass def daily_bringup(s, pause=False): cxn = s._cxn username = s._dir[1] sample, qubits = util.loadQubits(s) boardGroup = getBoardGroup(cxn, sample) #Bring up FPGA boards. If it fails, send SMS to the user and end daily bringup if not bringupBoards(cxn, boardGroup): smsNotify(cxn, 'board bringup failed', username) return False #Set up DC bias (SQUID steps, resonator readout, etc.) bringup_dcBias(s, pause=pause) return bringup_stepedge(s, pause=pause) #run stepedge and find stepedge for each qubit bringup_scurve(s, pause=pause) #measure scurve over reasonable range. find_mpa_func bringup_sample(s, pause=pause) #spectroscopy, tune pi freq/amp, visibility #fluxFunc/zpaFunc on reasonable frequency ranges #todo the pulshape tune-up single_qubit_scans(s) #Coherence factors qubit_coupling_resonator_scans(s) #For each qubit: swapTuner,fockTuner. Then inter-qubit timing. #qubit_memory_resonator_scans(s) gate_bringup(s) create_bell_state_iswap(s,zSweep=False) def bringupBoards(cxn, boardGroup): ok = True resultWords = {True:'ok',False:'failed'} fpgas = cxn.ghz_fpgas try: successDict = GHz_DAC_bringup.bringupBoardGroup(fpgas, boardGroup) for board, successes in successDict.items(): for item,success in successes.items(): if not success: print 'board %s %s failed'%(board,item) ok = False except Exception: ok = False return ok def bringup_dcBias(s, pause=True): pass def bringup_squidsteps(s, pause=True): N = len(s['config']) for i in range(N): print 'measuring squidsteps for qubit %d...' % i, mq.squidsteps(s, measure=i, noisy=False, update=pause) print 'done.' def bringup_stepedge(s, pause=True): N = len(s['config']) for i in range(N): print 'measuring step edge, qubit %d...' % i, mq.stepedge(s, measure=i, noisy=False, update=pause) print 'done.' 
for i in range(N): print 'binary searching to find step edge %d...' % i mq.find_step_edge(s, measure=i, noisy=False) print 'done.' def bringup_scurve(s, pause=True): N = len(s['config']) for i in range(N): print 'measuring scurve, qubit %d...' % i mpa05 = mq.find_mpa(s, measure=i, target=0.05, noisy=False, update=False) print '5% tunneling at mpa =', mpa05 mpa95 = mq.find_mpa(s, measure=i, target=0.95, noisy=False, update=False) print '95% tunneling at mpa =', mpa95 low = st.nearest(mpa05 - (mpa95 - mpa05) * 1.0, 0.002) high = st.nearest(mpa95 + (mpa95 - mpa05) * 1.0, 0.002) step = 0.002 * np.sign(high - low) mpa_range = st.r[low:high:step] mq.scurve(s, mpa_range, measure=i, stats=1200, noisy=False, update=pause) print 'done.' for i in range(N): print 'binary searching to find mpa %d...' % i mq.find_mpa(s, measure=i, noisy=False, update=True) mq.find_mpa_func(s, measure=i, noisy=False, update=True) print 'done.' def bringup_spectroscopy(s, freq_range=(6.0*GHz, 6.8*GHz)): qubits = s['config'] N = len(qubits) for i in range(N): mq.spectroscopy(s, st.r[freq_range[0]:freq_range[1]:0.005*GHz], measure=i, update=True) def bringup_sample(s, pause=False, fine_tune=True): N = len(s['config']) bringup_pi_pulses(s, pause=pause) if fine_tune: for i in range(N): # choose frequency range to cover all qubits fmin = min(s[qubit]['f10'] for qubit in s['config']) - 0.1*GHz fmax = max(s[qubit]['f10'] for qubit in s['config']) + 0.1*GHz print 'measuring flux func, qubit %d...' % i, mq.find_flux_func(s, (fmin, fmax), measure=i, noisy=False) print 'done.' print 'measuring zpa func, qubit %d...' % i, mq.find_zpa_func(s, (fmin, fmax), measure=i, noisy=False) print 'done.' # update the calibrated ratio of DAC amplitudes to detuning and rabi freqs update_cal_ratios(s) def update_cal_ratios(s): s, _qubits, Qubits = util.loadQubits(s, write_access=True) # single-qubit bringup for Q in Qubits: # convert microwave amplitude to rabi frequency fwhm = Q['piFWHM'][ns] A = float(Q['piAmp']) Q['calRabiOverUwa'] = 2*np.sqrt(np.log(2)/np.pi)/(A*fwhm)*GHz # total area is 1 cycle # convert z amplitude to detuning frequency a = float(Q['calZpaFunc'][0]) f = Q['f10'][GHz] Q['calDfOverZpa'] = 1/(4*a*f**3)*GHz def bringup_pi_pulses(s, pause=False): N = len(s['config']) for i in range(N): print 'measuring spectroscopy, qubit %d...' % i, mq.spectroscopy(s, measure=i, noisy=False, update=pause) # zoom in on resonance peak mq.spectroscopy_two_state(s, measure=i, noisy=False, update=pause) print 'done.' for i in range(N): print 'calibrating pi pulse, qubit %d...' % i, mq.pitunerHD(s, measure=i, noisy=False) print 'done.' print 'fine-tuning frequency, qubit %d...' % i, mq.freqtuner(s, iterations=1, measure=i, save=True) print 'done.' print 'redoing pi pulse calibration, qubit %d...' % i, mq.pitunerHD(s, measure=i, noisy=False) print 'done.' print 'checking visibility, qubit %d...' % i mpa1_05 = mq.find_mpa(s, measure=i, pi_pulse=True, target=0.05, noisy=False, update=False) print '5% tunneling of 1 at mpa =', mpa1_05 mpa0_95 = mq.find_mpa(s, measure=i, pi_pulse=False, target=0.95, noisy=False, update=False) print '95% tunneling of 0 at mpa =', mpa0_95 low = max(st.nearest(mpa1_05 - (mpa0_95 - mpa1_05) * 0.5, 0.002), 0) high = min(st.nearest(mpa0_95 + (mpa0_95 - mpa1_05) * 0.5, 0.002), 2) step = 0.002 * np.sign(high - low) mpa_range = st.r[low:high:step] mq.visibility(s, mpa_range, stats=1200, measure=i, noisy=False) print 'done.' 
# TODO adjust measurePulse_amplitude for maximum visibility # measure e0, e1 and visibility very carefully at the correct measure-pulse amplitude print 'measuring visibility at calibrated mpa %d...' % i, Q = s[s['config'][i]] data = mq.visibility(s, [Q['measureAmp']]*100, stats=600, measure=i, noisy=False, name='Measurement Fidelity', collect=True) e0, f1 = np.mean(data[:,1]), np.mean(data[:,2]) print 'done.' print ' e0: %g, f0: %g' % (e0, 1-e0) print ' e1: %g, f1: %g' % (1-f1, f1) Q['measureE0'] = e0 Q['measureF0'] = 1-e0 Q['measureE1'] = 1-f1 Q['measureF1'] = f1 def bringup_timing(s): N = len(s['config']) for i in range(N): print 'measuring timing delay on qubit %d...' % i mq.testdelay(s, measure=i, update=True, plot=True, noisy=False) print 'done.' for i in range(1,N): print 'measuring timing delay between qubit 0 and %d...' % i mq.testdelay_x(s, measure=0, z_pulse=i, update=True, plot=True, noisy=False) print 'done.' def bringup_xtalk(s): """Measure the z-pulse crosstalk between each pair of qubits. We then create the crosstalk matrix and store it in the registry. In addition, we invert the crosstalk matrix, since this is needed to correct the z-pulse signals if desired. Assumes you have already run spectroscopy2DZauto, so that the cal_zpa_func has already been set, as xtalk is relative to this. """ s, qubits, Qubits = util.loadQubits(s, write_access=True) A = np.eye(len(qubits)) for i, qi in enumerate(qubits): for j, _qj in enumerate(qubits): if i == j: continue print 'measuring crosstalk on %s from z-pulse on %s' % (i, j) xtfunc = mq.spectroscopy2DZxtalk(s, measure=i, z_pulse=j, noisy=False) aii = float(qi['calZpaFunc'][0]) aij = float(xtfunc[0]) print 'crosstalk =', aii/aij A[i,j] = aii/aij Ainv = np.linalg.inv(A) print print 'xtalk matrix:\n', A print print 'inverse xtalk matrix:\n', Ainv for i, Qi in enumerate(Qubits): Qi['calZpaXtalk'] = A[i] Qi['calZpaXtalkInv'] = Ainv[i] def test_xtalk(s): s, qubits = util.loadQubits(s, write_access=False) readouts = [(0,1,2,3), (0,1,3,2), (3,2,1,0), (1,2,0,3), (2,0,1,3), (2,1,3,0)] for readout_order in readouts: for q, order in zip(qubits, readout_order): q['squidReadoutDelay'] = (order+1) * 10*us for i in range(len(qubits)): mq.meas_xtalk(s, name='meas-xtalk simult readout order %s' % (readout_order,), drive=i, simult=True, stats=1200, noisy=False) for readout_order in readouts: for q, order in zip(qubits, readout_order): q['squidReadoutDelay'] = (order+1) * 10*us for i in range(len(qubits)): mq.meas_xtalk(s, name='meas-xtalk readout order %s' % (readout_order,), drive=i, simult=False, stats=1200, noisy=False) #def test_measurements(s): # """Test the various ways of measuring qubits.""" # s, qubits = util.loadQubits(s) # q0, _q1, q2 = qubits # # zpa0 = q0['wZpulseAmp'] # zpa2 = q2['wZpulseAmp'] # # # couple all three qubits together # kw = dict(pi_pulse_on=1, t_couple=32*ns, name='w-state meas_test', delay=st.r[30:36:1,ns], zpas=[zpa0, 0, zpa2], stats=3000) # # werner.w_state(s, measure=0, **kw) # werner.w_state(s, measure=[1], **kw) # werner.w_state(s, measure=[0,1,2], **kw) # werner.w_state(s, measure=measurement.Null(3, [0,1]), **kw) # werner.w_state(s, measure=measurement.Null(3, [0,1,2]), **kw) # werner.w_state(s, measure=measurement.Tomo(3, [0]), **kw) # werner.w_state(s, measure=measurement.Tomo(3, [0,1]), **kw) # werner.w_state(s, measure=measurement.Tomo(3, [0,1,2]), **kw) # werner.w_state(s, measure=measurement.TomoNull(3, [0]), pipesize=2, **kw) # werner.w_state(s, measure=measurement.TomoNull(3, [0,1]), pipesize=2, **kw) # 
werner.w_state(s, measure=measurement.TomoNull(3, [0,1,2]), pipesize=2, **kw) # werner.w_state(s, measure=measurement.Octomo(3, [0]), pipesize=2, **kw) # werner.w_state(s, measure=measurement.Octomo(3, [0,1]), pipesize=2, **kw) # werner.w_state(s, measure=measurement.Octomo(3, [0,1,2]), pipesize=2, **kw) # werner.w_state(s, measure=measurement.OctomoNull(3, [0,1,2]), pipesize=2, **kw) def single_qubit_scans(s): N = len(s['config']) for i in range(N): print 'measuring T1, qubit %d' % i, mq.t1(s, stats=1800, measure=i, noisy=False) #TODO add T1 fits print 'done.' print 'measuring ramsey fringe, qubit %d' % i, #TODO bring T1 fit from above and turn on T2 fit mq.ramsey(s, stats=1800, measure=i, noisy=False) print 'done.' print 'measuring spin_echo, qubit %d' % i, mq.spinEcho(s, stats=1800, measure=i, noisy=False) print 'done.' def qubit_coupling_resonator_scans(s): start = datetime.now() N = len(s['config']) for i in range(N): print 'measuring SWAP10 Spectroscopy, qubit %d' % i, swap10Len, swap10Amp = mq.swap10tuner(s, measure=i, stats=1800, noisy=False, whichRes='Coupler') print 'measuring 2D-SWAP Spec around Coupling resonator, for qubit %d' % i, swapAmpBND = 0.2 swapAmpSteps = 0.001 coarseSet = np.arange(0,swap10Amp*(1-swapAmpBND),swapAmpSteps*5) fineSet = np.arange(swap10Amp*(1-swapAmpBND),swap10Amp*(1+swapAmpBND), swapAmpSteps) swap10Amp = np.hstack((coarseSet,fineSet)) mq.swapSpectroscopy(s, state=1, swapLen=st.arangePQ(0,75,2,ns), swapAmp=swap10Amp, measure=i, save=True, noisy=False) #run focktuner level =1 print 'fock tuner for fine calibratin of cZControlLen' mq.fockTuner(s, n=1, iteration=3, tuneOS=False, stats=1800, measure=i, save=True, noisy=False) print 'done. Calibrated Control qubits' print 'Tuning up pi-pulse for |2> of qubit %d' % i, noon.pituner21(s, stats = 1800, measure=i, noisy=False, findMPA=True) print 'done' print 'measuring SWAP21 Spectroscopy' swap21Len, swap21Amp = mq.swap21tuner(s, measure=i, stats=1800, noisy=False) print 'measuring 2D-SWAP Spec around resonator, for qubit %d' % i, mq.swapSpectroscopy(s, state=2, swapLen=st.arangePQ(0,60,2,ns), swapAmp=st.r[swap21Amp*(1-0.2):swap21Amp*(1+0.2):0.001], measure=i, save=True, noisy=False) mq.fockTuners21(s, n=1, iteration=3, tuneOS=False, stats=1800, measure=i, save=True, noisy=False) print 'done. Calibrated Target qubits' print 'now starting qubit-qubit timing calibrations...' print 'measuring qubit-qubit delay via the resonator' for j,k in [(0,1),(1,0), (0,2),(2,0), (1,2),(2,1), (0,3),(3,0), (1,3),(3,1), (2,3),(3,2)]: #( add back in when all 4 qubits work! 
mq.testQubResDelayCmp(s,measureC=j, measureT=k) print 'now measuring resonator T1 using q0 for photon exchange' noon.resonatorT1(s, stats=1800, measure=0, whichRes='Coupler') end = datetime.now() print 'start:', start print 'end:', end print 'elapsed time for qubit-resonator scans:', (end-start) def qubit_memory_resonator_scans(s, stats=1800): start = datetime.now() N = len(s['config']) for i in range(N): print 'measuring SWAP10 Spectroscopy, qubit %d' % i, swap10Len, swap10Amp = mq.swap10tuner(s, measure=i, stats=stats, noisy=False, whichRes='Memory') print 'measuring 2D-SWAP Spec around Memory resonator, for qubit %d' % i, swapAmpBND = 0.2 swapAmpSteps = 0.001 coarseSet = np.arange(0,swap10Amp*(1-swapAmpBND),swapAmpSteps*5) fineSet = np.arange(swap10Amp*(1-swapAmpBND),swap10Amp*(1+swapAmpBND), swapAmpSteps) swap10Amp = np.hstack((coarseSet,fineSet)) mq.swapSpectroscopy(s, swapLen=st.arangePQ(0,300,5,ns), swapAmp=swap10Amp, measure=i, save=True, noisy=False, stats=stats, whichRes='Memory') #run focktuner level =1 print 'fock tuner for fine calibratin of memoryReadWriteLen' mq.fockTuner(s, n=1, iteration=3, tuneOS=False, stats=stats, measure=i, save=True, noisy=False, whichRes='Memory') print 'done. Memory resonator tuned up' print 'now measuring memory resonator T1 for resonator %d' %i, noon.resonatorT1(s, stats=stats, measure=i, whichRes='Memory') end = datetime.now() print 'start:', start print 'end:', end print 'elapsed time for qubit-mem-resonator scans:', (end-start) def gate_bringup(s): start = datetime.now() N = len(s['config']) for i in range(N): print 'Begin Calibrating Single Qubit Hadamard Gates' print 'Z-pi pulse tuner' mq.pitunerZ(s, measure=i, save=True, stats = 1800, update=True, noisy=False) print 'done tuning Z-pi amplitude for qubit %d' %i, hadi.hadamardTrajectory(s, measure=i, stats=1500, useHD=True, useTomo=True, tBuf=5*ns, save=True, noisy=False) print 'plotting hadamard trajectory on Bloch Sphere' print 'correcting for visibilities...generating pretty plots' hadi.plotTrajectory(path=s._dir, dataset=None, state=None) #grabs the most recent dataset in the current session hadi.plotDensityArrowPlot(path=s._dir, dataset = None) #grabs most recent dataset in the current session end = datetime.now() print 'start:', start print 'end:', end print 'elapsed time for single qubit gate bringups:', (end-start) def create_bell_state_iswap(s,zSweep=False): start = datetime.now() for j,k in [(0,1),(0,2),(1,2)]: #(0,3),(1,3),(2,3) add back in when all 4 qubits work! 
Qj = s[s['config'][j]] print 'measuring SWAPs between q%d and q%d via Rc' %(j,k) shor.iSwap(s, measure=[j,k], stats=1500, noisy=False) if zSweep: bellPhase = Qj['piAmpZ'] bellPhases = np.arange(-1.0,1.0,0.1)*bellPhase for phase in bellPhases: print 'Preparing Bell-States via SQRT(iSWAP) between q%d and q%d via Rc' %(j,k) shor.bellStateiSwap(s, reps=5, measure=[j,k], stats=1800, corrAmp=phase) else: print 'Preparing Bell-States via SQRT(iSWAP) between q%d and q%d via Rc' %(j,k) shor.bellStateiSwap(s, reps=5, measure=[j,k], stats=1800, corrAmp=0.0) end = datetime.now() print 'start:', start print 'end:', end print 'elapsed time for bell-state preparation:', (end-start) def cPhase_bringup(s): start = datetime.now() N = len(s['config']) for i in range(N): print 'done with COWS' end = datetime.now() print 'start:', start print 'end:', end print 'elapsed time for c-phase bringups:', (end-start) def full_run(s): bringup_multiqubits(s) measure_w(s) # do tomography def bringup_multiqubits(s): start = datetime.now() test_coupling(s, guess_zpa=True, use_overshoots=False) tune_swaps(s) #test_coupling(s, guess_zpa=False, use_overshoots=True) # try the swap again with correct overshoot tune_phases(s) # tune microwave phases between channels check_phase_vs_time(s) # tune microwave phase between channels as a function of time tune_swap_dphases(s) # tune phase change due to a swap z-pulse tune_dphases(s) # tune phase change due to z-pulses of any length end = datetime.now() print 'start:', start print 'end:', end print 'elapsed:', (end-start) # TODO save hyperbolic fit to coupling strength, so we can adjust for unequal coupling strengths def test_coupling(s, guess_zpa=True, use_overshoots=False): """Determine the z-pulse amplitude needed to bring qubits into resonance. Also, measure coupling strength between qubits. 
sets: w_zpulse_amp, w_swap_amp """ s, qubits, Qubits = util.loadQubits(s, write_access=True) q0, q1, q2 = qubits Q0, _Q1, Q2 = Qubits zpafunc0 = mq.get_zpa_func(q0) zpafunc1 = mq.get_zpa_func(q1) zpafunc2 = mq.get_zpa_func(q2) S = 0.015 * GHz # expected coupling strength if guess_zpa: # guess the required zpa zpa0 = q0['wZpulseAmp'] = zpafunc0(q1['f10']) zpa2 = q2['wZpulseAmp'] = zpafunc2(q1['f10']) # calculate zpa limits to give a reasonable range based on the expected coupling strength zpalims0 = sorted([zpafunc0(q1['f10'] - S*2), zpafunc0(q1['f10'] + S*2)]) zpalims2 = sorted([zpafunc2(q1['f10'] - S*2), zpafunc2(q1['f10'] + S*2)]) else: # use calibrated zpa zpa0 = q0['wZpulseAmp'] zpa2 = q2['wZpulseAmp'] # calculate zpa limits based on calibrated coupling change with zpa dzpa0 = abs(S[GHz]*2 / q0['coupling1DsByDzpa'][GHz]) zpalims0 = [zpa0 - dzpa0, zpa0 + dzpa0] dzpa2 = abs(S[GHz]*2 / q2['coupling1DsByDzpa'][GHz]) zpalims2 = [zpa2 - dzpa2, zpa2 + dzpa2] if not use_overshoots: q0['wZpulseOvershoot'] = 0.0 q2['wZpulseOvershoot'] = 0.0 from pyle.fitting import fourierplot opts = { 'collect': True, 'noisy': False, } null012 = measurement.Null(3, [0,1,2]) if 1: # couple q0 with q1 rng0 = st.r[zpalims0[0]:zpalims0[1]:(zpalims0[1] - zpalims0[0]) / 25] data0 = werner.w_state(s, name='coupling 0 and 1 2D', pi_pulse_on=1, measure=[0], t_couple=1000*ns, delay=st.r[0:200:4,ns], zpas=[rng0, 0, 0], **opts) S0, zpa0, ds_by_dzpa0 = fourierplot.fitswap(data0, return_fit=True) # find swap frequency and optimal z-pulse print S0, zpa0, ds_by_dzpa0 Q0['swapAmp'] = Q0['wZpulseAmp'] = zpa0 Q0['coupling1'] = S0*MHz Q0['coupling1DsByDzpa'] = ds_by_dzpa0*MHz # do a 1D scan with the optimal pulse amplitude data0 = werner.w_state(s, name='coupling 0 and 1', pi_pulse_on=1, measure=null012, t_couple=1000*ns, delay=st.r[0:100:2,ns], zpas=[zpa0, 0, 0], stats=3000, **opts) if 1: # couple q2 with q1 rng2 = st.r[zpalims2[0]:zpalims2[1]:(zpalims2[1] - zpalims2[0]) / 25] data2 = werner.w_state(s, name='coupling 1 and 2 2D', pi_pulse_on=1, measure=[2], t_couple=1000*ns, delay=st.r[0:200:4,ns], zpas=[0, 0, rng2], **opts) S2, zpa2, ds_by_dzpa2 = fourierplot.fitswap(data2, return_fit=True) # find swap frequency and optimal z-pulse print S2, zpa2, ds_by_dzpa2 Q2['swapAmp'] = Q2['wZpulseAmp'] = zpa2 Q2['coupling1'] = S2*MHz Q2['coupling1DsByDzpa'] = ds_by_dzpa2*MHz # do a 1D scan with the optimal pulse amplitude data2 = werner.w_state(s, name='coupling 1 and 2', pi_pulse_on=1, measure=null012, t_couple=1000*ns, delay=st.r[0:100:2,ns], zpas=[0, 0, zpa2], stats=3000, **opts) if 1: # couple q0 with q2, moving q1 to negative detuning zpa1 = zpafunc1(q2['f10']) # move q1 out of the way rng2 = st.r[zpalims2[0]:zpalims2[1]:(zpalims2[1] - zpalims2[0]) / 25] data2 = werner.w_state(s, name='coupling 0 and 2 2D', pi_pulse_on=0, measure=[2], t_couple=1000*ns, delay=st.r[0:200:4,ns], zpas=[zpa0, zpa1, rng2], **opts) S2, zpa2, ds_by_dzpa2 = fourierplot.fitswap(data2, return_fit=True) # find swap frequency and optimal z-pulse print S2, zpa2, ds_by_dzpa2 #Q2['swapAmp'] = Q2['wZpulseAmp'] = zpa2 Q2['coupling0'] = S2*MHz # save this coupling value, but not in the standard place Q2['coupling0DsByDzpa'] = ds_by_dzpa2*MHz # save fit, but not in the standard place # do a 1D scan with the optimal pulse amplitude data2 = werner.w_state(s, name='coupling 0 and 2', pi_pulse_on=0, measure=null012, t_couple=1000*ns, delay=st.r[0:100:2,ns], zpas=[zpa0, zpa1, zpa2], stats=3000, **opts) def tune_swaps(s): """Adjust overshoot and pulse length to get the best 
swap.""" # overshoots don't seem to help s.q0['swapOvershoot'] = 0.0 s.q2['swapOvershoot'] = 0.0 werner.swaptuner(s, measure=0, pi_pulse_on=1, noisy=False, update=True, save=False, stats=3000, tune_overshoot=False) werner.swaptuner(s, measure=2, pi_pulse_on=1, noisy=False, update=True, save=False, stats=3000, tune_overshoot=False) # set overshoots for w-state to be equal to calibrated swap overshoots s.q0['wZpulseOvershoot'] = s.q0['swapOvershoot'] s.q2['wZpulseOvershoot'] = s.q2['swapOvershoot'] def tune_phases(s, t0=None, calibrated_amp=True, stats=3000L, res=50, plot=True): s, qubits, Qubits = util.loadQubits(s, write_access=True) q0, q1, q2 = qubits if calibrated_amp: zpa0 = q0['swapAmp'] zpa2 = q2['swapAmp'] else: zpafunc0 = mq.get_zpa_func(q0) zpafunc2 = mq.get_zpa_func(q2) zpa0 = zpafunc0(q1['f10']) zpa2 = zpafunc2(q1['f10']) f_couple = 0.015*GHz t_couple = (1/f_couple/4)[ns]*ns phase = st.r[-np.pi:np.pi:np.pi/res] data0 = werner.uwave_phase_adjust(s, phase=phase, t0=t0, t_couple=t_couple, adjust=0, ref=1, zpas=[zpa0, 0.0, 0.0], collect=True, noisy=False, stats=stats) data2 = werner.uwave_phase_adjust(s, phase=phase, t0=t0, t_couple=t_couple, adjust=2, ref=1, zpas=[0.0, 0.0, zpa2], collect=True, noisy=False, stats=stats) def fitfunc(x, c): return -np.sin(x - c[0]) * c[1] + c[2] ph, _p00, p01, p10, _p11 = data0.T fit0, _ = leastsq(lambda c: fitfunc(ph, c) - p10, [q0['uwavePhase'], (max(p10)-min(p10))/2.0, (max(p10)+min(p10))/2.0]) if fit0[1] < 0: fit0[0] = (fit0[0] + 2*np.pi) % (2*np.pi) - np.pi fit0[1] *= -1 fit0[0] = (fit0[0] + np.pi) % (2*np.pi) - np.pi if plot: fig = plt.figure() ax = fig.add_subplot(111) ax.plot(ph, p10, 'b.', label='|10>') ax.plot(ph, p01, 'g.', label='|01>') ax.plot(ph, fitfunc(ph, fit0), 'r-') ax.axvline(fit0[0], linestyle='--', color='gray') ax.set_title('microwave phase adjustment, qubit 0, ref 1: phase = %0.5g' % fit0[0]) ax.legend() print 'old phase:', q0['uwavePhase'] print 'new phase:', fit0[0] Qubits[0]['uwavePhase'] = fit0[0] ph, _p00, p01, p10, _p11 = data2.T fit2, _ = leastsq(lambda c: fitfunc(ph, c) - p01, [q2['uwavePhase'], (max(p01)-min(p01))/2.0, (max(p01)+min(p01))/2.0]) if fit2[1] < 0: fit2[0] = (fit2[0] + 2*np.pi) % (2*np.pi) - np.pi fit2[1] *= -1 fit2[0] = (fit2[0] + np.pi) % (2*np.pi) - np.pi if plot: fig = plt.figure() ax = fig.add_subplot(111) ax.plot(ph, p10, 'b.', label='|10>') ax.plot(ph, p01, 'g.', label='|01>') ax.plot(ph, fitfunc(ph, fit2), 'r-') ax.axvline(fit2[0], linestyle='--', color='gray') ax.set_title('microwave phase adjustment, qubit 2, ref 1: phase = %0.5g' % fit2[0]) ax.legend() print 'old phase:', q2['uwavePhase'] print 'new phase:', fit2[0] Qubits[2]['uwavePhase'] = fit2[0] return fit0[0], fit2[0] def check_phase_vs_time(s, plot=True): s, qubits, Qubits = util.loadQubits(s, write_access=True) phases0 = [] phases2 = [] t0s = st.r[0:12:1,ns] for t0 in t0s: ph0, ph2 = tune_phases(s, t0, stats=1200, res=20, plot=False) phases0.append(ph0) phases2.append(ph2) phases0 = np.unwrap(phases0) phases2 = np.unwrap(phases2) fit0 = np.polyfit(t0s, phases0, 1) fit2 = np.polyfit(t0s, phases2, 1) df0 = (s.q1['f10'] - s.q0['f10'])[GHz] df2 = (s.q1['f10'] - s.q2['f10'])[GHz] if plot: fig = plt.figure() ax = fig.add_subplot(111) ax.plot(t0s, phases0, 'b.', label='measured phase') ax.plot(t0s, np.polyval(fit0, t0s), 'r-', label='phase fit') ax.plot(t0s, np.polyval([-2*np.pi*df0, 0], t0s), 'c-', label='detuning') ax.legend() fig = plt.figure() ax = fig.add_subplot(111) ax.plot(t0s, phases2, 'b.', label='measured phase') ax.plot(t0s, 
np.polyval(fit2, t0s), 'r-', label='phase fit') ax.plot(t0s, np.polyval([-2*np.pi*df2, 0], t0s), 'c-', label='detuning') ax.legend() print 'qubit 0:' print ' detuning:', df0 print ' phase fit:', fit0[0]/(2*np.pi) print ' phase offset:', fit0[1]/(2*np.pi) print Qubits[0]['uwavePhaseSlope'] = fit0[0]/(2*np.pi) * GHz Qubits[0]['uwavePhaseOfs'] = fit0[1] Qubits[0]['uwavePhaseFit'] = fit0 print 'qubit 2:' print ' detuning q2:', df2 print ' phase fit:', fit2[0]/(2*np.pi) print ' phase offset:', fit2[1]/(2*np.pi) print Qubits[2]['uwavePhaseSlope'] = fit2[0]/(2*np.pi) * GHz Qubits[2]['uwavePhaseOfs'] = fit2[1] Qubits[2]['uwavePhaseFit'] = fit2 def tune_swap_dphases(s, calibrated_amp=True): s, qubits, Qubits = util.loadQubits(s, write_access=True) q0, q1, q2 = qubits Q0, _Q1, Q2 = Qubits if not calibrated_amp: zpafunc0 = mq.get_zpa_func(q0) zpafunc2 = mq.get_zpa_func(q2) q0['swapAmp'] = zpafunc0(q1['f10']) q2['swapAmp'] = zpafunc2(q1['f10']) def fitfunc(x, c): return np.cos(x - c[0]) * c[1] + c[2] def fit_dphase(i, q): print 'measuring qubit', i phase = st.r[-np.pi:np.pi:np.pi/20] data = werner.swap_dphase_adjust(s, phase, adjust=i, ref=1, stats=600, noisy=False, collect=True, save=False) ph, p1 = data.T fit, _ = leastsq(lambda c: fitfunc(ph, c) - p1, [ph[np.argmax(p1)], (max(p1)-min(p1))/2.0, (max(p1)+min(p1))/2.0]) if fit[1] < 0: fit[0] = (fit[0] + 2*np.pi) % (2*np.pi) - np.pi fit[1] *= -1 print ' dphase =', fit[0] dphase = fit[0] return dphase dphase0 = fit_dphase(0, q0) dphase2 = fit_dphase(2, q2) print 'qubit 0:' print ' swapDphase:', dphase0 print Q0['swapDphase'] = dphase0 print 'qubit 2:' print ' swapDphase:', dphase2 print Q2['swapDphase'] = dphase2 def tune_dphases(s, calibrated_amp=True): s, qubits, Qubits = util.loadQubits(s, write_access=True) q0, q1, q2 = qubits Q0, _Q1, Q2 = Qubits if not calibrated_amp: zpafunc0 = mq.get_zpa_func(q0) zpafunc2 = mq.get_zpa_func(q2) q0['wZpulseAmp'] = zpafunc0(q1['f10']) q2['wZpulseAmp'] = zpafunc2(q1['f10']) def fitfunc(x, c): return np.cos(x - c[0]) * c[1] + c[2] zp_rng = st.r[0:25:1,ns] ts = np.array([zp_len[ns] for zp_len in zp_rng]) def fit_phases(i, q): print 'measuring qubit', i dphases = [] for zp_len in zp_rng: q['wZpulseLen'] = zp_len phase = st.r[-np.pi:np.pi:np.pi/20] data = werner.w_dphase_adjust(s, phase, adjust=i, ref=1, stats=600, noisy=False, collect=True, save=True) ph, p1 = data.T fit, _ = leastsq(lambda c: fitfunc(ph, c) - p1, [ph[np.argmax(p1)], (max(p1)-min(p1))/2.0, (max(p1)+min(p1))/2.0]) if fit[1] < 0: fit[0] = (fit[0] + 2*np.pi) % (2*np.pi) - np.pi fit[1] *= -1 print ' t =', zp_len[ns], ' dphase =', fit[0] dphases.append(fit[0]) print return np.unwrap(dphases) dphases0 = fit_phases(0, q0) dphases2 = fit_phases(2, q2) fit0 = np.polyfit(ts, dphases0, 1) fit2 = np.polyfit(ts, dphases2, 1) df0 = (q1['f10'] - q0['f10'])[GHz] df2 = (q1['f10'] - q2['f10'])[GHz] fig = plt.figure() ax = fig.add_subplot(111) ax.plot(ts, dphases0, 'b.', label='measured phase') ax.plot(ts, np.polyval(fit0, ts), 'r-', label='phase fit') ax.plot(ts, np.polyval([-2*np.pi*df0, 0], ts), 'c-', label='detuning') ax.legend() fig = plt.figure() ax = fig.add_subplot(111) ax.plot(ts, dphases2, 'b.') ax.plot(ts, np.polyval(fit2, ts), 'r-') ax.plot(ts, np.polyval([-2*np.pi*df2, 0], ts), 'c-', label='detuning') ax.legend() print 'qubit 0:' print ' detuning:', df0 print ' phase fit:', fit0[0]/(2*np.pi) print ' phase offset:', fit0[1]/(2*np.pi) print Q0['wDphaseSlope'] = fit0[0]/(2*np.pi) * GHz Q0['wDphaseFit'] = fit0 print 'qubit 2:' print ' detuning q2:', df2 print ' 
phase fit:', fit2[0]/(2*np.pi) print ' phase offset:', fit2[1]/(2*np.pi) print Q2['wDphaseSlope'] = fit2[0]/(2*np.pi) * GHz Q2['wDphaseFit'] = fit2 def measure_w(s, with_tomo=True): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits t_swap = (q0['swapLen'] + q2['swapLen']) / 2 t_couple = t_swap * 4.0/9.0 for _i in itertools.count(): # couple all three qubits together null012 = measurement.Null(3, [0,1,2]) werner.w_state(s, pi_pulse_on=1, t_couple=1000*ns, delay=st.r[0:50:1,ns], measure=null012, stats=1200) werner.w_state(s, pi_pulse_on=1, t_couple=t_couple, delay=st.r[0:50:1,ns], measure=null012, stats=1200) if with_tomo: # do tomography tomo012 = measurement.TomoNull(3, [0,1,2]) opts = { 'pi_pulse_on': 1, 'measure': tomo012, 'stats': 600, 'pipesize': 1, } werner.w_state(s, t_couple=1000*ns, delay=st.r[0:30:1,ns], **opts) werner.w_state(s, t_couple=t_couple, delay=st.r[0:30:1,ns], **opts) werner.w_state(s, t_couple=1000*ns, delay=st.r[15:20:0.25,ns], **opts) werner.w_state(s, t_couple=t_couple, delay=st.r[15:20:0.25,ns], **opts) def tweak_detunings(s): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits zpa0 = q0['swapAmp'] zpa2 = q2['swapAmp'] def sfunc(q): p0 = q['coupling1'][MHz] p1 = q['coupling1DsByDzpa'][MHz] zpa0 = q['swapAmp'] return lambda zpa: np.sqrt(p0**2 + p1**2*(zpa - zpa0)**2) sfunc0 = sfunc(q0) sfunc2 = sfunc(q2) smin0 = sfunc0(q0['swapAmp']) smin2 = sfunc2(q2['swapAmp']) print 'minimum splittings:' print ' q0 <-> q1: %g MHz' % smin0 print ' q2 <-> q1: %g MHz' % smin2 print if smin0 < smin2: # adjust zpa 0 zpa0opt = fsolve(lambda zpa: sfunc0(zpa) - smin2, zpa0) det0 = q0['coupling1DsByDzpa'] * (zpa0opt - q0['swapAmp']) print 'qubit0 optimal zpa=%g, s=%g, det=%g' % (zpa0opt, sfunc0(zpa0opt), det0) zpas = sorted([zpa0opt, 2*zpa0 - zpa0opt]) print 'trying', zpas for zpa0 in zpas: q0['swapAmp'] = zpa0 measure_w(s, with_tomo=False) else: # adjust zpa 0 zpa2opt = fsolve(lambda zpa: sfunc2(zpa) - smin0, zpa2) det2 = q2['coupling1DsByDzpa'] * (zpa2opt - q2['swapAmp']) print 'qubit2 optimal zpa=%g, s=%g, det=%g' % (zpa2opt, sfunc2(zpa2opt), det2) zpas = sorted([zpa2opt, 2*zpa2 - zpa2opt]) print 'trying', zpas for zpa2 in zpas: q2['swapAmp'] = zpa2 measure_w(s, with_tomo=False) def measure_ghz(s, with_tomo=True, with_ghz=True): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits for _i in [0]: #itertools.count(): # couple all three qubits together null012 = measurement.Null(3, [0,1,2]) #mq.w_state(s, pi_pulse_on=1, t_couple=1000*ns, delay=st.r[0:50:1,ns], measure=null012, stats=1200) #mq.w_state(s, pi_pulse_on=1, t_couple=17.5*ns, delay=st.r[0:50:1,ns], measure=null012, stats=1200) ghz.ghz_simult(s, stage=st.r[0:3:0.05], measure=measurement.Null(3), stats=1800) ghz.ghz_iswap(s, stage=st.r[0:4:0.05], measure=measurement.Null(3), stats=1800) if with_ghz: ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=ghz.GHZ(), stats=1200) ghz.ghz_iswap(s, stage=st.r[0:4:0.1], measure=ghz.GHZ(), stats=1200) if with_tomo: # do tomography tomo012 = measurement.TomoNull(3, [0,1,2]) opts = { 'pi_pulse_on': 1, 'measure': tomo012, 'stats': 600, 'pipesize': 1, } #mq.w_state(s, t_couple=1000*ns, delay=st.r[0:30:1,ns], **opts) #mq.w_state(s, t_couple=19*ns, delay=st.r[0:30:1,ns], **opts) #mq.w_state(s, t_couple=1000*ns, delay=st.r[15:25:0.25,ns], **opts) #mq.w_state(s, t_couple=19*ns, delay=st.r[15:25:0.25,ns], **opts) ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=measurement.TomoNull(3), pipesize=1, stats=1200) ghz.ghz_iswap(s, stage=st.r[0:4:0.1], measure=measurement.TomoNull(3), pipesize=1, stats=1200) def 
measure_ghz_iswap(s, with_tomo=True, with_ghz=True): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits while True: #for sf, ef, es in [(0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 1, 1), # (2, 0, 0), (2, 0, 1), (2, 1, 0), (2, 1, 1)]: for sf, ef, es in [(0, 1, 0), (0, 1, 1), (2, 0, 0), (2, 0, 1), (2, 1, 0), (2, 1, 1)]: # couple all three qubits together null012 = measurement.Null(3, [0,1,2]) #mq.w_state(s, pi_pulse_on=1, t_couple=1000*ns, delay=st.r[0:50:1,ns], measure=null012, stats=1200) #mq.w_state(s, pi_pulse_on=1, t_couple=17.5*ns, delay=st.r[0:50:1,ns], measure=null012, stats=1200) opts = { 'swap_first': sf, 'swap_second': 2-sf, 'echo_first': ef, 'echo_second': es, } opts2 = { 'swap_first': sf, 'swap_second': 2-sf, } #ghz.ghz_simult(s, stage=st.r[0:3:0.05], measure=measurement.Null(3), stats=1800) #ghz.ghz_iswap(s, stage=st.r[0:4:0.05], measure=null012, stats=1200, **opts) #if with_ghz: # ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=ghz.GHZ(), stats=1200) # ghz.ghz_iswap(s, stage=st.r[0:4:0.1], measure=ghz.GHZ(), stats=1200) if with_tomo: # do tomography tomo012 = measurement.TomoNull(3, [0,1,2]) #opts = { # 'pi_pulse_on': 1, # 'measure': tomo012, # 'stats': 600, # 'pipesize': 1, #} #mq.w_state(s, t_couple=1000*ns, delay=st.r[0:30:1,ns], **opts) #mq.w_state(s, t_couple=19*ns, delay=st.r[0:30:1,ns], **opts) #mq.w_state(s, t_couple=1000*ns, delay=st.r[15:25:0.25,ns], **opts) #mq.w_state(s, t_couple=19*ns, delay=st.r[15:25:0.25,ns], **opts) #ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=measurement.TomoNull(3), pipesize=1, stats=600) #ghz.ghz_iswap(s, stage=[4], measure=tomo012, pipesize=1, stats=6000, **opts) ghz.ghz_iswap_tight(s, stage=[4], measure=tomo012, pipesize=1, stats=6000, **opts2) #ghz.ghz_iswap(s, stage=st.r[0:4:0.2], measure=tomo012, pipesize=1, stats=600, **opts) def measure_ghz_iswap_tight(s, with_tomo=True, with_ghz=True): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits for _i in range(1): for sf in [0, 2]: opts = { 'swap_first': sf, 'swap_second': 2-sf, } #null012 = measurement.Null(3, [0,1,2]) #ghz.ghz_simult(s, stage=st.r[0:3:0.05], measure=null012, stats=1800) #ghz.ghz_iswap(s, stage=st.r[0:4:0.05], measure=null012, stats=1200, **opts) #if with_ghz: # ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=ghz.GHZ(), stats=1200) # ghz.ghz_iswap(s, stage=st.r[0:4:0.1], measure=ghz.GHZ(), stats=1200) if with_tomo: tomo012 = measurement.TomoNull(3, [0,1,2]) ghz.ghz_iswap_tight(s, stage=[0,1,2,3,4], measure=tomo012, pipesize=1, stats=6000, **opts) def measure_ghz_simult(s, with_tomo=True, with_ghz=True): s, qubits = util.loadQubits(s) q0, _q1, q2 = qubits for _i in range(1): for sf in [0, 2]: opts = { 'swap_first': sf, 'swap_second': 2-sf, } #null012 = measurement.Null(3, [0,1,2]) #ghz.ghz_simult(s, stage=st.r[0:3:0.05], measure=null012, stats=1800) #ghz.ghz_iswap(s, stage=st.r[0:4:0.05], measure=null012, stats=1200, **opts) #if with_ghz: # ghz.ghz_simult(s, stage=st.r[0:3:0.1], measure=ghz.GHZ(), stats=1200) # ghz.ghz_iswap(s, stage=st.r[0:4:0.1], measure=ghz.GHZ(), stats=1200) if with_tomo: tomo012 = measurement.TomoNull(3, [0,1,2]) ghz.ghz_iswap_tight(s, stage=[0,1,2,3,4], measure=tomo012, pipesize=1, stats=6000, **opts)
gpl-2.0
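The phase-tuning routines in the file above all follow one pattern: sweep a phase, fit a sinusoid to the measured population with leastsq, flip the sign if the fitted amplitude comes out negative, and wrap the fitted phase into [-pi, pi). A minimal, self-contained sketch of that pattern follows; the synthetic data, noise level, and "true" parameters are illustrative assumptions, not values from the lab code.

import numpy as np
from scipy.optimize import leastsq

def fitfunc(x, c):
    # c = [phase, amplitude, offset], the same parameterization as tune_phases
    return -np.sin(x - c[0]) * c[1] + c[2]

rng = np.random.RandomState(0)
ph = np.linspace(-np.pi, np.pi, 80)                        # swept microwave phase
p10 = fitfunc(ph, [0.7, 0.4, 0.5]) + 0.02 * rng.randn(80)  # mock |10> population

guess = [ph[np.argmax(p10)], (p10.max() - p10.min()) / 2.0,
         (p10.max() + p10.min()) / 2.0]
fit, _ = leastsq(lambda c: fitfunc(ph, c) - p10, guess)
if fit[1] < 0:             # a negative amplitude is a pi phase shift in disguise
    fit[0] += np.pi
    fit[1] *= -1
fit[0] = (fit[0] + np.pi) % (2 * np.pi) - np.pi            # wrap into [-pi, pi)
print('fitted phase: %.3f rad' % fit[0])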
mrshu/scikit-learn
examples/cluster/plot_affinity_propagation.py
2
2276
""" ================================================= Demo of affinity propagation clustering algorithm ================================================= Reference: Brendan J. Frey and Delbert Dueck, "Clustering by Passing Messages Between Data Points", Science Feb. 2007 """ print __doc__ from sklearn.cluster import AffinityPropagation from sklearn import metrics from sklearn.datasets.samples_generator import make_blobs ############################################################################## # Generate sample data centers = [[1, 1], [-1, -1], [1, -1]] X, labels_true = make_blobs(n_samples=300, centers=centers, cluster_std=0.5, random_state=0) ############################################################################## # Compute Affinity Propagation af = AffinityPropagation(preference=-50).fit(X) cluster_centers_indices = af.cluster_centers_indices_ labels = af.labels_ n_clusters_ = len(cluster_centers_indices) print 'Estimated number of clusters: %d' % n_clusters_ print "Homogeneity: %0.3f" % metrics.homogeneity_score(labels_true, labels) print "Completeness: %0.3f" % metrics.completeness_score(labels_true, labels) print "V-measure: %0.3f" % metrics.v_measure_score(labels_true, labels) print "Adjusted Rand Index: %0.3f" % \ metrics.adjusted_rand_score(labels_true, labels) print("Adjusted Mutual Information: %0.3f" % metrics.adjusted_mutual_info_score(labels_true, labels)) print("Silhouette Coefficient: %0.3f" % metrics.silhouette_score(X, labels, metric='sqeuclidean')) ############################################################################## # Plot result import pylab as pl from itertools import cycle pl.close('all') pl.figure(1) pl.clf() colors = cycle('bgrcmykbgrcmykbgrcmykbgrcmyk') for k, col in zip(range(n_clusters_), colors): class_members = labels == k cluster_center = X[cluster_centers_indices[k]] pl.plot(X[class_members, 0], X[class_members, 1], col + '.') pl.plot(cluster_center[0], cluster_center[1], 'o', markerfacecolor=col, markeredgecolor='k', markersize=14) for x in X[class_members]: pl.plot([cluster_center[0], x[0]], [cluster_center[1], x[1]], col) pl.title('Estimated number of clusters: %d' % n_clusters_) pl.show()
bsd-3-clause
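The affinity propagation demo above targets a Python 2-era scikit-learn (bare print statements, sklearn.datasets.samples_generator). A minimal sketch of the same clustering call against a recent scikit-learn, under the assumption that make_blobs now lives in sklearn.datasets and AffinityPropagation accepts random_state:

from sklearn.cluster import AffinityPropagation
from sklearn.datasets import make_blobs  # samples_generator was later removed

X, labels_true = make_blobs(n_samples=300, centers=[[1, 1], [-1, -1], [1, -1]],
                            cluster_std=0.5, random_state=0)
af = AffinityPropagation(preference=-50, random_state=0).fit(X)
print('Estimated number of clusters: %d' % len(af.cluster_centers_indices_))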
ahoyosid/scikit-learn
examples/applications/plot_prediction_latency.py
234
11277
""" ================== Prediction Latency ================== This is an example showing the prediction latency of various scikit-learn estimators. The goal is to measure the latency one can expect when doing predictions either in bulk or atomic (i.e. one by one) mode. The plots represent the distribution of the prediction latency as a boxplot. """ # Authors: Eustache Diemert <[email protected]> # License: BSD 3 clause from __future__ import print_function from collections import defaultdict import time import gc import numpy as np import matplotlib.pyplot as plt from scipy.stats import scoreatpercentile from sklearn.datasets.samples_generator import make_regression from sklearn.ensemble.forest import RandomForestRegressor from sklearn.linear_model.ridge import Ridge from sklearn.linear_model.stochastic_gradient import SGDRegressor from sklearn.svm.classes import SVR def _not_in_sphinx(): # Hack to detect whether we are running by the sphinx builder return '__file__' in globals() def atomic_benchmark_estimator(estimator, X_test, verbose=False): """Measure runtime prediction of each instance.""" n_instances = X_test.shape[0] runtimes = np.zeros(n_instances, dtype=np.float) for i in range(n_instances): instance = X_test[i, :] start = time.time() estimator.predict(instance) runtimes[i] = time.time() - start if verbose: print("atomic_benchmark runtimes:", min(runtimes), scoreatpercentile( runtimes, 50), max(runtimes)) return runtimes def bulk_benchmark_estimator(estimator, X_test, n_bulk_repeats, verbose): """Measure runtime prediction of the whole input.""" n_instances = X_test.shape[0] runtimes = np.zeros(n_bulk_repeats, dtype=np.float) for i in range(n_bulk_repeats): start = time.time() estimator.predict(X_test) runtimes[i] = time.time() - start runtimes = np.array(list(map(lambda x: x / float(n_instances), runtimes))) if verbose: print("bulk_benchmark runtimes:", min(runtimes), scoreatpercentile( runtimes, 50), max(runtimes)) return runtimes def benchmark_estimator(estimator, X_test, n_bulk_repeats=30, verbose=False): """ Measure runtimes of prediction in both atomic and bulk mode. Parameters ---------- estimator : already trained estimator supporting `predict()` X_test : test input n_bulk_repeats : how many times to repeat when evaluating bulk mode Returns ------- atomic_runtimes, bulk_runtimes : a pair of `np.array` which contain the runtimes in seconds. """ atomic_runtimes = atomic_benchmark_estimator(estimator, X_test, verbose) bulk_runtimes = bulk_benchmark_estimator(estimator, X_test, n_bulk_repeats, verbose) return atomic_runtimes, bulk_runtimes def generate_dataset(n_train, n_test, n_features, noise=0.1, verbose=False): """Generate a regression dataset with the given parameters.""" if verbose: print("generating dataset...") X, y, coef = make_regression(n_samples=n_train + n_test, n_features=n_features, noise=noise, coef=True) X_train = X[:n_train] y_train = y[:n_train] X_test = X[n_train:] y_test = y[n_train:] idx = np.arange(n_train) np.random.seed(13) np.random.shuffle(idx) X_train = X_train[idx] y_train = y_train[idx] std = X_train.std(axis=0) mean = X_train.mean(axis=0) X_train = (X_train - mean) / std X_test = (X_test - mean) / std std = y_train.std(axis=0) mean = y_train.mean(axis=0) y_train = (y_train - mean) / std y_test = (y_test - mean) / std gc.collect() if verbose: print("ok") return X_train, y_train, X_test, y_test def boxplot_runtimes(runtimes, pred_type, configuration): """ Plot a new `Figure` with boxplots of prediction runtimes. 
Parameters ---------- runtimes : list of `np.array` of latencies in micro-seconds cls_names : list of estimator class names that generated the runtimes pred_type : 'bulk' or 'atomic' """ fig, ax1 = plt.subplots(figsize=(10, 6)) bp = plt.boxplot(runtimes, ) cls_infos = ['%s\n(%d %s)' % (estimator_conf['name'], estimator_conf['complexity_computer']( estimator_conf['instance']), estimator_conf['complexity_label']) for estimator_conf in configuration['estimators']] plt.setp(ax1, xticklabels=cls_infos) plt.setp(bp['boxes'], color='black') plt.setp(bp['whiskers'], color='black') plt.setp(bp['fliers'], color='red', marker='+') ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5) ax1.set_axisbelow(True) ax1.set_title('Prediction Time per Instance - %s, %d feats.' % ( pred_type.capitalize(), configuration['n_features'])) ax1.set_ylabel('Prediction Time (us)') plt.show() def benchmark(configuration): """Run the whole benchmark.""" X_train, y_train, X_test, y_test = generate_dataset( configuration['n_train'], configuration['n_test'], configuration['n_features']) stats = {} for estimator_conf in configuration['estimators']: print("Benchmarking", estimator_conf['instance']) estimator_conf['instance'].fit(X_train, y_train) gc.collect() a, b = benchmark_estimator(estimator_conf['instance'], X_test) stats[estimator_conf['name']] = {'atomic': a, 'bulk': b} cls_names = [estimator_conf['name'] for estimator_conf in configuration[ 'estimators']] runtimes = [1e6 * stats[clf_name]['atomic'] for clf_name in cls_names] boxplot_runtimes(runtimes, 'atomic', configuration) runtimes = [1e6 * stats[clf_name]['bulk'] for clf_name in cls_names] boxplot_runtimes(runtimes, 'bulk (%d)' % configuration['n_test'], configuration) def n_feature_influence(estimators, n_train, n_test, n_features, percentile): """ Estimate influence of the number of features on prediction time. 
Parameters ---------- estimators : dict of (name (str), estimator) to benchmark n_train : number of training instances (int) n_test : number of testing instances (int) n_features : list of feature-space dimensionality to test (int) percentile : percentile at which to measure the speed (int [0-100]) Returns ------- percentiles : dict(estimator_name, dict(n_features, percentile_perf_in_us)) """ percentiles = defaultdict(defaultdict) for n in n_features: print("benchmarking with %d features" % n) X_train, y_train, X_test, y_test = generate_dataset(n_train, n_test, n) for cls_name, estimator in estimators.items(): estimator.fit(X_train, y_train) gc.collect() runtimes = bulk_benchmark_estimator(estimator, X_test, 30, False) percentiles[cls_name][n] = 1e6 * scoreatpercentile(runtimes, percentile) return percentiles def plot_n_features_influence(percentiles, percentile): fig, ax1 = plt.subplots(figsize=(10, 6)) colors = ['r', 'g', 'b'] for i, cls_name in enumerate(percentiles.keys()): x = np.array(sorted([n for n in percentiles[cls_name].keys()])) y = np.array([percentiles[cls_name][n] for n in x]) plt.plot(x, y, color=colors[i], ) ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5) ax1.set_axisbelow(True) ax1.set_title('Evolution of Prediction Time with #Features') ax1.set_xlabel('#Features') ax1.set_ylabel('Prediction Time at %d%%-ile (us)' % percentile) plt.show() def benchmark_throughputs(configuration, duration_secs=0.1): """benchmark throughput for different estimators.""" X_train, y_train, X_test, y_test = generate_dataset( configuration['n_train'], configuration['n_test'], configuration['n_features']) throughputs = dict() for estimator_config in configuration['estimators']: estimator_config['instance'].fit(X_train, y_train) start_time = time.time() n_predictions = 0 while (time.time() - start_time) < duration_secs: estimator_config['instance'].predict(X_test[0]) n_predictions += 1 throughputs[estimator_config['name']] = n_predictions / duration_secs return throughputs def plot_benchmark_throughput(throughputs, configuration): fig, ax = plt.subplots(figsize=(10, 6)) colors = ['r', 'g', 'b'] cls_infos = ['%s\n(%d %s)' % (estimator_conf['name'], estimator_conf['complexity_computer']( estimator_conf['instance']), estimator_conf['complexity_label']) for estimator_conf in configuration['estimators']] cls_values = [throughputs[estimator_conf['name']] for estimator_conf in configuration['estimators']] plt.bar(range(len(throughputs)), cls_values, width=0.5, color=colors) ax.set_xticks(np.linspace(0.25, len(throughputs) - 0.75, len(throughputs))) ax.set_xticklabels(cls_infos, fontsize=10) ymax = max(cls_values) * 1.2 ax.set_ylim((0, ymax)) ax.set_ylabel('Throughput (predictions/sec)') ax.set_title('Prediction Throughput for different estimators (%d ' 'features)' % configuration['n_features']) plt.show() ############################################################################### # main code start_time = time.time() # benchmark bulk/atomic prediction speed for various regressors configuration = { 'n_train': int(1e3), 'n_test': int(1e2), 'n_features': int(1e2), 'estimators': [ {'name': 'Linear Model', 'instance': SGDRegressor(penalty='elasticnet', alpha=0.01, l1_ratio=0.25, fit_intercept=True), 'complexity_label': 'non-zero coefficients', 'complexity_computer': lambda clf: np.count_nonzero(clf.coef_)}, {'name': 'RandomForest', 'instance': RandomForestRegressor(), 'complexity_label': 'estimators', 'complexity_computer': lambda clf: clf.n_estimators}, {'name': 'SVR', 
'instance': SVR(kernel='rbf'), 'complexity_label': 'support vectors', 'complexity_computer': lambda clf: len(clf.support_vectors_)}, ] } benchmark(configuration) # benchmark n_features influence on prediction speed percentile = 90 percentiles = n_feature_influence({'ridge': Ridge()}, configuration['n_train'], configuration['n_test'], [100, 250, 500], percentile) plot_n_features_influence(percentiles, percentile) # benchmark throughput throughputs = benchmark_throughputs(configuration) plot_benchmark_throughput(throughputs, configuration) stop_time = time.time() print("example run in %.2fs" % (stop_time - start_time))
bsd-3-clause
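The benchmark above distinguishes atomic (one sample per predict call) from bulk prediction. A compact sketch of that measurement with a single estimator, assuming a recent scikit-learn where predict requires 2-D input (the original passes 1-D rows, which older releases accepted):

import time
import numpy as np
from sklearn.linear_model import Ridge

rng = np.random.RandomState(13)
X, y = rng.normal(size=(1000, 100)), rng.normal(size=1000)
est = Ridge().fit(X[:900], y[:900])
X_test = X[900:]

t0 = time.perf_counter()                 # bulk: one call for all test rows
est.predict(X_test)
bulk = (time.perf_counter() - t0) / len(X_test)

t0 = time.perf_counter()                 # atomic: one call per row
for row in X_test:
    est.predict(row.reshape(1, -1))
atomic = (time.perf_counter() - t0) / len(X_test)

print('per-instance latency: bulk %.1f us, atomic %.1f us'
      % (bulk * 1e6, atomic * 1e6))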
brian-team/brian2cuda
examples/compartmental/bipolar_cell_cuda.py
1
2019
''' A pseudo MSO neuron, with two dendrites and one axon (fake geometry). ''' import os import matplotlib matplotlib.use('Agg') from brian2 import * import brian2cuda # cuda_standalone device name = os.path.basename(__file__).replace('.py', '') codefolder = os.path.join('code', name) print('running example {}'.format(name)) print('compiling model in {}'.format(codefolder)) set_device('cuda_standalone', build_on_run=False) # multiple runs require this change (see below) # Morphology morpho = Soma(30*um) morpho.axon = Cylinder(diameter=1*um, length=300*um, n=100) morpho.L = Cylinder(diameter=1*um, length=100*um, n=50) morpho.R = Cylinder(diameter=1*um, length=150*um, n=50) # Passive channels gL = 1e-4*siemens/cm**2 EL = -70*mV eqs=''' Im = gL * (EL - v) : amp/meter**2 I : amp (point current) ''' neuron = SpatialNeuron(morphology=morpho, model=eqs, Cm=1*uF/cm**2, Ri=100*ohm*cm, method='exponential_euler') neuron.v = EL neuron.I = 0*amp # Monitors mon_soma = StateMonitor(neuron, 'v', record=[0]) mon_L = StateMonitor(neuron.L, 'v', record=True) mon_R = StateMonitor(neuron, 'v', record=morpho.R[75*um]) run(1*ms) neuron.I[morpho.L[50*um]] = 0.2*nA # injecting in the left dendrite run(5*ms) neuron.I = 0*amp run(50*ms, report='text', profile=True) print(profiling_summary()) # cf. https://brian2.readthedocs.io/en/stable/user/computation.html#multiple-run-calls device.build( directory=codefolder, compile = True, run = True, debug=False) subplot(211) plot(mon_L.t/ms, mon_soma[0].v/mV, 'k') plot(mon_L.t/ms, mon_L[morpho.L[50*um]].v/mV, 'r') plot(mon_L.t/ms, mon_R[morpho.R[75*um]].v/mV, 'b') ylabel('v (mV)') subplot(212) for x in linspace(0*um, 100*um, 10, endpoint=False): plot(mon_L.t/ms, mon_L[morpho.L[x]].v/mV) xlabel('Time (ms)') ylabel('v (mV)') #show() plotpath = os.path.join('plots', '{}.png'.format(name)) savefig(plotpath) print('plot saved in {}'.format(plotpath)) print('the generated model in {} needs to be removed manually if wanted'.format(codefolder))
gpl-2.0
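The bipolar-cell model above is purely passive (Im = gL * (EL - v)), so each compartment relaxes toward EL with time constant Cm/gL, and toward EL + I/gL under a constant injected current. A library-free forward-Euler sketch of that single-compartment dynamics, with an assumed current density and step size chosen only for illustration:

gL = 1e-4    # leak conductance, S/cm^2 (as in the model above)
EL = -70e-3  # leak reversal potential, V
Cm = 1e-6    # membrane capacitance, F/cm^2 (1 uF/cm^2)
I = 1e-6     # injected current density, A/cm^2 (assumed)
dt, T = 1e-5, 0.05   # 10 us step, 50 ms total (about 5 time constants)

v = EL
for _ in range(int(T / dt)):
    v += dt * (gL * (EL - v) + I) / Cm   # Cm dv/dt = gL (EL - v) + I

print('v after %.0f ms: %.2f mV (steady state EL + I/gL = %.2f mV)'
      % (T * 1e3, v * 1e3, (EL + I / gL) * 1e3))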
tomlof/scikit-learn
examples/cross_decomposition/plot_compare_cross_decomposition.py
19
4761
""" =================================== Compare cross decomposition methods =================================== Simple usage of various cross decomposition algorithms: - PLSCanonical - PLSRegression, with multivariate response, a.k.a. PLS2 - PLSRegression, with univariate response, a.k.a. PLS1 - CCA Given 2 multivariate covarying two-dimensional datasets, X, and Y, PLS extracts the 'directions of covariance', i.e. the components of each datasets that explain the most shared variance between both datasets. This is apparent on the **scatterplot matrix** display: components 1 in dataset X and dataset Y are maximally correlated (points lie around the first diagonal). This is also true for components 2 in both dataset, however, the correlation across datasets for different components is weak: the point cloud is very spherical. """ print(__doc__) import numpy as np import matplotlib.pyplot as plt from sklearn.cross_decomposition import PLSCanonical, PLSRegression, CCA ############################################################################### # Dataset based latent variables model n = 500 # 2 latents vars: l1 = np.random.normal(size=n) l2 = np.random.normal(size=n) latents = np.array([l1, l1, l2, l2]).T X = latents + np.random.normal(size=4 * n).reshape((n, 4)) Y = latents + np.random.normal(size=4 * n).reshape((n, 4)) X_train = X[:n // 2] Y_train = Y[:n // 2] X_test = X[n // 2:] Y_test = Y[n // 2:] print("Corr(X)") print(np.round(np.corrcoef(X.T), 2)) print("Corr(Y)") print(np.round(np.corrcoef(Y.T), 2)) ############################################################################### # Canonical (symmetric) PLS # Transform data # ~~~~~~~~~~~~~~ plsca = PLSCanonical(n_components=2) plsca.fit(X_train, Y_train) X_train_r, Y_train_r = plsca.transform(X_train, Y_train) X_test_r, Y_test_r = plsca.transform(X_test, Y_test) # Scatter plot of scores # ~~~~~~~~~~~~~~~~~~~~~~ # 1) On diagonal plot X vs Y scores on each components plt.figure(figsize=(12, 8)) plt.subplot(221) plt.plot(X_train_r[:, 0], Y_train_r[:, 0], "ob", label="train") plt.plot(X_test_r[:, 0], Y_test_r[:, 0], "or", label="test") plt.xlabel("x scores") plt.ylabel("y scores") plt.title('Comp. 1: X vs Y (test corr = %.2f)' % np.corrcoef(X_test_r[:, 0], Y_test_r[:, 0])[0, 1]) plt.xticks(()) plt.yticks(()) plt.legend(loc="best") plt.subplot(224) plt.plot(X_train_r[:, 1], Y_train_r[:, 1], "ob", label="train") plt.plot(X_test_r[:, 1], Y_test_r[:, 1], "or", label="test") plt.xlabel("x scores") plt.ylabel("y scores") plt.title('Comp. 2: X vs Y (test corr = %.2f)' % np.corrcoef(X_test_r[:, 1], Y_test_r[:, 1])[0, 1]) plt.xticks(()) plt.yticks(()) plt.legend(loc="best") # 2) Off diagonal plot components 1 vs 2 for X and Y plt.subplot(222) plt.plot(X_train_r[:, 0], X_train_r[:, 1], "*b", label="train") plt.plot(X_test_r[:, 0], X_test_r[:, 1], "*r", label="test") plt.xlabel("X comp. 1") plt.ylabel("X comp. 2") plt.title('X comp. 1 vs X comp. 2 (test corr = %.2f)' % np.corrcoef(X_test_r[:, 0], X_test_r[:, 1])[0, 1]) plt.legend(loc="best") plt.xticks(()) plt.yticks(()) plt.subplot(223) plt.plot(Y_train_r[:, 0], Y_train_r[:, 1], "*b", label="train") plt.plot(Y_test_r[:, 0], Y_test_r[:, 1], "*r", label="test") plt.xlabel("Y comp. 1") plt.ylabel("Y comp. 2") plt.title('Y comp. 1 vs Y comp. 
2 (test corr = %.2f)' % np.corrcoef(Y_test_r[:, 0], Y_test_r[:, 1])[0, 1]) plt.legend(loc="best") plt.xticks(()) plt.yticks(()) plt.show() ############################################################################### # PLS regression, with multivariate response, a.k.a. PLS2 n = 1000 q = 3 p = 10 X = np.random.normal(size=n * p).reshape((n, p)) B = np.array([[1, 2] + [0] * (p - 2)] * q).T # each Yj = 1*X1 + 2*X2 + noise Y = np.dot(X, B) + np.random.normal(size=n * q).reshape((n, q)) + 5 pls2 = PLSRegression(n_components=3) pls2.fit(X, Y) print("True B (such that: Y = XB + Err)") print(B) # compare pls2.coef_ with B print("Estimated B") print(np.round(pls2.coef_, 1)) pls2.predict(X) ############################################################################### # PLS regression, with univariate response, a.k.a. PLS1 n = 1000 p = 10 X = np.random.normal(size=n * p).reshape((n, p)) y = X[:, 0] + 2 * X[:, 1] + np.random.normal(size=n * 1) + 5 pls1 = PLSRegression(n_components=3) pls1.fit(X, y) # note that the number of components exceeds 1 (the dimension of y) print("Estimated betas") print(np.round(pls1.coef_, 1)) ############################################################################### # CCA (PLS mode B with symmetric deflation) cca = CCA(n_components=2) cca.fit(X_train, Y_train) X_train_r, Y_train_r = cca.transform(X_train, Y_train) X_test_r, Y_test_r = cca.transform(X_test, Y_test)
bsd-3-clause
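The point of the example above is that PLSCanonical finds paired directions whose scores are maximally correlated across the two datasets. A condensed sketch that builds the same latent-variable data and prints the per-component score correlations (same construction as above, shortened to the essentials):

import numpy as np
from sklearn.cross_decomposition import PLSCanonical

rng = np.random.RandomState(0)
l1, l2 = rng.normal(size=500), rng.normal(size=500)
latents = np.array([l1, l1, l2, l2]).T
X = latents + rng.normal(size=(500, 4))
Y = latents + rng.normal(size=(500, 4))

plsca = PLSCanonical(n_components=2).fit(X, Y)
X_r, Y_r = plsca.transform(X, Y)
for k in range(2):
    print('component %d: corr(X scores, Y scores) = %.2f'
          % (k + 1, np.corrcoef(X_r[:, k], Y_r[:, k])[0, 1]))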
suiyuan2009/tensorflow
tensorflow/contrib/learn/python/learn/learn_io/data_feeder_test.py
71
12923
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for `DataFeeder`.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import six from six.moves import xrange # pylint: disable=redefined-builtin # pylint: disable=wildcard-import from tensorflow.contrib.learn.python.learn.learn_io import * from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.platform import test # pylint: enable=wildcard-import class DataFeederTest(test.TestCase): # pylint: disable=undefined-variable """Tests for `DataFeeder`.""" def _wrap_dict(self, data, prepend=''): return {prepend + '1': data, prepend + '2': data} def _assert_raises(self, input_data): with self.assertRaisesRegexp(TypeError, 'annot convert'): data_feeder.DataFeeder(input_data, None, n_classes=0, batch_size=1) def test_input_uint32(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.uint32) self._assert_raises(data) self._assert_raises(self._wrap_dict(data)) def test_input_uint64(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.uint64) self._assert_raises(data) self._assert_raises(self._wrap_dict(data)) def _assert_dtype(self, expected_np_dtype, expected_tf_dtype, input_data): feeder = data_feeder.DataFeeder(input_data, None, n_classes=0, batch_size=1) if isinstance(input_data, dict): for k, v in list(feeder.input_dtype.items()): self.assertEqual(expected_np_dtype, v) else: self.assertEqual(expected_np_dtype, feeder.input_dtype) with ops.Graph().as_default() as g, self.test_session(g): inp, _ = feeder.input_builder() if isinstance(inp, dict): for k, v in list(inp.items()): self.assertEqual(expected_tf_dtype, v.dtype) else: self.assertEqual(expected_tf_dtype, inp.dtype) def test_input_int8(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.int8) self._assert_dtype(np.int8, dtypes.int8, data) self._assert_dtype(np.int8, dtypes.int8, self._wrap_dict(data)) def test_input_int16(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.int16) self._assert_dtype(np.int16, dtypes.int16, data) self._assert_dtype(np.int16, dtypes.int16, self._wrap_dict(data)) def test_input_int32(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.int32) self._assert_dtype(np.int32, dtypes.int32, data) self._assert_dtype(np.int32, dtypes.int32, self._wrap_dict(data)) def test_input_int64(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.int64) self._assert_dtype(np.int64, dtypes.int64, data) self._assert_dtype(np.int64, dtypes.int64, self._wrap_dict(data)) def test_input_uint8(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.uint8) self._assert_dtype(np.uint8, dtypes.uint8, data) self._assert_dtype(np.uint8, dtypes.uint8, self._wrap_dict(data)) def test_input_uint16(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.uint16) self._assert_dtype(np.uint16, dtypes.uint16, data) self._assert_dtype(np.uint16, 
dtypes.uint16, self._wrap_dict(data)) def test_input_float16(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.float16) self._assert_dtype(np.float16, dtypes.float16, data) self._assert_dtype(np.float16, dtypes.float16, self._wrap_dict(data)) def test_input_float32(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.float32) self._assert_dtype(np.float32, dtypes.float32, data) self._assert_dtype(np.float32, dtypes.float32, self._wrap_dict(data)) def test_input_float64(self): data = np.matrix([[1, 2], [3, 4]], dtype=np.float64) self._assert_dtype(np.float64, dtypes.float64, data) self._assert_dtype(np.float64, dtypes.float64, self._wrap_dict(data)) def test_input_bool(self): data = np.array([[False for _ in xrange(2)] for _ in xrange(2)]) self._assert_dtype(np.bool, dtypes.bool, data) self._assert_dtype(np.bool, dtypes.bool, self._wrap_dict(data)) def test_input_string(self): input_data = np.array([['str%d' % i for i in xrange(2)] for _ in xrange(2)]) self._assert_dtype(input_data.dtype, dtypes.string, input_data) self._assert_dtype(input_data.dtype, dtypes.string, self._wrap_dict(input_data)) def _assertAllClose(self, src, dest, src_key_of=None, src_prop=None): def func(x): val = getattr(x, src_prop) if src_prop else x return val if src_key_of is None else src_key_of[val] if isinstance(src, dict): for k in list(src.keys()): self.assertAllClose(func(src[k]), dest) else: self.assertAllClose(func(src), dest) def test_unsupervised(self): def func(feeder): with self.test_session(): inp, _ = feeder.input_builder() feed_dict_fn = feeder.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[1, 2]], feed_dict, 'name') data = np.matrix([[1, 2], [2, 3], [3, 4]]) func(data_feeder.DataFeeder(data, None, n_classes=0, batch_size=1)) func( data_feeder.DataFeeder( self._wrap_dict(data), None, n_classes=0, batch_size=1)) def test_data_feeder_regression(self): def func(df): inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name') self._assertAllClose(out, [2, 1], feed_dict, 'name') x = np.matrix([[1, 2], [3, 4]]) y = np.array([1, 2]) func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=3)) func( data_feeder.DataFeeder( self._wrap_dict(x, 'in'), self._wrap_dict(y, 'out'), n_classes=self._wrap_dict(0, 'out'), batch_size=3)) def test_epoch(self): def func(feeder): with self.test_session(): feeder.input_builder() epoch = feeder.make_epoch_variable() feed_dict_fn = feeder.get_feed_dict_fn() # First input feed_dict = feed_dict_fn() self.assertAllClose(feed_dict[epoch.name], [0]) # Second input feed_dict = feed_dict_fn() self.assertAllClose(feed_dict[epoch.name], [0]) # Third input feed_dict = feed_dict_fn() self.assertAllClose(feed_dict[epoch.name], [0]) # Back to the first input again, so new epoch. 
feed_dict = feed_dict_fn() self.assertAllClose(feed_dict[epoch.name], [1]) data = np.matrix([[1, 2], [2, 3], [3, 4]]) labels = np.array([0, 0, 1]) func(data_feeder.DataFeeder(data, labels, n_classes=0, batch_size=1)) func( data_feeder.DataFeeder( self._wrap_dict(data, 'in'), self._wrap_dict(labels, 'out'), n_classes=self._wrap_dict(0, 'out'), batch_size=1)) def test_data_feeder_multioutput_regression(self): def func(df): inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name') self._assertAllClose(out, [[3, 4], [1, 2]], feed_dict, 'name') x = np.matrix([[1, 2], [3, 4]]) y = np.array([[1, 2], [3, 4]]) func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=2)) func( data_feeder.DataFeeder( self._wrap_dict(x, 'in'), self._wrap_dict(y, 'out'), n_classes=self._wrap_dict(0, 'out'), batch_size=2)) def test_data_feeder_multioutput_classification(self): def func(df): inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name') self._assertAllClose( out, [[[0, 0, 1, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1]], [[1, 0, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 1, 0, 0]]], feed_dict, 'name') x = np.matrix([[1, 2], [3, 4]]) y = np.array([[0, 1, 2], [2, 3, 4]]) func(data_feeder.DataFeeder(x, y, n_classes=5, batch_size=2)) func( data_feeder.DataFeeder( self._wrap_dict(x, 'in'), self._wrap_dict(y, 'out'), n_classes=self._wrap_dict(5, 'out'), batch_size=2)) def test_streaming_data_feeder(self): def func(df): inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[[1, 2]], [[3, 4]]], feed_dict, 'name') self._assertAllClose(out, [[[1], [2]], [[2], [2]]], feed_dict, 'name') def x_iter(wrap_dict=False): yield np.array([[1, 2]]) if not wrap_dict else self._wrap_dict( np.array([[1, 2]]), 'in') yield np.array([[3, 4]]) if not wrap_dict else self._wrap_dict( np.array([[3, 4]]), 'in') def y_iter(wrap_dict=False): yield np.array([[1], [2]]) if not wrap_dict else self._wrap_dict( np.array([[1], [2]]), 'out') yield np.array([[2], [2]]) if not wrap_dict else self._wrap_dict( np.array([[2], [2]]), 'out') func( data_feeder.StreamingDataFeeder( x_iter(), y_iter(), n_classes=0, batch_size=2)) func( data_feeder.StreamingDataFeeder( x_iter(True), y_iter(True), n_classes=self._wrap_dict(0, 'out'), batch_size=2)) # Test non-full batches. func( data_feeder.StreamingDataFeeder( x_iter(), y_iter(), n_classes=0, batch_size=10)) func( data_feeder.StreamingDataFeeder( x_iter(True), y_iter(True), n_classes=self._wrap_dict(0, 'out'), batch_size=10)) def test_dask_data_feeder(self): if HAS_PANDAS and HAS_DASK: x = pd.DataFrame( dict( a=np.array([.1, .3, .4, .6, .2, .1, .6]), b=np.array([.7, .8, .1, .2, .5, .3, .9]))) x = dd.from_pandas(x, npartitions=2) y = pd.DataFrame(dict(labels=np.array([1, 0, 2, 1, 0, 1, 2]))) y = dd.from_pandas(y, npartitions=2) # TODO(ipolosukhin): Remove or restore this. 
# x = extract_dask_data(x) # y = extract_dask_labels(y) df = data_feeder.DaskDataFeeder(x, y, n_classes=2, batch_size=2) inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self.assertAllClose(feed_dict[inp.name], [[0.40000001, 0.1], [0.60000002, 0.2]]) self.assertAllClose(feed_dict[out.name], [[0., 0., 1.], [0., 1., 0.]]) def test_hdf5_data_feeder(self): def func(df): inp, out = df.input_builder() feed_dict_fn = df.get_feed_dict_fn() feed_dict = feed_dict_fn() self._assertAllClose(inp, [[3, 4], [1, 2]], feed_dict, 'name') self.assertAllClose(out, [2, 1], feed_dict, 'name') try: import h5py # pylint: disable=g-import-not-at-top x = np.matrix([[1, 2], [3, 4]]) y = np.array([1, 2]) h5f = h5py.File('test_hdf5.h5', 'w') h5f.create_dataset('x', data=x) h5f.create_dataset('y', data=y) h5f.close() h5f = h5py.File('test_hdf5.h5', 'r') x = h5f['x'] y = h5f['y'] func(data_feeder.DataFeeder(x, y, n_classes=0, batch_size=3)) func( data_feeder.DataFeeder( self._wrap_dict(x, 'in'), self._wrap_dict(y, 'out'), n_classes=self._wrap_dict(0, 'out'), batch_size=3)) except ImportError: print("Skipped test for hdf5 since it's not installed.") class SetupPredictDataFeederTest(DataFeederTest): """Tests for `DataFeeder.setup_predict_data_feeder`.""" def test_iterable_data(self): # pylint: disable=undefined-variable def func(df): self._assertAllClose(six.next(df), [[1, 2], [3, 4]]) self._assertAllClose(six.next(df), [[5, 6]]) data = [[1, 2], [3, 4], [5, 6]] x = iter(data) x_dict = iter([self._wrap_dict(v) for v in iter(data)]) func(data_feeder.setup_predict_data_feeder(x, batch_size=2)) func(data_feeder.setup_predict_data_feeder(x_dict, batch_size=2)) if __name__ == '__main__': test.main()
apache-2.0
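The DataFeeder contract exercised by the tests above is mini-batching with wraparound plus an epoch counter that ticks over when the cursor returns to the start of the data. A framework-free sketch of that contract in plain numpy (the generator name and exact semantics here are a simplification for illustration, not the contrib implementation):

import numpy as np

def batches(x, y, batch_size):
    """Yield (x_batch, y_batch, epoch) forever, wrapping around the data."""
    n, cursor, epoch = len(x), 0, 0
    while True:
        idx = [(cursor + i) % n for i in range(batch_size)]
        yield x[idx], y[idx], epoch
        cursor += batch_size
        if cursor >= n:        # wrapped past the end: one full pass completed
            cursor %= n
            epoch += 1

x = np.arange(10).reshape(5, 2)
y = np.arange(5)
feed = batches(x, y, batch_size=2)
for _ in range(4):
    xb, yb, epoch = next(feed)
    print(yb.tolist(), 'epoch', epoch)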
arahuja/scikit-learn
examples/ensemble/plot_ensemble_oob.py
259
3265
""" ============================= OOB Errors for Random Forests ============================= The ``RandomForestClassifier`` is trained using *bootstrap aggregation*, where each new tree is fit from a bootstrap sample of the training observations :math:`z_i = (x_i, y_i)`. The *out-of-bag* (OOB) error is the average error for each :math:`z_i` calculated using predictions from the trees that do not contain :math:`z_i` in their respective bootstrap sample. This allows the ``RandomForestClassifier`` to be fit and validated whilst being trained [1]. The example below demonstrates how the OOB error can be measured at the addition of each new tree during training. The resulting plot allows a practitioner to approximate a suitable value of ``n_estimators`` at which the error stabilizes. .. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical Learning Ed. 2", p592-593, Springer, 2009. """ import matplotlib.pyplot as plt from collections import OrderedDict from sklearn.datasets import make_classification from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier # Author: Kian Ho <[email protected]> # Gilles Louppe <[email protected]> # Andreas Mueller <[email protected]> # # License: BSD 3 Clause print(__doc__) RANDOM_STATE = 123 # Generate a binary classification dataset. X, y = make_classification(n_samples=500, n_features=25, n_clusters_per_class=1, n_informative=15, random_state=RANDOM_STATE) # NOTE: Setting the `warm_start` construction parameter to `True` disables # support for paralellised ensembles but is necessary for tracking the OOB # error trajectory during training. ensemble_clfs = [ ("RandomForestClassifier, max_features='sqrt'", RandomForestClassifier(warm_start=True, oob_score=True, max_features="sqrt", random_state=RANDOM_STATE)), ("RandomForestClassifier, max_features='log2'", RandomForestClassifier(warm_start=True, max_features='log2', oob_score=True, random_state=RANDOM_STATE)), ("RandomForestClassifier, max_features=None", RandomForestClassifier(warm_start=True, max_features=None, oob_score=True, random_state=RANDOM_STATE)) ] # Map a classifier name to a list of (<n_estimators>, <error rate>) pairs. error_rate = OrderedDict((label, []) for label, _ in ensemble_clfs) # Range of `n_estimators` values to explore. min_estimators = 15 max_estimators = 175 for label, clf in ensemble_clfs: for i in range(min_estimators, max_estimators + 1): clf.set_params(n_estimators=i) clf.fit(X, y) # Record the OOB error for each `n_estimators=i` setting. oob_error = 1 - clf.oob_score_ error_rate[label].append((i, oob_error)) # Generate the "OOB error rate" vs. "n_estimators" plot. for label, clf_err in error_rate.items(): xs, ys = zip(*clf_err) plt.plot(xs, ys, label=label) plt.xlim(min_estimators, max_estimators) plt.xlabel("n_estimators") plt.ylabel("OOB error rate") plt.legend(loc="upper right") plt.show()
bsd-3-clause
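The core trick in the example above is warm_start: re-fitting after raising n_estimators only grows the forest, so the OOB error can be sampled along the way instead of retraining from scratch. The essential loop, condensed to one classifier (a sketch of the same pattern, not a replacement for the full example):

from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier

X, y = make_classification(n_samples=500, n_features=25, n_informative=15,
                           n_clusters_per_class=1, random_state=123)
clf = RandomForestClassifier(warm_start=True, oob_score=True,
                             max_features='sqrt', random_state=123)
for n in (15, 50, 100, 175):
    clf.set_params(n_estimators=n)   # warm_start: only the new trees are fit
    clf.fit(X, y)
    print('n_estimators=%3d  OOB error=%.4f' % (n, 1 - clf.oob_score_))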
Lawrence-Liu/scikit-learn
sklearn/qda.py
140
7682
""" Quadratic Discriminant Analysis """ # Author: Matthieu Perrot <[email protected]> # # License: BSD 3 clause import warnings import numpy as np from .base import BaseEstimator, ClassifierMixin from .externals.six.moves import xrange from .utils import check_array, check_X_y from .utils.validation import check_is_fitted from .utils.fixes import bincount __all__ = ['QDA'] class QDA(BaseEstimator, ClassifierMixin): """ Quadratic Discriminant Analysis (QDA) A classifier with a quadratic decision boundary, generated by fitting class conditional densities to the data and using Bayes' rule. The model fits a Gaussian density to each class. Read more in the :ref:`User Guide <lda_qda>`. Parameters ---------- priors : array, optional, shape = [n_classes] Priors on classes reg_param : float, optional Regularizes the covariance estimate as ``(1-reg_param)*Sigma + reg_param*np.eye(n_features)`` Attributes ---------- covariances_ : list of array-like, shape = [n_features, n_features] Covariance matrices of each class. means_ : array-like, shape = [n_classes, n_features] Class means. priors_ : array-like, shape = [n_classes] Class priors (sum to 1). rotations_ : list of arrays For each class k an array of shape [n_features, n_k], with ``n_k = min(n_features, number of elements in class k)`` It is the rotation of the Gaussian distribution, i.e. its principal axis. scalings_ : list of arrays For each class k an array of shape [n_k]. It contains the scaling of the Gaussian distributions along its principal axes, i.e. the variance in the rotated coordinate system. Examples -------- >>> from sklearn.qda import QDA >>> import numpy as np >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]]) >>> y = np.array([1, 1, 1, 2, 2, 2]) >>> clf = QDA() >>> clf.fit(X, y) QDA(priors=None, reg_param=0.0) >>> print(clf.predict([[-0.8, -1]])) [1] See also -------- sklearn.lda.LDA: Linear discriminant analysis """ def __init__(self, priors=None, reg_param=0.): self.priors = np.asarray(priors) if priors is not None else None self.reg_param = reg_param def fit(self, X, y, store_covariances=False, tol=1.0e-4): """ Fit the QDA model according to the given training data and parameters. Parameters ---------- X : array-like, shape = [n_samples, n_features] Training vector, where n_samples in the number of samples and n_features is the number of features. y : array, shape = [n_samples] Target values (integers) store_covariances : boolean If True the covariance matrices are computed and stored in the `self.covariances_` attribute. tol : float, optional, default 1.0e-4 Threshold used for rank estimation. """ X, y = check_X_y(X, y) self.classes_, y = np.unique(y, return_inverse=True) n_samples, n_features = X.shape n_classes = len(self.classes_) if n_classes < 2: raise ValueError('y has less than 2 classes') if self.priors is None: self.priors_ = bincount(y) / float(n_samples) else: self.priors_ = self.priors cov = None if store_covariances: cov = [] means = [] scalings = [] rotations = [] for ind in xrange(n_classes): Xg = X[y == ind, :] meang = Xg.mean(0) means.append(meang) if len(Xg) == 1: raise ValueError('y has only 1 sample in class %s, covariance ' 'is ill defined.' 
% str(self.classes_[ind])) Xgc = Xg - meang # Xgc = U * S * V.T U, S, Vt = np.linalg.svd(Xgc, full_matrices=False) rank = np.sum(S > tol) if rank < n_features: warnings.warn("Variables are collinear") S2 = (S ** 2) / (len(Xg) - 1) S2 = ((1 - self.reg_param) * S2) + self.reg_param if store_covariances: # cov = V * (S^2 / (n-1)) * V.T cov.append(np.dot(S2 * Vt.T, Vt)) scalings.append(S2) rotations.append(Vt.T) if store_covariances: self.covariances_ = cov self.means_ = np.asarray(means) self.scalings_ = scalings self.rotations_ = rotations return self def _decision_function(self, X): check_is_fitted(self, 'classes_') X = check_array(X) norm2 = [] for i in range(len(self.classes_)): R = self.rotations_[i] S = self.scalings_[i] Xm = X - self.means_[i] X2 = np.dot(Xm, R * (S ** (-0.5))) norm2.append(np.sum(X2 ** 2, 1)) norm2 = np.array(norm2).T # shape = [len(X), n_classes] u = np.asarray([np.sum(np.log(s)) for s in self.scalings_]) return (-0.5 * (norm2 + u) + np.log(self.priors_)) def decision_function(self, X): """Apply decision function to an array of samples. Parameters ---------- X : array-like, shape = [n_samples, n_features] Array of samples (test vectors). Returns ------- C : array, shape = [n_samples, n_classes] or [n_samples,] Decision function values related to each class, per sample. In the two-class case, the shape is [n_samples,], giving the log likelihood ratio of the positive class. """ dec_func = self._decision_function(X) # handle special case of two classes if len(self.classes_) == 2: return dec_func[:, 1] - dec_func[:, 0] return dec_func def predict(self, X): """Perform classification on an array of test vectors X. The predicted class C for each sample in X is returned. Parameters ---------- X : array-like, shape = [n_samples, n_features] Returns ------- C : array, shape = [n_samples] """ d = self._decision_function(X) y_pred = self.classes_.take(d.argmax(1)) return y_pred def predict_proba(self, X): """Return posterior probabilities of classification. Parameters ---------- X : array-like, shape = [n_samples, n_features] Array of samples/test vectors. Returns ------- C : array, shape = [n_samples, n_classes] Posterior probabilities of classification per class. """ values = self._decision_function(X) # compute the likelihood of the underlying gaussian models # up to a multiplicative constant. likelihood = np.exp(values - values.max(axis=1)[:, np.newaxis]) # compute posterior probabilities return likelihood / likelihood.sum(axis=1)[:, np.newaxis] def predict_log_proba(self, X): """Return posterior probabilities of classification. Parameters ---------- X : array-like, shape = [n_samples, n_features] Array of samples/test vectors. Returns ------- C : array, shape = [n_samples, n_classes] Posterior log-probabilities of classification per class. """ # XXX : can do better to avoid precision overflows probas_ = self.predict_proba(X) return np.log(probas_)
bsd-3-clause
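A note on `_decision_function` above: for each class it computes a whitened squared distance to the class mean, then adds the log-determinant of the class covariance and the log prior. Below is a minimal NumPy sketch of that same computation, assuming the `means_`, `rotations_`, `scalings_`, and `priors_` attributes of an already-fitted estimator are passed in; it illustrates the math and is not the library code itself.

import numpy as np

def qda_log_posterior(X, means, rotations, scalings, priors):
    # Mirrors QDA._decision_function: whiten each centered sample with the
    # class rotation and per-axis variances, then score with the squared
    # norm, the log-determinant term, and the log prior.
    norm2 = []
    for mean, R, S in zip(means, rotations, scalings):
        Xm = X - mean
        X2 = np.dot(Xm, R * (S ** (-0.5)))
        norm2.append(np.sum(X2 ** 2, axis=1))
    norm2 = np.array(norm2).T  # shape (n_samples, n_classes)
    log_det = np.array([np.sum(np.log(s)) for s in scalings])
    return -0.5 * (norm2 + log_det) + np.log(priors)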
rlung/random
angler.py
1
2727
#!/usr/bin/env python

import matplotlib.pyplot as plt
import numpy as np
from argparse import ArgumentParser, RawTextHelpFormatter


def R(angle):
    '''Rotation matrix'''
    c, s = np.cos(angle), np.sin(angle)
    rot = np.array([[c, -s],
                    [s, c]])
    return rot


def angler(angle, x, y, degrees=False, plot=False):
    '''Calculate new stereotactic coordinates for angled axis'''
    # Convert to radians if necessary
    if degrees:
        angle = np.radians(angle)

    # New coordinates
    x_angled, y_angled = R(angle).dot([[x], [y]])
    x_angled, y_angled = float(x_angled), float(y_angled)
    # y_angled = np.cos(angle) * (y - x * np.tan(angle))
    # x_angled = x / np.cos(angle) + np.tan(angle) * y_angled

    if plot:
        fig, ax = plt.subplots()

        # Target
        ax.scatter([x], [y], marker='x', s=125, lw=5, c='k', label='target')
        ax.annotate(
            'old: ({:.3f}, {:.3f})\n'
            'new: ({:.3f}, {:.3f})'.format(x, y, x_angled, y_angled),
            xy=(x, y), xytext=(x - 1, y + 1), ha='right',
            arrowprops=dict(facecolor='black', width=2, headwidth=10, shrink=0.1),
        )

        # Non-angled path
        opts_old = {'ls': '--', 'lw': 2, 'c': 'k'}
        ax.plot([0, x, x], [0, 0, y], label='Normal path', **opts_old)

        # Angled path
        opts_angled = {'ls': '--', 'lw': 2}
        i, j = R(-angle).dot([[x_angled], [0]])
        ax.plot([0, i, x], [0, j, y], label='Angled path', **opts_angled)
        # ax.plot(
        #     [0, x_angled * np.cos(angle), x],
        #     [0, x_angled * -np.sin(angle), -y],
        #     label='Angled path', **opts_angled
        # )

        # Finalize figure and show
        ax.axis('equal')
        ax.legend()
        ax.spines['left'].set_position('zero')
        ax.spines['bottom'].set_position('zero')
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
        plt.show()

    return x_angled, y_angled


def main():
    parser = ArgumentParser(
        description="Calculate new stereotactic coordinates for an angled axis of entry",
        formatter_class=RawTextHelpFormatter
    )
    parser.add_argument('angle', help='Angle of entry')
    parser.add_argument('x', help='x coordinate of target')
    parser.add_argument('y', help='y coordinate of target')
    parser.add_argument('-d', '--degrees', default=False, action='store_true')
    opts = parser.parse_args()

    angle = float(opts.angle)
    x = float(opts.x)
    y = float(opts.y)
    x_angled, y_angled = angler(angle, x, y, opts.degrees, plot=True)
    print('New coordinates angled at {}:\ny: {}\nx: {}'
          .format(angle, y_angled, x_angled))


if __name__ == '__main__':
    main()
gpl-3.0
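For context, the coordinate transform in `angler` is nothing more than the 2-D rotation from `R(angle)` applied to the target point. A standalone sketch follows; the entry angle and target coordinates are hypothetical values for illustration.

import numpy as np

angle = np.radians(20)           # hypothetical 20-degree entry angle
x, y = 1.2, -4.5                 # hypothetical target coordinates
c, s = np.cos(angle), np.sin(angle)
R = np.array([[c, -s],
              [s, c]])           # same rotation matrix as R(angle) above
x_new, y_new = R.dot([x, y])
print(x_new, y_new)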
NicovincX2/Python-3.5
Algorithmique/Optimisation/Algorithme d'optimisation/Métaheuristique/Stratégie d'évolution/Covariance Matrix Adaptation Evolution Strategy/cma_mo.py
1
4235
# -*- coding: utf-8 -*- import os # This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see <http://www.gnu.org/licenses/>. import numpy from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import hypervolume from deap import cma from deap import creator from deap import tools # Problem size N = 5 # ZDT1, ZDT2, DTLZ2 MIN_BOUND = numpy.zeros(N) MAX_BOUND = numpy.ones(N) # Kursawe # MIN_BOUND = numpy.zeros(N) - 5 # MAX_BOUND = numpy.zeros(N) + 5 creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) def distance(feasible_ind, original_ind): """A distance function to the feasibility region.""" return sum((f - o)**2 for f, o in zip(feasible_ind, original_ind)) def closest_feasible(individual): """A function returning a valid individual from an invalid one.""" feasible_ind = numpy.array(individual) feasible_ind = numpy.maximum(MIN_BOUND, feasible_ind) feasible_ind = numpy.minimum(MAX_BOUND, feasible_ind) return feasible_ind def valid(individual): """Determines if the individual is valid or not.""" if any(individual < MIN_BOUND) or any(individual > MAX_BOUND): return False return True toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.zdt1) toolbox.decorate("evaluate", tools.ClosestValidPenality( valid, closest_feasible, 1.0e-6, distance)) def main(): # The cma module uses the numpy random number generator # numpy.random.seed(128) MU, LAMBDA = 10, 10 NGEN = 500 verbose = True # The MO-CMA-ES algorithm takes a full population as argument population = [creator.Individual(x) for x in ( numpy.random.uniform(0, 1, (MU, N)))] for ind in population: ind.fitness.values = toolbox.evaluate(ind) strategy = cma.StrategyMultiObjective( population, sigma=1.0, mu=MU, lambda_=LAMBDA) toolbox.register("generate", strategy.generate, creator.Individual) toolbox.register("update", strategy.update) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = ["gen", "nevals"] + (stats.fields if stats else []) for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit # Update the strategy with the evaluated individuals toolbox.update(population) record = stats.compile(population) if stats is not None else {} logbook.record(gen=gen, nevals=len(population), **record) if verbose: print(logbook.stream) if verbose: print("Final population hypervolume is %f" % hypervolume(strategy.parents, [11.0, 11.0])) # import matplotlib.pyplot as plt # valid_front = numpy.array([ind.fitness.values for ind in strategy.parents if valid(ind)]) # invalid_front = numpy.array([ind.fitness.values for ind in strategy.parents if not valid(ind)]) # fig = 
plt.figure() # if len(valid_front) > 0: # plt.scatter(valid_front[:,0], valid_front[:,1], c="g") # if len(invalid_front) > 0: # plt.scatter(invalid_front[:,0], invalid_front[:,1], c="r") # plt.show() return strategy.parents if __name__ == "__main__": solutions = main() os.system("pause")
gpl-3.0
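The `distance`/`closest_feasible`/`valid` trio above exists to support a penalty decorator on the evaluation function. The sketch below shows only the underlying idea (clip to the feasible box, evaluate there, add a weighted squared distance back to the original point); it is not DEAP's `ClosestValidPenality` implementation, and `evaluate` is assumed to return a tuple of objective values.

import numpy as np

def penalized(evaluate, individual, lower, upper, alpha=1.0e-6):
    # Clip the individual into the feasible box, evaluate the feasible
    # point, then penalize by the squared distance to the original point.
    x = np.asarray(individual)
    feasible = np.clip(x, lower, upper)
    fitness = evaluate(feasible)
    penalty = alpha * np.sum((feasible - x) ** 2)
    return tuple(f + penalty for f in fitness)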
zrhans/pythonanywhere
.virtualenvs/django19/lib/python3.4/site-packages/pandas/tseries/tests/test_util.py
10
3569
from pandas.compat import range import nose import numpy as np from numpy.testing.decorators import slow from pandas import Series, date_range import pandas.util.testing as tm from datetime import datetime, date from pandas.tseries.tools import normalize_date from pandas.tseries.util import pivot_annual, isleapyear class TestPivotAnnual(tm.TestCase): """ New pandas of scikits.timeseries pivot_annual """ def test_daily(self): rng = date_range('1/1/2000', '12/31/2004', freq='D') ts = Series(np.random.randn(len(rng)), index=rng) annual = pivot_annual(ts, 'D') doy = ts.index.dayofyear doy[(~isleapyear(ts.index.year)) & (doy >= 60)] += 1 for i in range(1, 367): subset = ts[doy == i] subset.index = [x.year for x in subset.index] result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) self.assertEqual(result.name, i) # check leap days leaps = ts[(ts.index.month == 2) & (ts.index.day == 29)] day = leaps.index.dayofyear[0] leaps.index = leaps.index.year leaps.name = 60 tm.assert_series_equal(annual[day].dropna(), leaps) def test_hourly(self): rng_hourly = date_range( '1/1/1994', periods=(18 * 8760 + 4 * 24), freq='H') data_hourly = np.random.randint(100, 350, rng_hourly.size) ts_hourly = Series(data_hourly, index=rng_hourly) grouped = ts_hourly.groupby(ts_hourly.index.year) hoy = grouped.apply(lambda x: x.reset_index(drop=True)) hoy = hoy.index.droplevel(0).values hoy[~isleapyear(ts_hourly.index.year) & (hoy >= 1416)] += 24 hoy += 1 annual = pivot_annual(ts_hourly) ts_hourly = ts_hourly.astype(float) for i in [1, 1416, 1417, 1418, 1439, 1440, 1441, 8784]: subset = ts_hourly[hoy == i] subset.index = [x.year for x in subset.index] result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) self.assertEqual(result.name, i) leaps = ts_hourly[(ts_hourly.index.month == 2) & (ts_hourly.index.day == 29) & (ts_hourly.index.hour == 0)] hour = leaps.index.dayofyear[0] * 24 - 23 leaps.index = leaps.index.year leaps.name = 1417 tm.assert_series_equal(annual[hour].dropna(), leaps) def test_weekly(self): pass def test_monthly(self): rng = date_range('1/1/2000', '12/31/2004', freq='M') ts = Series(np.random.randn(len(rng)), index=rng) annual = pivot_annual(ts, 'M') month = ts.index.month for i in range(1, 13): subset = ts[month == i] subset.index = [x.year for x in subset.index] result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) self.assertEqual(result.name, i) def test_period_monthly(self): pass def test_period_daily(self): pass def test_period_weekly(self): pass def test_normalize_date(): value = date(2012, 9, 7) result = normalize_date(value) assert(result == datetime(2012, 9, 7)) value = datetime(2012, 9, 7, 12) result = normalize_date(value) assert(result == datetime(2012, 9, 7)) if __name__ == '__main__': nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'], exit=False)
apache-2.0
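For readers who have not used `pivot_annual`: it reshapes a time series into a (period-within-year x year) table, which is what the loops above verify column by column. A rough hand-rolled equivalent for the daily case, ignoring the leap-day shift the test handles explicitly:

import numpy as np
import pandas as pd

rng = pd.date_range('2000-01-01', '2001-12-31', freq='D')
ts = pd.Series(np.random.randn(len(rng)), index=rng)
# Rows become day-of-year, columns become years.
table = ts.groupby([ts.index.dayofyear, ts.index.year]).first().unstack()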
nmartensen/pandas
pandas/tests/indexes/timedeltas/test_tools.py
6
7568
import pytest from datetime import time, timedelta import numpy as np import pandas as pd import pandas.util.testing as tm from pandas.util.testing import assert_series_equal from pandas import (Series, Timedelta, to_timedelta, isna, TimedeltaIndex) from pandas._libs.tslib import iNaT class TestTimedeltas(object): _multiprocess_can_split_ = True def test_to_timedelta(self): def conv(v): return v.astype('m8[ns]') d1 = np.timedelta64(1, 'D') assert (to_timedelta('1 days 06:05:01.00003', box=False) == conv(d1 + np.timedelta64(6 * 3600 + 5 * 60 + 1, 's') + np.timedelta64(30, 'us'))) assert (to_timedelta('15.5us', box=False) == conv(np.timedelta64(15500, 'ns'))) # empty string result = to_timedelta('', box=False) assert result.astype('int64') == iNaT result = to_timedelta(['', '']) assert isna(result).all() # pass thru result = to_timedelta(np.array([np.timedelta64(1, 's')])) expected = pd.Index(np.array([np.timedelta64(1, 's')])) tm.assert_index_equal(result, expected) # ints result = np.timedelta64(0, 'ns') expected = to_timedelta(0, box=False) assert result == expected # Series expected = Series([timedelta(days=1), timedelta(days=1, seconds=1)]) result = to_timedelta(Series(['1d', '1days 00:00:01'])) tm.assert_series_equal(result, expected) # with units result = TimedeltaIndex([np.timedelta64(0, 'ns'), np.timedelta64( 10, 's').astype('m8[ns]')]) expected = to_timedelta([0, 10], unit='s') tm.assert_index_equal(result, expected) # single element conversion v = timedelta(seconds=1) result = to_timedelta(v, box=False) expected = np.timedelta64(timedelta(seconds=1)) assert result == expected v = np.timedelta64(timedelta(seconds=1)) result = to_timedelta(v, box=False) expected = np.timedelta64(timedelta(seconds=1)) assert result == expected # arrays of various dtypes arr = np.array([1] * 5, dtype='int64') result = to_timedelta(arr, unit='s') expected = TimedeltaIndex([np.timedelta64(1, 's')] * 5) tm.assert_index_equal(result, expected) arr = np.array([1] * 5, dtype='int64') result = to_timedelta(arr, unit='m') expected = TimedeltaIndex([np.timedelta64(1, 'm')] * 5) tm.assert_index_equal(result, expected) arr = np.array([1] * 5, dtype='int64') result = to_timedelta(arr, unit='h') expected = TimedeltaIndex([np.timedelta64(1, 'h')] * 5) tm.assert_index_equal(result, expected) arr = np.array([1] * 5, dtype='timedelta64[s]') result = to_timedelta(arr) expected = TimedeltaIndex([np.timedelta64(1, 's')] * 5) tm.assert_index_equal(result, expected) arr = np.array([1] * 5, dtype='timedelta64[D]') result = to_timedelta(arr) expected = TimedeltaIndex([np.timedelta64(1, 'D')] * 5) tm.assert_index_equal(result, expected) # Test with lists as input when box=false expected = np.array(np.arange(3) * 1000000000, dtype='timedelta64[ns]') result = to_timedelta(range(3), unit='s', box=False) tm.assert_numpy_array_equal(expected, result) result = to_timedelta(np.arange(3), unit='s', box=False) tm.assert_numpy_array_equal(expected, result) result = to_timedelta([0, 1, 2], unit='s', box=False) tm.assert_numpy_array_equal(expected, result) # Tests with fractional seconds as input: expected = np.array( [0, 500000000, 800000000, 1200000000], dtype='timedelta64[ns]') result = to_timedelta([0., 0.5, 0.8, 1.2], unit='s', box=False) tm.assert_numpy_array_equal(expected, result) def test_to_timedelta_invalid(self): # bad value for errors parameter msg = "errors must be one of" tm.assert_raises_regex(ValueError, msg, to_timedelta, ['foo'], errors='never') # these will error pytest.raises(ValueError, lambda: to_timedelta([1, 2], 
unit='foo')) pytest.raises(ValueError, lambda: to_timedelta(1, unit='foo')) # time not supported ATM pytest.raises(ValueError, lambda: to_timedelta(time(second=1))) assert to_timedelta(time(second=1), errors='coerce') is pd.NaT pytest.raises(ValueError, lambda: to_timedelta(['foo', 'bar'])) tm.assert_index_equal(TimedeltaIndex([pd.NaT, pd.NaT]), to_timedelta(['foo', 'bar'], errors='coerce')) tm.assert_index_equal(TimedeltaIndex(['1 day', pd.NaT, '1 min']), to_timedelta(['1 day', 'bar', '1 min'], errors='coerce')) # gh-13613: these should not error because errors='ignore' invalid_data = 'apple' assert invalid_data == to_timedelta(invalid_data, errors='ignore') invalid_data = ['apple', '1 days'] tm.assert_numpy_array_equal( np.array(invalid_data, dtype=object), to_timedelta(invalid_data, errors='ignore')) invalid_data = pd.Index(['apple', '1 days']) tm.assert_index_equal(invalid_data, to_timedelta( invalid_data, errors='ignore')) invalid_data = Series(['apple', '1 days']) tm.assert_series_equal(invalid_data, to_timedelta( invalid_data, errors='ignore')) def test_to_timedelta_via_apply(self): # GH 5458 expected = Series([np.timedelta64(1, 's')]) result = Series(['00:00:01']).apply(to_timedelta) tm.assert_series_equal(result, expected) result = Series([to_timedelta('00:00:01')]) tm.assert_series_equal(result, expected) def test_to_timedelta_on_missing_values(self): # GH5438 timedelta_NaT = np.timedelta64('NaT') actual = pd.to_timedelta(Series(['00:00:01', np.nan])) expected = Series([np.timedelta64(1000000000, 'ns'), timedelta_NaT], dtype='<m8[ns]') assert_series_equal(actual, expected) actual = pd.to_timedelta(Series(['00:00:01', pd.NaT])) assert_series_equal(actual, expected) actual = pd.to_timedelta(np.nan) assert actual.value == timedelta_NaT.astype('int64') actual = pd.to_timedelta(pd.NaT) assert actual.value == timedelta_NaT.astype('int64') def test_to_timedelta_on_nanoseconds(self): # GH 9273 result = Timedelta(nanoseconds=100) expected = Timedelta('100ns') assert result == expected result = Timedelta(days=1, hours=1, minutes=1, weeks=1, seconds=1, milliseconds=1, microseconds=1, nanoseconds=1) expected = Timedelta(694861001001001) assert result == expected result = Timedelta(microseconds=1) + Timedelta(nanoseconds=1) expected = Timedelta('1us1ns') assert result == expected result = Timedelta(microseconds=1) - Timedelta(nanoseconds=1) expected = Timedelta('999ns') assert result == expected result = Timedelta(microseconds=1) + 5 * Timedelta(nanoseconds=-2) expected = Timedelta('990ns') assert result == expected pytest.raises(TypeError, lambda: Timedelta(nanoseconds='abc'))
bsd-3-clause
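A few representative `to_timedelta` conversions exercised by the tests above, shown standalone:

import pandas as pd

pd.to_timedelta('1 days 06:05:01.00003')   # string parsing
pd.to_timedelta([0, 10], unit='s')         # numbers plus a unit -> TimedeltaIndex
pd.to_timedelta(['1 day', 'bar', '1 min'],
                errors='coerce')           # unparseable entries become NaT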
DHI-GRAS/processing_SWAT
OSFWF_Assimilate_d.py
2
4679
""" *************************************************************************** OSFWF_Assimilate_d.py ------------------------------------- Copyright (C) 2014 TIGER-NET (www.tiger-net.org) *************************************************************************** * This plugin is part of the Water Observation Information System (WOIS) * * developed under the TIGER-NET project funded by the European Space * * Agency as part of the long-term TIGER initiative aiming at promoting * * the use of Earth Observation (EO) for improved Integrated Water * * Resources Management (IWRM) in Africa. * * * * WOIS is a free software i.e. you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published * * by the Free Software Foundation, either version 3 of the License, * * or (at your option) any later version. * * * * WOIS is distributed in the hope that it will be useful, but WITHOUT ANY * * WARRANTY; without even the implied warranty of MERCHANTABILITY or * * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * * for more details. * * * * You should have received a copy of the GNU General Public License along * * with this program. If not, see <http://www.gnu.org/licenses/>. * *************************************************************************** """ import os from datetime import date, timedelta from matplotlib.pylab import * import subprocess from PyQt4 import QtGui from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException from read_SWAT_out import read_SWAT_time from SWAT_output_format_specs import SWAT_output_format_specs from ASS_utilities import ReadNoSubs import ASS_module3_Assimilation from processing.core.parameters import * from SWATAlgorithm import SWATAlgorithm OUTSPECS = SWAT_output_format_specs() class OSFWF_Assimilate_d(SWATAlgorithm): SRC_FOLDER = "SRC_FOLDER" MOD_DESC = "MOD_DESC" ASS_FOLDER = "ASS_FOLDER" STARTDATE = "STARTDATE" OBS_FILE = "OBS_FILE" def __init__(self): super(OSFWF_Assimilate_d, self).__init__(__file__) def defineCharacteristics(self): self.name = "4.4 - Assimilate observations (OSFWF) - run assimilation" self.group = "Operational simulation and forecasting workflow (OSFWF)" self.addParameter(ParameterFile(OSFWF_Assimilate_d.SRC_FOLDER, "Select model source folder", True)) self.addParameter(ParameterFile(OSFWF_Assimilate_d.MOD_DESC, "Select model description file", False, False)) self.addParameter(ParameterFile(OSFWF_Assimilate_d.ASS_FOLDER, "Select assimilation folder", True)) self.addParameter(ParameterString(OSFWF_Assimilate_d.STARTDATE, "Issue date (yyyy-mm-dd)", str(date.today()))) self.addParameter(ParameterFile(OSFWF_Assimilate_d.OBS_FILE, "File with observation data (date, obs, measurement error, reach ID)", False)) def processAlgorithm(self, progress): SRC_FOLDER = self.getParameterValue(OSFWF_Assimilate_d.SRC_FOLDER) MOD_DESC = self.getParameterValue(OSFWF_Assimilate_d.MOD_DESC) ASS_FOLDER = self.getParameterValue(OSFWF_Assimilate_d.ASS_FOLDER) STARTDATE = self.getParameterValue(OSFWF_Assimilate_d.STARTDATE) OBS_FILE = self.getParameterValue(OSFWF_Assimilate_d.OBS_FILE) # Extract total number of subbasins from model description file NBRCH = ReadNoSubs(MOD_DESC) # Get startdate from SWAT file.cio SWAT_time_info = read_SWAT_time(SRC_FOLDER) SWAT_startdate = date2num(date(int(SWAT_time_info[1]),1,1) + timedelta(days=int(SWAT_time_info[2])-1)) if SWAT_time_info[4] > 0: # Account for NYSKIP>0 SWAT_startdate = 
date2num(date(int(SWAT_time_info[1]+SWAT_time_info[4]),1,1)) SWAT_enddate = date2num(date(int(SWAT_time_info[0]+SWAT_time_info[1]-1),1,1)) + SWAT_time_info[3]-1 # Assimilation startdate is equal to SWAT start date ASS_startdate = SWAT_startdate # Assimilation enddate is equal to SWAT end date ASS_enddate = SWAT_enddate ASS_module3_Assimilation.kf_flows(OBS_FILE, ASS_FOLDER, NBRCH, ASS_enddate, ASS_startdate, SWAT_enddate, SWAT_startdate)
gpl-3.0
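The `file.cio` date arithmetic above is terse. An unpacked sketch follows; the field meanings (NBYR, IYR, IDAF, IDAL, NYSKIP) and the sample values are assumptions for illustration only.

from datetime import date, timedelta
from matplotlib.dates import date2num

nbyr, iyr, idaf, idal, nyskip = 5, 2000, 1, 365, 1   # hypothetical values
start = date2num(date(iyr, 1, 1) + timedelta(days=idaf - 1))
if nyskip > 0:
    # Skipped warm-up years push the effective start date forward.
    start = date2num(date(iyr + nyskip, 1, 1))
end = date2num(date(nbyr + iyr - 1, 1, 1)) + idal - 1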
quheng/scikit-learn
examples/calibration/plot_calibration_curve.py
225
5903
""" ============================== Probability Calibration curves ============================== When performing classification one often wants to predict not only the class label, but also the associated probability. This probability gives some kind of confidence on the prediction. This example demonstrates how to display how well calibrated the predicted probabilities are and how to calibrate an uncalibrated classifier. The experiment is performed on an artificial dataset for binary classification with 100.000 samples (1.000 of them are used for model fitting) with 20 features. Of the 20 features, only 2 are informative and 10 are redundant. The first figure shows the estimated probabilities obtained with logistic regression, Gaussian naive Bayes, and Gaussian naive Bayes with both isotonic calibration and sigmoid calibration. The calibration performance is evaluated with Brier score, reported in the legend (the smaller the better). One can observe here that logistic regression is well calibrated while raw Gaussian naive Bayes performs very badly. This is because of the redundant features which violate the assumption of feature-independence and result in an overly confident classifier, which is indicated by the typical transposed-sigmoid curve. Calibration of the probabilities of Gaussian naive Bayes with isotonic regression can fix this issue as can be seen from the nearly diagonal calibration curve. Sigmoid calibration also improves the brier score slightly, albeit not as strongly as the non-parametric isotonic regression. This can be attributed to the fact that we have plenty of calibration data such that the greater flexibility of the non-parametric model can be exploited. The second figure shows the calibration curve of a linear support-vector classifier (LinearSVC). LinearSVC shows the opposite behavior as Gaussian naive Bayes: the calibration curve has a sigmoid curve, which is typical for an under-confident classifier. In the case of LinearSVC, this is caused by the margin property of the hinge loss, which lets the model focus on hard samples that are close to the decision boundary (the support vectors). Both kinds of calibration can fix this issue and yield nearly identical results. This shows that sigmoid calibration can deal with situations where the calibration curve of the base classifier is sigmoid (e.g., for LinearSVC) but not where it is transposed-sigmoid (e.g., Gaussian naive Bayes). """ print(__doc__) # Author: Alexandre Gramfort <[email protected]> # Jan Hendrik Metzen <[email protected]> # License: BSD Style. import matplotlib.pyplot as plt from sklearn import datasets from sklearn.naive_bayes import GaussianNB from sklearn.svm import LinearSVC from sklearn.linear_model import LogisticRegression from sklearn.metrics import (brier_score_loss, precision_score, recall_score, f1_score) from sklearn.calibration import CalibratedClassifierCV, calibration_curve from sklearn.cross_validation import train_test_split # Create dataset of classification task with many redundant and few # informative features X, y = datasets.make_classification(n_samples=100000, n_features=20, n_informative=2, n_redundant=10, random_state=42) X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.99, random_state=42) def plot_calibration_curve(est, name, fig_index): """Plot calibration curve for est w/o and with calibration. 
""" # Calibrated with isotonic calibration isotonic = CalibratedClassifierCV(est, cv=2, method='isotonic') # Calibrated with sigmoid calibration sigmoid = CalibratedClassifierCV(est, cv=2, method='sigmoid') # Logistic regression with no calibration as baseline lr = LogisticRegression(C=1., solver='lbfgs') fig = plt.figure(fig_index, figsize=(10, 10)) ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2) ax2 = plt.subplot2grid((3, 1), (2, 0)) ax1.plot([0, 1], [0, 1], "k:", label="Perfectly calibrated") for clf, name in [(lr, 'Logistic'), (est, name), (isotonic, name + ' + Isotonic'), (sigmoid, name + ' + Sigmoid')]: clf.fit(X_train, y_train) y_pred = clf.predict(X_test) if hasattr(clf, "predict_proba"): prob_pos = clf.predict_proba(X_test)[:, 1] else: # use decision function prob_pos = clf.decision_function(X_test) prob_pos = \ (prob_pos - prob_pos.min()) / (prob_pos.max() - prob_pos.min()) clf_score = brier_score_loss(y_test, prob_pos, pos_label=y.max()) print("%s:" % name) print("\tBrier: %1.3f" % (clf_score)) print("\tPrecision: %1.3f" % precision_score(y_test, y_pred)) print("\tRecall: %1.3f" % recall_score(y_test, y_pred)) print("\tF1: %1.3f\n" % f1_score(y_test, y_pred)) fraction_of_positives, mean_predicted_value = \ calibration_curve(y_test, prob_pos, n_bins=10) ax1.plot(mean_predicted_value, fraction_of_positives, "s-", label="%s (%1.3f)" % (name, clf_score)) ax2.hist(prob_pos, range=(0, 1), bins=10, label=name, histtype="step", lw=2) ax1.set_ylabel("Fraction of positives") ax1.set_ylim([-0.05, 1.05]) ax1.legend(loc="lower right") ax1.set_title('Calibration plots (reliability curve)') ax2.set_xlabel("Mean predicted value") ax2.set_ylabel("Count") ax2.legend(loc="upper center", ncol=2) plt.tight_layout() # Plot calibration cuve for Gaussian Naive Bayes plot_calibration_curve(GaussianNB(), "Naive Bayes", 1) # Plot calibration cuve for Linear SVC plot_calibration_curve(LinearSVC(), "SVC", 2) plt.show()
bsd-3-clause
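Standalone usage of the two measurement functions the example leans on, shown on a tiny made-up set of labels and predicted probabilities:

import numpy as np
from sklearn.metrics import brier_score_loss
from sklearn.calibration import calibration_curve

y_true = np.array([0, 0, 1, 1, 1, 0])
prob_pos = np.array([0.1, 0.4, 0.8, 0.9, 0.6, 0.3])
score = brier_score_loss(y_true, prob_pos)   # mean squared error of the probabilities
frac_pos, mean_pred = calibration_curve(y_true, prob_pos, n_bins=2)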
David-Estevez/hormodular
bin/evaluate.py
1
6440
""" Evaluate Evaluates an individual based on the milestone file Part of the hormodular project """ from __future__ import division import wx # For GUI from xml.dom.minidom import parseString # for parsing xml files import subprocess import matplotlib matplotlib.use('WXAgg') import pylab # for plotting nice fitness graphs import os from individual import Individual __author__ = 'David Estevez-Fernandez' __license__ = 'GPLv3' class MainWin( wx.Frame): def __init__( self, parent): # Program variables initialization self.individuals = [] self.evaluationProgramPath = '/home/def/Repositories/hormodular/src/build-qt/evaluate-gaits' self.gaitTableFile = '/home/def/Repositories/hormodular/data/gait tables/evaluation.txt' self.robotSceneFile = '' self.logFile = '' # Construct the window super( MainWin, self).__init__( parent, title='hormodular - evaluate', size = ( 400, 300), style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER) self.InitUI() self.Centre() self.Show() def InitUI( self): """ Definition of the UI """ self.panel = wx.Panel(self) font = wx.SystemSettings_GetFont(wx.SYS_SYSTEM_FONT) font.SetPointSize(9) #Textbox to put the filepath of the milestone file self.fileBox = wx.TextCtrl( self.panel, pos=( 10, 10), size=( 400-10-100-10, 25)) # Button for loading the file self.loadButton = wx.Button( self.panel, label='Load', pos=(400-10-100, 10), size=( 100, 25)) self.loadButton.Bind( wx.EVT_BUTTON, self.loadFile) # Listbox for listing the individuals self.listBoxIndividuals = wx.ListBox( self.panel, pos=(10, 10+25+10), size=(400-10-10, 200)) # Run parameters: self.runText = wx.StaticText( self.panel, label = 'Run for (ms):', pos=( 10, 10+25+10+200+10), size = (80, 25)) self.runtimeBox = wx.TextCtrl( self.panel, pos = ( 10+80+10, 10+25+10+200+5), size = (130, 25) ) self.runtimeBox.SetValue( '-1') self.runButton = wx.Button( self.panel, label = 'Run', pos = ( 10+80+10+130+5, 10+25+10+200+5), size = (75, 25)) self.runButton.Bind( wx.EVT_BUTTON, self.runEvaluation ) # Plot fitness function: self.fitnessButton = wx.Button( self.panel, label = 'Plot fitness', pos = ( 10+80+10+110+5+75+20, 10+25+10+200+5), size = (80, 25)) self.fitnessButton.Bind( wx.EVT_BUTTON, self.plotFitness ) def loadFile(self, e): """ Actions to be made when load button is pressed. """ lastFilePath = self.fileBox.GetValue() # Create a file selection dialog if lastFilePath == '': lastFilePath = '~/Repositories/hormodular/src' else: lastFilePath = os.path.dirname( lastFilePath) print lastFilePath fileDialog = wx.FileDialog( self, defaultDir = lastFilePath, wildcard = '*.*', style = wx.FD_OPEN ) # Show dialog to get file path fileDialog.ShowModal() filePath = fileDialog.GetPath() fileDialog.Destroy() print 'Loading file: ' + filePath self.fileBox.SetValue( filePath ) # At this point filePath should contain the path to the results file # Parse the xml file: self.parseFile( filePath) def runEvaluation( self, e): """ Runs the evaluation program with the specified parameters """ print 'Evaluating things...' 
print 'Run with individual: ' + str( self.individuals[ self.listBoxIndividuals.GetSelections()[0] ]) # Get the simulation time: sim_time = int(self.runtimeBox.GetValue()) # Create the gait table: file = open( self.gaitTableFile, 'w') file.write( self.individuals[ self.listBoxIndividuals.GetSelections()[0]].generateGaitTable()) file.close() # Run the program: subprocess.call( [ self.evaluationProgramPath, self.robotSceneFile, self.gaitTableFile, str(sim_time) ]) def plotFitness( self, e): """ Extracts the fitness data from the log file and plots it """ print 'Plotting fitness...' # Get text from log file print 'Opening ' + self.logFile f = open( self.logFile, 'r') # Extract fitness history: fitness_history = [] generation_history = [] avg_fitness_history = [] for line in f.readlines(): if line.find( 'Generation: ') != -1: generation_history.append( int( line.split(' ')[1])) continue if line.find( '\tmax: ') != -1: fitness_history.append( float( line.split(' ')[1])) continue if line.find( '\tavg: ') != -1: avg_fitness_history.append( float( line.split(' ')[1])) continue f.close() #print generation_history #print fitness_history # Plot pylab.plot( generation_history, fitness_history, 'b-') #pylab.plot( generation_history, avg_fitness_history, 'r-') pylab.show() def parseFile( self, filePath): """ Parses the output file from the evolution process to extract info about the individuals and their fitness value """ print 'Parsing...' # Open the file and read the contents file = open( filePath, 'r') fileContents = file.read(); file.close() # Parse the xml data xmlfile = parseString( fileContents) # Getting population data: # --------------------------------------------------------------------------------------------- xmlPopulation = xmlfile.getElementsByTagName('Deme')[0] # Create new individuals list: self.individuals = [] # Load the hall of fame individuals self.individuals.append( Individual( 'HallOfFame', xmlPopulation.getElementsByTagName('Individual')[0] )) # Load the population individuals for xmlIndividual in xmlPopulation.getElementsByTagName('Individual')[1:]: self.individuals.append( Individual( 'Individual'+ str(len(self.individuals)), xmlIndividual)) # Set the info about the loaded individuals on listBox self.listBoxIndividuals.Set( [ str(i) for i in self.individuals] ) # Getting scene file / log file filepath # ---------------------------------------------------------------------------------------------- xmlSceneFile = xmlfile.getElementsByTagName('Registry')[0] found = False for entry in xmlSceneFile.getElementsByTagName( 'Entry'): if entry.getAttribute( 'key' ) == 'robot.simulationfile': self.robotSceneFile = entry.childNodes[0].data #print self.robotSceneFile if found: break else: found = True if entry.getAttribute( 'key') == 'log.filename': self.logFile = entry.childNodes[0].data if found: break else: found = True if __name__ == '__main__': app = wx.App() MainWin(None) app.MainLoop()
gpl-3.0
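`plotFitness` above recovers the fitness curve by string-matching log lines. A condensed sketch of that parsing on a made-up two-generation log; the exact log format is inferred from the code, not documented:

log_lines = ["Generation: 0", "\tmax: 0.10", "\tavg: 0.05",
             "Generation: 1", "\tmax: 0.30", "\tavg: 0.12"]
generations = [int(l.split(' ')[1]) for l in log_lines
               if l.find('Generation: ') != -1]
best_fitness = [float(l.split(' ')[1]) for l in log_lines
                if l.find('\tmax: ') != -1]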
yl565/statsmodels
examples/python/discrete_choice_example.py
30
5786
## Discrete Choice Models

### Fair's Affair data

# A survey of women only was conducted in 1974 by *Redbook* asking about
# extramarital affairs.

from __future__ import print_function
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
import statsmodels.api as sm
from statsmodels.formula.api import logit, probit, poisson, ols

print(sm.datasets.fair.SOURCE)

print(sm.datasets.fair.NOTE)

dta = sm.datasets.fair.load_pandas().data

dta['affair'] = (dta['affairs'] > 0).astype(float)
print(dta.head(10))

print(dta.describe())

affair_mod = logit("affair ~ occupation + educ + occupation_husb"
                   "+ rate_marriage + age + yrs_married + children"
                   " + religious", dta).fit()

print(affair_mod.summary())

# How well are we predicting?

affair_mod.pred_table()

# The coefficients of the discrete choice model do not tell us much. What
# we're after is marginal effects.

mfx = affair_mod.get_margeff()
print(mfx.summary())

respondent1000 = dta.ix[1000]
print(respondent1000)

resp = dict(zip(range(1, 9),
                respondent1000[["occupation", "educ", "occupation_husb",
                                "rate_marriage", "age", "yrs_married",
                                "children", "religious"]].tolist()))
resp.update({0: 1})
print(resp)

mfx = affair_mod.get_margeff(atexog=resp)
print(mfx.summary())

affair_mod.predict(respondent1000)

affair_mod.fittedvalues[1000]

affair_mod.model.cdf(affair_mod.fittedvalues[1000])

# The "correct" model here is likely the Tobit model. We have a
# work-in-progress branch "tobit-model" on github, if anyone is interested
# in censored regression models.

#### Exercise: Logit vs Probit

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111)
support = np.linspace(-6, 6, 1000)
ax.plot(support, stats.logistic.cdf(support), 'r-', label='Logistic')
ax.plot(support, stats.norm.cdf(support), label='Probit')
ax.legend();

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111)
support = np.linspace(-6, 6, 1000)
ax.plot(support, stats.logistic.pdf(support), 'r-', label='Logistic')
ax.plot(support, stats.norm.pdf(support), label='Probit')
ax.legend();

# Compare the estimates of the Logit Fair model above to a Probit model.
# Does the prediction table look better? Much difference in marginal effects?

#### Generalized Linear Model Example

print(sm.datasets.star98.SOURCE)

print(sm.datasets.star98.DESCRLONG)

print(sm.datasets.star98.NOTE)

dta = sm.datasets.star98.load_pandas().data
print(dta.columns)

print(dta[['NABOVE', 'NBELOW', 'LOWINC', 'PERASIAN', 'PERBLACK', 'PERHISP',
           'PERMINTE']].head(10))

print(dta[['AVYRSEXP', 'AVSALK', 'PERSPENK', 'PTRATIO', 'PCTAF', 'PCTCHRT',
           'PCTYRRND']].head(10))

formula = 'NABOVE + NBELOW ~ LOWINC + PERASIAN + PERBLACK + PERHISP + PCTCHRT '
formula += '+ PCTYRRND + PERMINTE*AVYRSEXP*AVSALK + PERSPENK*PTRATIO*PCTAF'

##### Aside: Binomial distribution

# Toss a six-sided die 5 times, what's the probability of exactly 2 fours?

stats.binom(5, 1./6).pmf(2)

from scipy.misc import comb
comb(5, 2) * (1/6.)**2 * (5/6.)**3

from statsmodels.formula.api import glm
glm_mod = glm(formula, dta, family=sm.families.Binomial()).fit()

print(glm_mod.summary())

# The number of trials

glm_mod.model.data.orig_endog.sum(1)

glm_mod.fittedvalues * glm_mod.model.data.orig_endog.sum(1)

# First differences: We hold all explanatory variables constant at their
# means and manipulate the percentage of low income households to assess its
# impact on the response variables:

exog = glm_mod.model.data.orig_exog  # get the dataframe

means25 = exog.mean()
print(means25)

means25['LOWINC'] = exog['LOWINC'].quantile(.25)
print(means25)

means75 = exog.mean()
means75['LOWINC'] = exog['LOWINC'].quantile(.75)
print(means75)

resp25 = glm_mod.predict(means25)
resp75 = glm_mod.predict(means75)
diff = resp75 - resp25

# The interquartile first difference for the percentage of low income
# households in a school district is:

print("%2.4f%%" % (diff[0] * 100))

nobs = glm_mod.nobs
y = glm_mod.model.endog
yhat = glm_mod.mu

from statsmodels.graphics.api import abline_plot
fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111, ylabel='Observed Values', xlabel='Fitted Values')
ax.scatter(yhat, y)
y_vs_yhat = sm.OLS(y, sm.add_constant(yhat, prepend=True)).fit()
fig = abline_plot(model_results=y_vs_yhat, ax=ax)

##### Plot fitted values vs Pearson residuals

# Pearson residuals are defined to be
#
# $$\frac{y - \mu}{\sqrt{\operatorname{var}(\mu)}}$$
#
# where var is typically determined by the family. E.g., binomial variance
# is $np(1 - p)$

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111, title='Residual Dependence Plot',
                     xlabel='Fitted Values', ylabel='Pearson Residuals')
ax.scatter(yhat, stats.zscore(glm_mod.resid_pearson))
ax.axis('tight')
ax.plot([0.0, 1.0], [0.0, 0.0], 'k-');

##### Histogram of standardized deviance residuals with Kernel Density Estimate overlayed

# The definition of the deviance residuals depends on the family. For the
# Binomial distribution this is
#
# $$r_{dev} = \operatorname{sign}(Y-\mu)\sqrt{2n\left(Y\log\frac{Y}{\mu}+(1-Y)\log\frac{1-Y}{1-\mu}\right)}$$
#
# They can be used to detect ill-fitting covariates

resid = glm_mod.resid_deviance
resid_std = stats.zscore(resid)
kde_resid = sm.nonparametric.KDEUnivariate(resid_std)
kde_resid.fit()

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111, title="Standardized Deviance Residuals")
ax.hist(resid_std, bins=25, normed=True);
ax.plot(kde_resid.support, kde_resid.density, 'r');

##### QQ-plot of deviance residuals

fig = plt.figure(figsize=(12, 8))
ax = fig.add_subplot(111)
fig = sm.graphics.qqplot(resid, line='r', ax=ax)
bsd-3-clause
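To make the die-toss aside concrete, here is the same probability computed by hand: choose which 2 of the 5 tosses show a four, then multiply the outcome probabilities.

from math import factorial

n_choose_k = factorial(5) // (factorial(2) * factorial(3))   # C(5, 2) = 10
p = n_choose_k * (1 / 6.) ** 2 * (5 / 6.) ** 3               # ~0.1608
print(p)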
merenlab/anvio
anvio/synteny.py
2
24040
#!/usr/bin/env python # -*- coding: utf-8 # pylint: disable=line-too-long """Classes to work with ngrams of contig functions. These are classes to deconstruct loci into ngrams. They are used to analyze conserved genes and synteny structures across loci. """ import sys import pandas as pd from collections import Counter import anvio import anvio.tables as t import anvio.dbops as dbops import anvio.utils as utils import anvio.panops as panops import anvio.terminal as terminal import anvio.filesnpaths as filesnpaths import anvio.genomestorage as genomestorage import anvio.genomedescriptions as genomedescriptions from anvio.dbops import PanDatabase from anvio.errors import ConfigError __author__ = "Developers of anvi'o (see AUTHORS.txt)" __copyright__ = "Copyleft 2015-2018, the Meren Lab (http://merenlab.org/)" __credits__ = [] __license__ = "GPL 3.0" __version__ = anvio.__version__ __maintainer__ = "Matthew Schechter" __email__ = "[email protected]" class NGram(object): """class for counting NGrams anvi-analyze-synteny is designed to work with a group of similar loci, where each locus is a contig (which can lie in any number of contigs dbs) Parameters ========== args : argparse.Namespace For examples, arguments accepted by anvi-analyze-syntenty skip_sanity_check : bool, false If True, sanity_check will not be called. Notes ===== - Currently the design assumes that each locus is a contig. In the future we have plans to expand this to compare genomes, not just loci, to each other. If that behavior is desired in the current design, each genome should be a single contig. """ def __init__(self, args, run=terminal.Run(), progress=terminal.Progress(), skip_sanity_check=False): """Parses arguments and run sanity_check""" self.args = args self.run = run self.progress = progress # Parse arguments A = lambda x: args.__dict__[x] if x in args.__dict__ else None self.annotation_source = A('annotation_source') self.window_range = A('ngram_window_range') or "2:3" self.is_in_unknowns_mode = A('analyze_unknown_functions') self.output_file = A('output_file') self.skip_init_functions = A('skip_init_functions') self.genome_names_to_focus = A('genome_names') self.ngram_source = A("ngram_source") self.first_functional_hit_only = A("first_functional_hit_only") self.annotation_source_dict = {} self.pan_db_path = A('pan_db') if self.annotation_source and self.pan_db_path: self.annotation_sources = [self.annotation_source, 'gene_clusters'] if self.pan_db_path: self.pan_db = PanDatabase(self.pan_db_path) self.p_meta = self.pan_db.meta self.p_meta['creation_date'] = utils.get_time_to_date(self.p_meta['creation_date']) if 'creation_date' in self.p_meta else 'unknown' self.p_meta['genome_names'] = sorted([s.strip() for s in self.p_meta['external_genome_names'].split(',') + self.p_meta['internal_genome_names'].split(',') if s]) self.p_meta['num_genomes'] = len(self.p_meta['genome_names']) self.genome_names = self.p_meta['genome_names'] self.gene_clusters_gene_alignments_available = self.p_meta['gene_alignments_computed'] else: self.pan_db = None self.genomes_storage_path = A('genomes_storage') # confirm genome-storage and pangenome hashes match of pangenome is provided if self.pan_db: self.genomes_storage = genomestorage.GenomeStorage(self.genomes_storage_path, self.p_meta['genomes_storage_hash'], genome_names_to_focus=self.p_meta['genome_names'], skip_init_functions=self.skip_init_functions, run=self.run, progress=self.progress) else: self.genomes_storage = genomestorage.GenomeStorage(self.genomes_storage_path, 
skip_init_functions=self.skip_init_functions, run=self.run, progress=self.progress) # list-annotation-resources self.list_annotation_sources = A('list_annotation_sources') self.gene_function_source_set = self.genomes_storage.db.get_table_as_dataframe('gene_function_calls').source.unique() if self.list_annotation_sources: self.run.info('Available functional annotation sources', ', '.join(self.gene_function_source_set)) sys.exit() # This houses the ngrams' data self.ngram_attributes_list = [] # Focus on specfic set of genomes if self.genome_names_to_focus: if filesnpaths.is_file_exists(self.genome_names_to_focus, dont_raise=True): self.genome_names_to_focus = utils.get_column_data_from_TAB_delim_file(self.genome_names_to_focus, column_indices=[0], expected_number_of_fields=1)[0] else: self.genome_names_to_focus = [g.strip() for g in self.genome_names_to_focus.split(',')] self.run.warning("A subset of genome names is found, and anvi'o will focus only on to those.") self.genomes_storage = genomestorage.GenomeStorage(self.genomes_storage_path, storage_hash=None, genome_names_to_focus=self.genome_names_to_focus) self.genomes = self.genomes_storage.get_genomes_dict() self.external_genome_names = [g for g in self.genomes if self.genomes[g]['external_genome']] self.internal_genome_names = [g for g in self.genomes if not self.genomes[g]['external_genome']] self.hash_to_genome_name = {} for genome_name in self.genomes: self.hash_to_genome_name[self.genomes[genome_name]['genome_hash']] = genome_name # number of genomes in genome-storage self.num_contigs_in_external_genomes_with_genes = len(self.genomes) # number of genomes in genome-storage if self.genome_names_to_focus: self.num_contigs_in_external_genomes_with_genes = len(self.genome_names_to_focus) else: self.num_contigs_in_external_genomes_with_genes = len(self.genomes_storage.get_all_genome_names()) if not skip_sanity_check: self.sanity_check() # unless we are in debug mode, let's keep things quiet. if anvio.DEBUG: self.run_object = terminal.Run() else: self.run_object = terminal.Run(verbose=False) def sanity_check(self): """Sanity_check will confirm input for NGram class""" # checking if the annotation source is common across all contigs databases if self.annotation_source and self.annotation_source not in self.gene_function_source_set: raise ConfigError("The annotation source you requested does not appear to be in all of " "the contigs databases from the external-genomes file. " "Please confirm your annotation-source and that all contigs databases have it :)") if (self.annotation_source and self.pan_db) and not self.ngram_source: raise ConfigError("anvi-analyze-synteny needs to know which annotation source to slice Ngrams with. " "Please use the --ngram-source flag to declare one :)") if not self.args.output_file: raise ConfigError("You should provide an output file name.") # checking window-range input if self.window_range.count(':') != 1: raise ConfigError("anvi'o would love to slice and dice your loci, but the " "Format of window_range must be x:y (e.g. 
Window sizes 2 to 4 would be denoted as: 2:4)") try: self.window_range = [int(n) for n in self.window_range.split(":")] except ValueError: raise ConfigError("anvi'o would love to slice and dice your loci, but the " "window-ranges need to be integers :)") if self.window_range[0] > self.window_range[1]: raise ConfigError("anvi'o would love to slice and dice your loci, but the " "window-range needs to be from small to big :)") # Window-range must contain 2 integers for window if len(self.window_range) > 2 or not isinstance(self.window_range[0], int) or not isinstance(self.window_range[1], int): raise ConfigError("anvi'o would love to slice and dice your loci, but... the " "window_range must only contain 2 integers and be formatted as x:y (e.g. Window sizes 2 to 4 would be denoted as: 2:4)") # Loop through each contigs db, test that each contig contains at least as many genes as max window size and confirm every contig has annotations for contigs_db_name in self.genomes_storage.get_genomes_dict(): gene_caller_ids = self.genomes_storage.get_gene_caller_ids(contigs_db_name) num_genes = len(gene_caller_ids) if self.window_range[1] > num_genes: raise ConfigError("The largest window size you requested (%d) is larger than the number of genes found on this genome: %s" % \ (self.window_range[1], contigs_db_name))
⚔️", nl_before=1, nl_after=1) genes_and_functions_list = [] for contigs_db_name in self.genomes_storage.get_genomes_dict(): # Get list of genes-callers-ids gene_caller_ids_list = list(self.genomes_storage.get_gene_caller_ids(contigs_db_name)) # Use gene_caller_ids_list to get functions table gene_function_call_df = self.genomes_storage.db.get_table_as_dataframe('gene_function_calls') # Create dict for annotate Ngrams if self.annotation_source: self.gene_caller_id_to_accession_dict = self.get_genes_and_functions_dict(contigs_db_name, gene_function_call_df) self.annotation_source_dict[self.annotation_source] = self.gene_caller_id_to_accession_dict if self.pan_db: self.gene_caller_id_to_gene_cluster_dict = self.get_gene_cluster_dict(contigs_db_name, gene_cluster_frequencies_dataframe) self.annotation_source_dict['gene_clusters'] = self.gene_caller_id_to_gene_cluster_dict # Iterate over range of window sizes and run synteny algorithm to count occurrences of ngrams in a contig for N in range(*self.window_range): ngram_counts_dict, annotations_dict = self.count_synteny(N, gene_caller_ids_list) for ngram, count in ngram_counts_dict.items(): for annotation_source, annotation in annotations_dict[ngram].items(): if annotation_source == self.ngram_source: pass else: self.ngram_attributes_list.append([ngram, count, annotations_dict[ngram][annotation_source], contigs_db_name, N]) def count_synteny(self, N, gene_caller_ids_list): """This method counts synteny patterns of size N on a contig This method counts synteny patterns of size N on a contig by taking a window of gene-callers-ids, annotating it, ordering it consistently, and then counting its occurence on the contig. Parameters ========== n : int A window size to extract a ngram gene_caller_ids_list: list list of all gene-callers-ids on a contig Returns ======= ngram_counts_dict : dict A dict of ngram counts on a contig A tuple of annotations {ngram:count} Notes ===== This function assumes that the input list of gene-callers-ids are in the order of which they occur on the contig """ ngram_counts_dict = Counter({}) annotations_dict = {} gene_callers_id_windows = self.get_windows(N, gene_caller_ids_list) for window in gene_callers_id_windows: annotated_window_dict = self.annotate_window(window) if self.ngram_source: ngram = self.order_window(annotated_window_dict[self.ngram_source]) elif self.annotation_source: ngram = self.order_window(annotated_window_dict[self.annotation_source]) else: ngram = self.order_window(annotated_window_dict['gene_clusters']) # flip annotation if the ngram was flipped if ngram[1] == True: annotated_window_dict_ordered = {} for annotation_source, annotation in annotated_window_dict.items(): annotated_window_flipped = annotation[::-1] annotated_window_dict_ordered[annotation_source] = annotated_window_flipped annotations_dict[ngram[0]] = annotated_window_dict_ordered # record flipped version of annotation else: annotations_dict[ngram[0]] = annotated_window_dict ngram_counts_dict[ngram[0]] += 1 return ngram_counts_dict, annotations_dict def annotate_window(self, window): """This method annotates a gene-callers-id window This method will annotate a gene-callers-id window based using annotation sources provided by the user (e.g. COGs, pan_db). If the user provided the `first_functional_hit_only` flag, the COG annotation will be split by "!!!" and the first (best hit) item will be used. 
Parameters ========== window : tuple A tuple of gene gene-callers-ids that represents an unannotated Ngram Returns ======= ngram_gene_clusters : tuple A tuple of annotations """ # Annotate window based on user input gene_annotation_dict = {} for annotation_source, annotations_dict in self.annotation_source_dict.items(): if self.first_functional_hit_only: ngram_annotation = [] for g in window: annotation = annotations_dict[g] if "!!!" in annotation: annotation_first = annotation.split("!!!")[0] ngram_annotation.append(annotation_first) else: ngram_annotation.append(annotation) ngram_annotation = tuple(ngram_annotation) else: ngram_annotation = tuple([annotations_dict[g] for g in window]) gene_annotation_dict[annotation_source] = ngram_annotation return gene_annotation_dict def order_window(self, annotated_window): """This method orients an annotated_window in a consistent order This method is to make sure that all Ngrams are ordered in the same direction. For example, we want to count A-B-C and C-B-A as the same Ngram. Parameters ========== annotated_window : tuple A tuple of annotations accessions Returns ======= ngram : tuple A tuple Ngram that is correctly oriented """ original_order = annotated_window flipped_order = annotated_window[::-1] if original_order[0] < flipped_order[0]: ngram = original_order flipped = False else: ngram = flipped_order flipped = True ngram = [ngram, flipped] return ngram def convert_to_df(self): """Takes self.ngram_attributes_list and returns a pandas dataframe""" ngram_count_df_list = [] for ngram_attribute in self.ngram_attributes_list: ngram = "::".join(map(str, list(ngram_attribute[0]))) annotation = "::".join(map(str, list(ngram_attribute[2]))) if self.pan_db and self.annotation_source: df = pd.DataFrame(columns=['ngram','count', 'annotation', 'contig_db_name', 'N', 'number_of_loci']) df = df.append({'ngram': ngram, 'count': ngram_attribute[1], 'annotation': annotation, 'contig_db_name': ngram_attribute[3], 'N':ngram_attribute[4], 'number_of_loci':self.num_contigs_in_external_genomes_with_genes}, ignore_index=True) elif self.pan_db and not self.annotation_source: ngram = "::".join(map(str, list(ngram_attribute[0]))) df = pd.DataFrame(columns=['ngram','count', 'contig_db_name', 'N', 'number_of_loci']) df = df.append({'ngram': ngram, 'count': ngram_attribute[1], 'contig_db_name': ngram_attribute[3], 'N':ngram_attribute[4], 'number_of_loci':self.num_contigs_in_external_genomes_with_genes}, ignore_index=True) else: ngram = "::".join(map(str, list(ngram_attribute[0]))) df = pd.DataFrame(columns=['ngram','count', 'contig_db_name', 'N', 'number_of_loci']) df = df.append({'ngram': ngram, 'count': ngram_attribute[1], 'contig_db_name': ngram_attribute[3], 'N':ngram_attribute[4], 'number_of_loci':self.num_contigs_in_external_genomes_with_genes}, ignore_index=True) ngram_count_df_list.append(df) ngram_count_df_final = pd.concat(ngram_count_df_list) if not self.is_in_unknowns_mode: ngram_count_df_final = ngram_count_df_final[~ngram_count_df_final['ngram'].str.contains("unknown-function" or "no-gene-cluster-annotation")] return ngram_count_df_final def report_ngrams_to_user(self): """Counts ngrams per contig and reports as tab-delimited file""" self.populate_ngram_attributes() df = self.convert_to_df() df.to_csv(self.output_file, sep = '\t', index=False) self.run.info("Ngram table", self.output_file) def get_genes_and_functions_dict(self, contigs_db_name, gene_function_call_df): """This method will extract a list of gene attributes from each contig within a contigsDB. 
Returns ======= output : list of lists first element is gene_caller_id, second is function accession, third is the contig name """ # get contigsDB gene_function_call_df_filtered = gene_function_call_df[(gene_function_call_df['genome_name'] == contigs_db_name) & (gene_function_call_df['source'] == self.annotation_source)] gene_callers_id_to_accession_dict = gene_function_call_df_filtered[['gene_callers_id','accession']].set_index('gene_callers_id')['accession'].to_dict() gene_caller_ids_list = self.genomes_storage.get_gene_caller_ids(contigs_db_name) # Make list of lists containing gene attributes. If there is not annotation add one in! genes_and_functions_list = [] # List of lists [gene-caller-id, accessions, contig-name] counter = 0 for gene_callers_id in gene_caller_ids_list: list_of_gene_attributes = [] if gene_callers_id in gene_callers_id_to_accession_dict: accession = gene_callers_id_to_accession_dict[gene_callers_id] accession = accession.replace(" ","") list_of_gene_attributes.extend((gene_callers_id, accession)) genes_and_functions_list.append(list_of_gene_attributes) else: # adding in "unknown annotation" if there is none accession = "unknown-function" list_of_gene_attributes.extend((counter, accession)) genes_and_functions_list.append(list_of_gene_attributes) counter += 1 gene_caller_id_to_accession_dict = {} for entry in genes_and_functions_list: gene_caller_id_to_accession_dict[entry[0]] = entry[1] return gene_caller_id_to_accession_dict def get_gene_cluster_dict(self, contigs_db_name, gene_cluster_frequencies_dataframe): gene_cluster_frequencies_dataframe_filtered = gene_cluster_frequencies_dataframe[gene_cluster_frequencies_dataframe['genome_name'] == contigs_db_name] gene_callers_id_to_gene_cluster_id_dict = gene_cluster_frequencies_dataframe_filtered[['gene_caller_id','gene_cluster_id']].set_index('gene_caller_id')['gene_cluster_id'].to_dict() gene_caller_ids_list = self.genomes_storage.get_gene_caller_ids(contigs_db_name) # Make list of lists containing gene cluster attributes. If there is not annotation add one in! genes_cluster_list = [] # List of lists [gene-caller-id, gene-cluster-id, contig-name] counter = 0 for gene_callers_id in gene_caller_ids_list: list_of_gene_attributes = [] if gene_callers_id in gene_callers_id_to_gene_cluster_id_dict: gene_cluster_id = gene_callers_id_to_gene_cluster_id_dict[gene_callers_id] gene_cluster_id = gene_cluster_id.replace(" ","") list_of_gene_attributes.extend((gene_callers_id, gene_cluster_id)) genes_cluster_list.append(list_of_gene_attributes) else: # adding in "unknown annotation" if there is none gene_cluster_id = "no-gene-cluster-annotation" list_of_gene_attributes.extend((counter, gene_cluster_id)) genes_cluster_list.append(list_of_gene_attributes) counter += 1 gene_caller_id_to_gene_cluster_dict = {} for entry in genes_cluster_list: gene_caller_id_to_gene_cluster_dict[entry[0]] = entry[1] return gene_caller_id_to_gene_cluster_dict def get_windows(self, N, gene_caller_ids_list): """This method will count NGrams in contigs This method will use a sliding window of size N to extract gene-callers-id windows. The final output will be a list of windows of size N. 
Parameters ========== gene_caller_ids_list : list A list of gene gene-callers-ids as they appear in the contig n : int A window size to extract a ngram Returns ======= gene_callers_id_windows : list A list of n sized windows of gene-callers-ids from a contig """ gene_callers_id_windows = [] for i in range(0, len(gene_caller_ids_list) - N + 1): # extract window window = tuple(gene_caller_ids_list[i:i + N]) gene_callers_id_windows.append(window) return gene_callers_id_windows
gpl-3.0
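The heart of `NGram.count_synteny` above is a sliding window plus a canonical orientation so that A-B-C and C-B-A count as the same ngram. A self-contained sketch of just those two pieces, with hypothetical gene labels:

from collections import Counter

def windows(genes, n):
    # Same sliding window as NGram.get_windows.
    return [tuple(genes[i:i + n]) for i in range(len(genes) - n + 1)]

def canonical(window):
    # Same orientation rule as NGram.order_window: keep whichever of the
    # window and its reverse has the smaller first element.
    flipped = window[::-1]
    return window if window[0] < flipped[0] else flipped

counts = Counter(canonical(w)
                 for w in windows(['cogA', 'cogB', 'cogC', 'cogB', 'cogA'], 2))
print(counts)   # ('cogA', 'cogB') and ('cogB', 'cogC') each appear twice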
SBRG/ssbio
ssbio/databases/uniprot.py
2
24223
""" UniProtProp =========== """ import os.path as op import re import warnings import bioservices import pandas as pd import requests from collections import defaultdict from dateutil.parser import parse as dateparse import ssbio.utils from BCBio import GFF from ssbio.protein.sequence.seqprop import SeqProp try: from StringIO import StringIO except ImportError: from io import StringIO from six.moves.urllib.request import urlretrieve from Bio import SeqIO import logging log = logging.getLogger(__name__) bsup = bioservices.uniprot.UniProt() # See the UniProt XML specification for these http://www.uniprot.org/docs/uniprot.xsd # Also see http://www.uniprot.org/help/sequence_annotation for categories longname_all_features = ["active site", "binding site", "calcium-binding region", "chain", "coiled-coil region", "compositionally biased region", "cross-link", "disulfide bond", "DNA-binding region", "domain", "glycosylation site", "helix", "initiator methionine", "lipid moiety-binding region", "metal ion-binding site", "modified residue", "mutagenesis site", "non-consecutive residues", "non-terminal residue", "nucleotide phosphate-binding region", "peptide", "propeptide", "region of interest", "repeat", "non-standard amino acid", "sequence conflict", "sequence variant", "short sequence motif", "signal peptide", "site", "splice variant", "strand", "topological domain", "transit peptide", "transmembrane region", "turn", "unsure residue", "zinc finger region", "intramembrane region"] longname_molecule_processing = ["initiator methionine", "signal peptide", "transit peptide", "propeptide", "chain", "peptide"] longname_regions = ['repeat','calcium-binding region','compositionally biased region','nucleotide phosphate-binding region', 'topological domain','intramembrane region','coiled-coil region','zinc finger region','domain', 'short sequence motif','DNA-binding region','region of interest','transmembrane region'] longname_sites = ["active site", "binding site", "metal ion-binding site", "site"] longname_amino_acid_modifications = ["non-standard amino acid", "modified residue", "lipid moiety-binding region", "glycosylation site", "disulfide bond", "cross-link"] longname_natural_variations = ["sequence variant", "splice variant"] longname_experimental_info = ["mutagenesis site", "unsure residue", "sequence conflict", "non-consecutive residues", "non-terminal residue"] longname_secondary_structure = ["helix", "turn", "strand"] class UniProtProp(SeqProp): """Generic class to store information on a UniProt entry, extended from a SeqProp object. The main utilities of this class are to: #. Download and/or parse UniProt text or xml files #. Store extra parsed information in attributes Attributes: uniprot (str): Main UniProt accession code alt_uniprots (list): Alternate accession codes that point to the main one file_type (str): Metadata file type reviewed (bool): If this entry is a "reviewed" entry. If None, then status is unknown. ec_number (str): EC number pfam (list): PFAM IDs entry_version (str): Date of last update of the UniProt entry seq_version (str): Date of last update of the UniProt sequence """ def __init__(self, seq, id, name='<unknown name>', description='<unknown description>', fasta_path=None, xml_path=None, gff_path=None): """Store basic protein sequence properties from a UniProt ID/ACC. One or all of the input files can be provided - you might ask why even provide the FASTA if the XML has the sequence in it? The FASTA file can be used in alignment programs run locally. 
Args: seq (str, Seq, SeqRecord): Sequence string, Biopython Seq or SeqRecord object id (str): UniProt ID/ACC fasta_path (str): Path to FASTA file xml_path (str): Path to UniProt XML file gff_path (str): Path to GFF feature file """ if not is_valid_uniprot_id(id): raise ValueError("{}: invalid UniProt ID!".format(id)) self.reviewed = False self.alt_uniprots = None self.taxonomy = None self.seq_version = None self.seq_date = None self.entry_version = None self.entry_date = None SeqProp.__init__(self, id=id, seq=seq, name=name, description=description, sequence_path=fasta_path, metadata_path=xml_path, feature_path=gff_path) self.uniprot = id @SeqProp.seq.getter def seq(self): """Seq: Get the Seq object from the sequence file, metadata file, or in memory""" if self.sequence_file: log.debug('{}: reading sequence from sequence file {}'.format(self.id, self.sequence_path)) tmp_sr = SeqIO.read(self.sequence_path, 'fasta') return tmp_sr.seq elif self.metadata_file: log.debug('{}: reading sequence from metadata file {}'.format(self.id, self.metadata_path)) tmp_sr = SeqIO.read(self.metadata_path, 'uniprot-xml') return tmp_sr.seq else: if not self._seq: log.debug('{}: no sequence stored in memory'.format(self.id)) else: log.debug('{}: reading sequence from memory'.format(self.id)) return self._seq @SeqProp.features.getter def features(self): """list: Get the features from the feature file, metadata file, or in memory""" if self.feature_file: log.debug('{}: reading features from feature file {}'.format(self.id, self.feature_path)) with open(self.feature_path) as handle: feats = list(GFF.parse(handle)) if len(feats) > 1: log.warning('{}: multiple sequences found in GFF, using the first'.format(self.id)) return feats[0].features elif self.metadata_file: log.debug('{}: reading features from metadata file {}'.format(self.id, self.metadata_path)) tmp_sr = SeqIO.read(self.metadata_path, 'uniprot-xml') return tmp_sr.features else: return self._features @SeqProp.metadata_path.setter def metadata_path(self, m_path): """Provide pointers to the paths of the metadata file Args: m_path: Path to metadata file """ if not m_path: self.metadata_dir = None self.metadata_file = None else: if not op.exists(m_path): raise OSError('{}: file does not exist!'.format(m_path)) if not op.dirname(m_path): self.metadata_dir = '.' else: self.metadata_dir = op.dirname(m_path) self.metadata_file = op.basename(m_path) # Parse the metadata file using Biopython's built-in SeqRecord parser # and update the object's IDs and annotation attributes tmp_sr = SeqIO.read(self.metadata_path, 'uniprot-xml') parsed = parse_uniprot_xml_metadata(tmp_sr) self.update(parsed, overwrite=True) def metadata_path_unset(self): """Copy features to memory and remove the association of the metadata file.""" if not self.metadata_file: raise IOError('No metadata file to unset') log.debug('{}: reading from metadata file {}'.format(self.id, self.metadata_path)) tmp_sr = SeqIO.read(self.metadata_path, 'uniprot-xml') tmp_feats = tmp_sr.features # TODO: should this be in separate unset functions?
self.metadata_dir = None self.metadata_file = None self.features = tmp_feats if self.sequence_file: tmp_seq = tmp_sr.seq self.sequence_dir = None self.sequence_file = None self.seq = tmp_seq def download_seq_file(self, outdir, force_rerun=False): """Download and load the UniProt FASTA file""" uniprot_fasta_file = download_uniprot_file(uniprot_id=self.id, filetype='fasta', outdir=outdir, force_rerun=force_rerun) self.sequence_path = uniprot_fasta_file def download_metadata_file(self, outdir, force_rerun=False): """Download and load the UniProt XML file""" uniprot_xml_file = download_uniprot_file(uniprot_id=self.id, outdir=outdir, filetype='xml', force_rerun=force_rerun) self.metadata_path = uniprot_xml_file def ranking_score(self): """Provide a score for this UniProt ID based on reviewed (True=1, False=0) + number of PDBs Returns: int: Scoring for this ID """ return self.reviewed + self.num_pdbs def __json_encode__(self): # TODO: investigate why saving with # does not work! # Recon3D problem genes: testers = ['1588.2', '6819.1', '27233.1', '2264.1'] to_return = {} for x in self.__dict__.keys(): if x == 'description': sanitized = ssbio.utils.force_string(getattr(self, x)).replace('#', '-') to_return.update({x: sanitized}) else: to_return.update({x: getattr(self, x)}) return to_return def parse_uniprot_xml_metadata(sr): """Load relevant attributes and dbxrefs from a parsed UniProt XML file in a SeqRecord. Returns: dict: All parsed information """ # TODO: What about "reviewed" status? and EC number xref_dbs_to_keep = ['GO', 'KEGG', 'PDB', 'PROSITE', 'Pfam', 'RefSeq'] infodict = {} infodict['alt_uniprots'] = list(set(sr.annotations['accessions']).difference([sr.id])) infodict['gene_name'] = None if 'gene_name_primary' in sr.annotations: infodict['gene_name'] = sr.annotations['gene_name_primary'] infodict['description'] = sr.description infodict['taxonomy'] = None if 'organism' in sr.annotations: infodict['taxonomy'] = sr.annotations['organism'] infodict['seq_version'] = sr.annotations['sequence_version'] infodict['seq_date'] = sr.annotations['sequence_modified'] infodict['entry_version'] = sr.annotations['version'] infodict['entry_date'] = sr.annotations['modified'] tmp = defaultdict(list) for xref in sr.dbxrefs: database = xref.split(':', 1)[0] xrefs = xref.split(':', 1)[-1] if database in xref_dbs_to_keep: if database == 'PDB': tmp['pdbs'].append(xrefs) else: tmp[database.lower()].append(xrefs) infodict.update(tmp) return infodict def is_valid_uniprot_id(instring): """Check if a string is a valid UniProt ID. See regex from: http://www.uniprot.org/help/accession_numbers Args: instring: any string identifier Returns: True if the string is a valid UniProt ID """ valid_id = re.compile("(?:[OPQ][0-9][A-Z0-9]{3}[0-9]|[A-NR-Z][0-9]([A-Z][A-Z0-9]{2}[0-9]){1,2})$") if valid_id.match(str(instring)): return True else: return False # TODO: method to blast UniProt to find a 100% sequence match def blast_uniprot(seq_str, seq_ident=1, evalue=0.0001, reviewed_only=True, organism=None): """BLAST the UniProt db to find what IDs match the sequence input Args: seq_str: Sequence string seq_ident: Percent identity to match evalue: E-value of BLAST hit reviewed_only: If only reviewed entries should be searched organism: Organism to restrict the search to Returns: Not yet implemented """ pass def get_fasta(uniprot_id): """Get the protein sequence for a UniProt ID as a string.
Args: uniprot_id: Valid UniProt ID Returns: str: String of the protein (amino acid) sequence """ # Silencing the "Will be moved to Biokit" message with ssbio.utils.suppress_stdout(): return bsup.get_fasta_sequence(uniprot_id) def uniprot_reviewed_checker(uniprot_id): """Check if a single UniProt ID is reviewed or not. Args: uniprot_id: Valid UniProt ID Returns: bool: If the entry is reviewed """ query_string = 'id:' + uniprot_id uni_rev_raw = StringIO(bsup.search(query_string, columns='id,reviewed', frmt='tab')) uni_rev_df = pd.read_table(uni_rev_raw, sep='\t', index_col=0) uni_rev_df = uni_rev_df.fillna(False) uni_rev_df = uni_rev_df[pd.notnull(uni_rev_df.Status)] uni_rev_df = uni_rev_df.replace(to_replace="reviewed", value=True) uni_rev_df = uni_rev_df.replace(to_replace="unreviewed", value=False) uni_rev_dict_adder = uni_rev_df.to_dict()['Status'] return uni_rev_dict_adder[uniprot_id] def uniprot_reviewed_checker_batch(uniprot_ids): """Batch check if UniProt IDs are reviewed or not Args: uniprot_ids: UniProt ID or list of UniProt IDs Returns: A dictionary of {UniProtID: Boolean} """ uniprot_ids = ssbio.utils.force_list(uniprot_ids) invalid_ids = [i for i in uniprot_ids if not is_valid_uniprot_id(i)] uniprot_ids = [i for i in uniprot_ids if is_valid_uniprot_id(i)] if invalid_ids: warnings.warn("Invalid UniProt IDs {} will be ignored".format(invalid_ids)) # splitting the query up into manageable sizes (200 IDs each) Nmax = 200 N, rest = divmod(len(uniprot_ids), Nmax) uni_rev_dict = {} if rest > 0: N += 1 for i in range(0, N): i1 = i * Nmax i2 = (i + 1) * Nmax if i2 > len(uniprot_ids): i2 = len(uniprot_ids) query = uniprot_ids[i1:i2] query_string = '+OR+'.join('id:' + x for x in query) uni_rev_raw = StringIO(bsup.search(query_string, columns='id,reviewed', frmt='tab')) uni_rev_df = pd.read_table(uni_rev_raw, sep='\t', index_col=0) uni_rev_df = uni_rev_df.fillna(False) # no_metadata = uni_rev_df[pd.isnull(uni_rev_df.Status)].index.tolist() # if no_metadata: # warnings.warn("Unable to retrieve metadata for {}.".format(no_metadata)) uni_rev_df = uni_rev_df[pd.notnull(uni_rev_df.Status)] uni_rev_df = uni_rev_df.replace(to_replace="reviewed", value=True) uni_rev_df = uni_rev_df.replace(to_replace="unreviewed", value=False) uni_rev_dict_adder = uni_rev_df.to_dict()['Status'] uni_rev_dict.update(uni_rev_dict_adder) return uni_rev_dict def uniprot_ec(uniprot_id): """Retrieve the EC number annotation for a UniProt ID.
Args: uniprot_id: Valid UniProt ID Returns: str: EC number annotation, or None if the entry has none """ r = requests.post('http://www.uniprot.org/uniprot/?query=%s&columns=ec&format=tab' % uniprot_id) lines = r.content.decode('utf-8').splitlines() ec = lines[1] if len(lines) > 1 else '' if len(ec) == 0: ec = None return ec def uniprot_sites(uniprot_id): """Retrieve a list of UniProt sites parsed from the feature file Sites are defined here: http://www.uniprot.org/help/site and here: http://www.uniprot.org/help/function_section Args: uniprot_id: Valid UniProt ID Returns: list: List of SeqFeature objects describing the sites """ r = requests.post('http://www.uniprot.org/uniprot/%s.gff' % uniprot_id) gff = StringIO(r.content.decode('utf-8')) feats = list(GFF.parse(gff)) if len(feats) > 1: log.warning('{}: multiple sequences found in GFF, using the first'.format(uniprot_id)) return feats[0].features # try: # gff_df = pd.read_table(gff, sep='\t', skiprows=2, header=None) # except ValueError as e: # log.error('Error retrieving feature table') # print(e) # return pd.DataFrame() # # gff_df.drop([0, 1, 5, 6, 7, 9], axis=1, inplace=True) # gff_df.columns = ['type', 'seq_start', 'seq_end', 'notes'] # # return gff_df def download_uniprot_file(uniprot_id, filetype, outdir='', force_rerun=False): """Download a UniProt file for a UniProt ID/ACC Args: uniprot_id: Valid UniProt ID filetype: txt, fasta, xml, rdf, or gff outdir: Directory to download the file force_rerun: If the file should be redownloaded even if it already exists Returns: str: Absolute path to file """ my_file = '{}.{}'.format(uniprot_id, filetype) url = 'http://www.uniprot.org/uniprot/{}'.format(my_file) outfile = op.join(outdir, my_file) if ssbio.utils.force_rerun(flag=force_rerun, outfile=outfile): urlretrieve(url, outfile) return outfile def parse_uniprot_txt_file(infile): """Parse a raw UniProt metadata file and return a dictionary. Args: infile: Path to metadata file Returns: dict: Metadata dictionary """ uniprot_metadata_dict = {} metadata = old_parse_uniprot_txt_file(infile) metadata_keys = list(metadata.keys()) if metadata_keys: metadata_key = metadata_keys[0] else: return uniprot_metadata_dict uniprot_metadata_dict['seq_len'] = len(str(metadata[metadata_key]['sequence'])) uniprot_metadata_dict['reviewed'] = metadata[metadata_key]['is_reviewed'] uniprot_metadata_dict['seq_version'] = metadata[metadata_key]['sequence_version'] uniprot_metadata_dict['entry_version'] = metadata[metadata_key]['entry_version'] if 'gene' in metadata[metadata_key]: uniprot_metadata_dict['gene_name'] = metadata[metadata_key]['gene'] if 'description' in metadata[metadata_key]: uniprot_metadata_dict['description'] = metadata[metadata_key]['description'] if 'refseq' in metadata[metadata_key]: uniprot_metadata_dict['refseq'] = metadata[metadata_key]['refseq'] if 'kegg' in metadata[metadata_key]: uniprot_metadata_dict['kegg'] = metadata[metadata_key]['kegg'] if 'ec' in metadata[metadata_key]: uniprot_metadata_dict['ec_number'] = metadata[metadata_key]['ec'] if 'pfam' in metadata[metadata_key]: uniprot_metadata_dict['pfam'] = metadata[metadata_key]['pfam'] if 'pdbs' in metadata[metadata_key]: uniprot_metadata_dict['pdbs'] = list(set(metadata[metadata_key]['pdbs'])) return uniprot_metadata_dict def old_parse_uniprot_txt_file(infile): """From: boscoh/uniprot github Parses the text of metadata retrieved from uniprot.org. Only a few fields have been parsed, but this provides a template for the other fields. A single description is generated from joining alternative descriptions. Returns a dictionary with the main UNIPROT ACC as keys.
""" with open(infile, 'r') as txt: cache_txt = txt.read() tag = None uniprot_id = None metadata_by_seqid = {} for l in cache_txt.splitlines(): test_tag = l[:5].strip() if test_tag and test_tag != tag: tag = test_tag line = l[5:].strip() words = line.split() if tag == "ID": uniprot_id = words[0] is_reviewed = words[1].startswith('Reviewed') length = int(words[2]) metadata_by_seqid[uniprot_id] = { 'id': uniprot_id, 'is_reviewed': is_reviewed, 'length': length, 'sequence': '', 'accs': [], } entry = metadata_by_seqid[uniprot_id] if tag == "DT": # DT 01-OCT-1996, integrated into UniProtKB/Swiss-Prot. # DT 17-OCT-2006, sequence version 3. # DT 22-JUL-2015, entry version 166. comma_split = line.split(',') if 'sequence version' in comma_split[1]: # print 'sequence_version', comma_split[0], # dateparse(comma_split[0]).date() entry['sequence_version'] = str( dateparse(comma_split[0]).date()) elif 'entry version' in comma_split[1]: # print 'entry_version', comma_split[0], # dateparse(comma_split[0]).date() entry['entry_version'] = str( dateparse(comma_split[0]).date()) if tag == "SQ": if words[0] != "SEQUENCE": entry['sequence'] += ''.join(words) if tag == "AC": accs = [w.replace(";", "") for w in words] entry['accs'].extend(accs) if tag == "DR": if len(words) > 0: if 'PDB' in words[0]: if 'pdb' not in entry: entry['pdb'] = words[1][:-1] if 'pdbs' not in entry: entry['pdbs'] = [] entry['pdbs'].append(words[1][:-1]) if 'RefSeq' in words[0]: if 'refseq' not in entry: entry['refseq'] = [] ids = [w[:-1] for w in words[1:]] entry['refseq'].extend(ids) if 'KEGG' in words[0]: if 'kegg' not in entry: entry['kegg'] = [] ids = [w[:-1] for w in words[1:]] ids = filter(lambda w: len(w) > 1, ids) entry['kegg'].extend(ids) if 'GO' in words[0]: if 'go' not in entry: entry['go'] = [] entry['go'].append(' '.join(words[1:])) if 'Pfam' in words[0]: if 'pfam' not in entry: entry['pfam'] = [] entry['pfam'].append(words[1][:-1]) if tag == "GN": if 'gene' not in entry and len(words) > 0: pieces = words[0].split("=") if len(pieces) > 1 and 'name' in pieces[0].lower(): entry['gene'] = pieces[1].replace( ';', '').replace(',', '') if tag == "OS": # OS Homo sapiens (Human). if 'organism' not in entry: entry['organism'] = "" entry['organism'] += line if tag == "DE": if 'descriptions' not in entry: entry['descriptions'] = [] entry['descriptions'].append(line) if tag == "CC": if 'comment' not in entry: entry['comment'] = '' entry['comment'] += line + '\n' for entry in metadata_by_seqid.values(): descriptions = entry['descriptions'] for i in reversed(range(len(descriptions))): description = descriptions[i] if 'Short' in description or 'Full' in description or 'EC=' in description: if 'Short' in description or 'Full' in description: j = description.find('=') descriptions[i] = description[j + 1:].replace(';', '') if 'description' not in entry: entry['description'] = [] entry['description'].append(descriptions[i]) if 'EC=' in description: j = description.find('=') descriptions[i] = description[j + 1:].replace(';', '') if '{' in descriptions[i]: descriptions[i] = descriptions[i].split(' {')[0] if 'ec' not in entry: entry['ec'] = [] entry['ec'].append(descriptions[i]) else: del descriptions[i] return metadata_by_seqid
mit
karuppayya/zeppelin
python/src/main/resources/python/backend_zinline.py
61
11831
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This file provides a static (non-interactive) matplotlib plotting backend # for zeppelin notebooks for use with the python/pyspark interpreters from __future__ import print_function import sys import uuid import warnings import base64 from io import BytesIO try: from StringIO import StringIO except ImportError: from io import StringIO import mpl_config import matplotlib from matplotlib._pylab_helpers import Gcf from matplotlib.backends.backend_agg import new_figure_manager, FigureCanvasAgg from matplotlib.backend_bases import ShowBase, FigureManagerBase from matplotlib.figure import Figure ######################################################################## # # The following functions and classes are for pylab and implement # window/figure managers, etc... # ######################################################################## class Show(ShowBase): """ A callable object that displays the figures to the screen. Valid kwargs include figure width and height (in units supported by the div tag), block (allows users to override blocking behavior regardless of whether or not interactive mode is enabled, currently unused) and close (implicitly calls matplotlib.pyplot.close('all') with each call to show()). """ def __call__(self, close=None, block=None, **kwargs): if close is None: close = mpl_config.get('close') try: managers = Gcf.get_all_fig_managers() if not managers: return # Tell zeppelin that the output will be html using the %html magic # We want to do this only once to avoid seeing "%html" printed # directly to the output when multiple figures are displayed from # one paragraph. if mpl_config.get('angular'): print('%angular') else: print('%html') # Show all open figures for manager in managers: manager.show(**kwargs) finally: # This closes all the figures if close is set to True. if close and Gcf.get_all_fig_managers(): Gcf.destroy_all() class FigureCanvasZInline(FigureCanvasAgg): """ The canvas the figure renders into. Calls the draw and print fig methods, creates the renderers, etc... """ def get_bytes(self, **kwargs): """ Get the byte representation of the figure. Should only be used with jpg/png formats. """ # Make sure format is correct fmt = kwargs.get('format', mpl_config.get('format')) if fmt == 'svg': raise ValueError("get_bytes() does not support svg, use png or jpg") # Express the image as bytes buf = BytesIO() self.print_figure(buf, **kwargs) fmt = fmt.encode() if sys.version_info >= (3, 4) and sys.version_info < (3, 5): byte_str = bytes("data:image/%s;base64," %fmt, "utf-8") else: byte_str = b"data:image/%s;base64," %fmt byte_str += base64.b64encode(buf.getvalue()) # Python3 forces all strings to default to unicode, but for raster image # formats (e.g. png, jpg), we want to work with bytes.
Thus this step is # needed to ensure compatibility for all python versions. byte_str = byte_str.decode('ascii') buf.close() return byte_str def get_svg(self, **kwargs): """ Get the svg representation of the figure. Should only be used with svg format. """ # Make sure format is correct fmt = kwargs.get('format', mpl_config.get('format')) if fmt != 'svg': raise ValueError("get_svg() does not support png or jpg, use svg") # For SVG the data string has to be unicode, not bytes buf = StringIO() self.print_figure(buf, **kwargs) svg_str = buf.getvalue() buf.close() return svg_str def draw_idle(self, *args, **kwargs): """ Called when the figure gets updated (eg through a plotting command). This is overridden to allow open figures to be reshown after they are updated when mpl_config.get('close') is False. """ if not self._is_idle_drawing: with self._idle_draw_cntx(): self.draw(*args, **kwargs) draw_if_interactive() class FigureManagerZInline(FigureManagerBase): """ Wrap everything up into a window for the pylab interface """ def __init__(self, canvas, num): FigureManagerBase.__init__(self, canvas, num) self.fig_id = "figure_{0}".format(uuid.uuid4().hex) self._shown = False def angular_bind(self, **kwargs): """ Bind figure data to Zeppelin's Angular Object Registry. If mpl_config("angular") is True and PY4J is supported, this allows for the possibility to interactively update a figure from a separate paragraph without having to display it multiple times. """ # This doesn't work for SVG so make sure it's not our format fmt = kwargs.get('format', mpl_config.get('format')) if fmt == 'svg': return # Get the figure data as a byte array src = self.canvas.get_bytes(**kwargs) # Flag to determine whether or not to use # zeppelin's angular display system angular = mpl_config.get('angular') # ZeppelinContext instance (requires PY4J) context = mpl_config.get('context') # Finally we must ensure that automatic closing is set to False, # as otherwise using the angular display system is pointless close = mpl_config.get('close') # If above conditions are met, bind the figure data to # the Angular Object Registry. if not close and angular: if hasattr(context, 'angularBind'): # Binding is performed through figure ID to ensure this works # if multiple figures are open context.angularBind(self.fig_id, src) # Zeppelin will automatically replace this value even if it # is updated from another paragraph thanks to the {{}} notation src = "{{%s}}" %self.fig_id else: warnings.warn("Cannot bind figure to Angular Object Registry. " "Check if PY4J is installed.") return src def angular_unbind(self): """ Unbind figure from angular display system. """ context = mpl_config.get('context') if hasattr(context, 'angularUnbind'): context.angularUnbind(self.fig_id) def destroy(self): """ Called when close=True or implicitly by pyplot.close(). Overridden to automatically clean up the angular object registry. """ self.angular_unbind() def show(self, **kwargs): if not self._shown: zdisplay(self.canvas.figure, **kwargs) else: self.canvas.draw_idle() self.angular_bind(**kwargs) self._shown = True def draw_if_interactive(): """ If interactive mode is on, this allows for updating properties of the figure when each new plotting command is called. """ manager = Gcf.get_active() interactive = matplotlib.is_interactive() angular = mpl_config.get('angular') # Don't bother continuing if we aren't in interactive mode # or if there are no active figures. Also pointless to continue # in angular mode as we don't want to reshow the figure.
if not interactive or angular or manager is None: return # Allow for figure to be reshown if close is false since # this function call implies that it has been updated if not mpl_config.get('close'): manager._shown = False def new_figure_manager(num, *args, **kwargs): """ Create a new figure manager instance """ # if a main-level app must be created, this (and # new_figure_manager_given_figure) is the usual place to # do it -- see backend_wx, backend_wxagg and backend_tkagg for # examples. Not all GUIs require explicit instantiation of a # main-level app (e.g. backend_gtk, backend_gtkagg) for pylab FigureClass = kwargs.pop('FigureClass', Figure) thisFig = FigureClass(*args, **kwargs) return new_figure_manager_given_figure(num, thisFig) def new_figure_manager_given_figure(num, figure): """ Create a new figure manager instance for the given figure. """ canvas = FigureCanvasZInline(figure) manager = FigureManagerZInline(canvas, num) return manager ######################################################################## # # Backend specific functions # ######################################################################## def zdisplay(fig, **kwargs): """ Publishes a matplotlib figure to the notebook paragraph output. """ # kwargs can be width or height (in units supported by div tag) width = kwargs.pop('width', 'auto') height = kwargs.pop('height', 'auto') fmt = kwargs.get('format', mpl_config.get('format')) # Check if format is supported supported_formats = mpl_config.get('supported_formats') if fmt not in supported_formats: raise ValueError("Unsupported format %s" %fmt) # For SVG the data string has to be unicode, not bytes if fmt == 'svg': img = fig.canvas.get_svg(**kwargs) # This is needed to ensure the SVG image is the correct size. # We should find a better way to do this... width = '{}px'.format(mpl_config.get('width')) height = '{}px'.format(mpl_config.get('height')) else: # Express the image as bytes src = fig.canvas.manager.angular_bind(**kwargs) img = "<img src={src} style='width:{width};height:{height}'>" img = img.format(src=src, width=width, height=height) # Print the image to the notebook paragraph via the %html magic html = "<div style='width:{width};height:{height}'>{img}</div>" print(html.format(width=width, height=height, img=img)) def displayhook(): """ Called post paragraph execution if interactive mode is on """ if matplotlib.is_interactive(): show() ######################################################################## # # Now just provide the standard names that backend.__init__ is expecting # ######################################################################## # Create a reference to the show function we are using. This is what actually # gets called by matplotlib.pyplot.show(). show = Show() # Default FigureCanvas and FigureManager classes to use from the backend FigureCanvas = FigureCanvasZInline FigureManager = FigureManagerZInline
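# ------------------------------------------------------------------------
# Minimal smoke test (illustrative, not part of the original backend): push a
# figure through the classes above without a running Zeppelin; the %html magic
# and <img> tag are simply printed to stdout. Assumes the mpl_config defaults
# already select a supported raster format such as png.
if __name__ == '__main__':
    fig = Figure()
    ax = fig.add_subplot(111)
    ax.plot([0, 1, 2], [0, 1, 4])
    manager = new_figure_manager_given_figure(1, fig)
    manager.show()  # prints '%html' followed by an <img> tag with base64 data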
apache-2.0
Lyleo/nupic
external/linux32/lib/python2.6/site-packages/matplotlib/backends/backend_wxagg.py
70
9051
from __future__ import division """ backend_wxagg.py A wxPython backend for Agg. This uses the GUI widgets written by Jeremy O'Donoghue ([email protected]) and the Agg backend by John Hunter ([email protected]) Copyright (C) 2003-5 Jeremy O'Donoghue, John Hunter, Illinois Institute of Technology License: This work is licensed under the matplotlib license (PSF compatible). A copy should be included with this source code. """ import wx import matplotlib from matplotlib.figure import Figure from backend_agg import FigureCanvasAgg import backend_wx from backend_wx import FigureManager, FigureManagerWx, FigureCanvasWx, \ FigureFrameWx, DEBUG_MSG, NavigationToolbarWx, NavigationToolbar2Wx, \ error_msg_wx, draw_if_interactive, show, Toolbar, backend_version class FigureFrameWxAgg(FigureFrameWx): def get_canvas(self, fig): return FigureCanvasWxAgg(self, -1, fig) def _get_toolbar(self, statbar): if matplotlib.rcParams['toolbar']=='classic': toolbar = NavigationToolbarWx(self.canvas, True) elif matplotlib.rcParams['toolbar']=='toolbar2': toolbar = NavigationToolbar2WxAgg(self.canvas) toolbar.set_status_bar(statbar) else: toolbar = None return toolbar class FigureCanvasWxAgg(FigureCanvasAgg, FigureCanvasWx): """ The FigureCanvas contains the figure and does event handling. In the wxPython backend, it is derived from wxPanel, and (usually) lives inside a frame instantiated by a FigureManagerWx. The parent window probably implements a wxSizer to control the displayed control size - but we give a hint as to our preferred minimum size. """ def draw(self, drawDC=None): """ Render the figure using agg. """ DEBUG_MSG("draw()", 1, self) FigureCanvasAgg.draw(self) self.bitmap = _convert_agg_to_wx_bitmap(self.get_renderer(), None) self._isDrawn = True self.gui_repaint(drawDC=drawDC) def blit(self, bbox=None): """ Transfer the region of the agg buffer defined by bbox to the display. If bbox is None, the entire buffer is transferred. """ if bbox is None: self.bitmap = _convert_agg_to_wx_bitmap(self.get_renderer(), None) self.gui_repaint() return l, b, w, h = bbox.bounds r = l + w t = b + h x = int(l) y = int(self.bitmap.GetHeight() - t) srcBmp = _convert_agg_to_wx_bitmap(self.get_renderer(), None) srcDC = wx.MemoryDC() srcDC.SelectObject(srcBmp) destDC = wx.MemoryDC() destDC.SelectObject(self.bitmap) destDC.BeginDrawing() destDC.Blit(x, y, int(w), int(h), srcDC, x, y) destDC.EndDrawing() destDC.SelectObject(wx.NullBitmap) srcDC.SelectObject(wx.NullBitmap) self.gui_repaint() filetypes = FigureCanvasAgg.filetypes def print_figure(self, filename, *args, **kwargs): # Use pure Agg renderer to draw FigureCanvasAgg.print_figure(self, filename, *args, **kwargs) # Restore the current view; this is needed because the # artist contains methods that rely on particular attributes # of the rendered figure for determining things like # bounding boxes.
if self._isDrawn: self.draw() class NavigationToolbar2WxAgg(NavigationToolbar2Wx): def get_canvas(self, frame, fig): return FigureCanvasWxAgg(frame, -1, fig) def new_figure_manager(num, *args, **kwargs): """ Create a new figure manager instance """ # in order to expose the Figure constructor to the pylab # interface we need to create the figure here DEBUG_MSG("new_figure_manager()", 3, None) backend_wx._create_wx_app() FigureClass = kwargs.pop('FigureClass', Figure) fig = FigureClass(*args, **kwargs) frame = FigureFrameWxAgg(num, fig) figmgr = frame.get_figure_manager() if matplotlib.is_interactive(): figmgr.frame.Show() return figmgr # # agg/wxPython image conversion functions (wxPython <= 2.6) # def _py_convert_agg_to_wx_image(agg, bbox): """ Convert the region of the agg buffer bounded by bbox to a wx.Image. If bbox is None, the entire buffer is converted. Note: agg must be a backend_agg.RendererAgg instance. """ image = wx.EmptyImage(int(agg.width), int(agg.height)) image.SetData(agg.tostring_rgb()) if bbox is None: # agg => rgb -> image return image else: # agg => rgb -> image => bitmap => clipped bitmap => image return wx.ImageFromBitmap(_clipped_image_as_bitmap(image, bbox)) def _py_convert_agg_to_wx_bitmap(agg, bbox): """ Convert the region of the agg buffer bounded by bbox to a wx.Bitmap. If bbox is None, the entire buffer is converted. Note: agg must be a backend_agg.RendererAgg instance. """ if bbox is None: # agg => rgb -> image => bitmap return wx.BitmapFromImage(_py_convert_agg_to_wx_image(agg, None)) else: # agg => rgb -> image => bitmap => clipped bitmap return _clipped_image_as_bitmap( _py_convert_agg_to_wx_image(agg, None), bbox) def _clipped_image_as_bitmap(image, bbox): """ Convert the region of a wx.Image bounded by bbox to a wx.Bitmap. """ l, b, width, height = bbox.get_bounds() r = l + width t = b + height srcBmp = wx.BitmapFromImage(image) srcDC = wx.MemoryDC() srcDC.SelectObject(srcBmp) destBmp = wx.EmptyBitmap(int(width), int(height)) destDC = wx.MemoryDC() destDC.SelectObject(destBmp) destDC.BeginDrawing() x = int(l) y = int(image.GetHeight() - t) destDC.Blit(0, 0, int(width), int(height), srcDC, x, y) destDC.EndDrawing() srcDC.SelectObject(wx.NullBitmap) destDC.SelectObject(wx.NullBitmap) return destBmp # # agg/wxPython image conversion functions (wxPython >= 2.8) # def _py_WX28_convert_agg_to_wx_image(agg, bbox): """ Convert the region of the agg buffer bounded by bbox to a wx.Image. If bbox is None, the entire buffer is converted. Note: agg must be a backend_agg.RendererAgg instance. """ if bbox is None: # agg => rgb -> image image = wx.EmptyImage(int(agg.width), int(agg.height)) image.SetData(agg.tostring_rgb()) return image else: # agg => rgba buffer -> bitmap => clipped bitmap => image return wx.ImageFromBitmap(_WX28_clipped_agg_as_bitmap(agg, bbox)) def _py_WX28_convert_agg_to_wx_bitmap(agg, bbox): """ Convert the region of the agg buffer bounded by bbox to a wx.Bitmap. If bbox is None, the entire buffer is converted. Note: agg must be a backend_agg.RendererAgg instance. """ if bbox is None: # agg => rgba buffer -> bitmap return wx.BitmapFromBufferRGBA(int(agg.width), int(agg.height), agg.buffer_rgba(0, 0)) else: # agg => rgba buffer -> bitmap => clipped bitmap return _WX28_clipped_agg_as_bitmap(agg, bbox) def _WX28_clipped_agg_as_bitmap(agg, bbox): """ Convert the region of the agg buffer bounded by bbox to a wx.Bitmap. Note: agg must be a backend_agg.RendererAgg instance.
""" l, b, width, height = bbox.get_bounds() r = l + width t = b + height srcBmp = wx.BitmapFromBufferRGBA(int(agg.width), int(agg.height), agg.buffer_rgba(0, 0)) srcDC = wx.MemoryDC() srcDC.SelectObject(srcBmp) destBmp = wx.EmptyBitmap(int(width), int(height)) destDC = wx.MemoryDC() destDC.SelectObject(destBmp) destDC.BeginDrawing() x = int(l) y = int(int(agg.height) - t) destDC.Blit(0, 0, int(width), int(height), srcDC, x, y) destDC.EndDrawing() srcDC.SelectObject(wx.NullBitmap) destDC.SelectObject(wx.NullBitmap) return destBmp def _use_accelerator(state): """ Enable or disable the WXAgg accelerator, if it is present and is also compatible with whatever version of wxPython is in use. """ global _convert_agg_to_wx_image global _convert_agg_to_wx_bitmap if getattr(wx, '__version__', '0.0')[0:3] < '2.8': # wxPython < 2.8, so use the C++ accelerator or the Python routines if state and _wxagg is not None: _convert_agg_to_wx_image = _wxagg.convert_agg_to_wx_image _convert_agg_to_wx_bitmap = _wxagg.convert_agg_to_wx_bitmap else: _convert_agg_to_wx_image = _py_convert_agg_to_wx_image _convert_agg_to_wx_bitmap = _py_convert_agg_to_wx_bitmap else: # wxPython >= 2.8, so use the accelerated Python routines _convert_agg_to_wx_image = _py_WX28_convert_agg_to_wx_image _convert_agg_to_wx_bitmap = _py_WX28_convert_agg_to_wx_bitmap # try to load the WXAgg accelerator try: import _wxagg except ImportError: _wxagg = None # if it's present, use it _use_accelerator(True)
gpl-3.0
leejz/meta-omics-scripts
generate_kgml_graphics.py
1
13581
#!/usr/bin/env python """ -------------------------------------------------------------------------------- Created: Jackson Lee 9/29/14 This script reads in a tab delimited combined coverage file from consolidate_coverage.py of phylogeny, protein classifications, and annotations uses the biopython KGML libraries to generate graphics of KEGG pathways. Input file: Phylogeny Organism Protein Classification RPKM1 RPKM2 ... 2 H. Monster Function|K00003 4211.629513 ... 2 H. Monster Function|K00012 2752.574388 3 ... ... Output A series of mapping files for each bin over each time point -------------------------------------------------------------------------------- usage: generate_kgml_graphics.py -i in.file -d out.directory """ #------------------------------------------------------------------------------- # # #Code from: http://armchairbiology.blogspot.co.uk/2013/02/keggwatch-part-iii.html #Generating KEGG maps example 2 #------------------------------------------------------------------------------- #Header - Linkers, Libs, Constants from string import strip from argparse import ArgumentParser, RawDescriptionHelpFormatter import pandas as pd import os import bisect from numpy import log10, arange import KGML_parser from KGML_scrape import retrieve_KEGG_pathway from KGML_vis import KGMLCanvas from Bio.Graphics import ColorSpiral # List of 2010 IDs for metabolic pathways metabolic = ["ko00010", "ko00020", "ko00030", "ko00040", "ko00051", "ko00052", "ko00053", "ko00061", "ko00062", "ko00071", "ko00072", "ko00100", "ko00120", "ko00121", "ko00130", "ko00140", "ko00190", "ko00195", "ko00196", "ko00230", "ko00231", "ko00232", "ko00240", "ko00250", "ko00253", "ko00260", "ko00270", "ko00280", "ko00281", "ko00290", "ko00300", "ko00310", "ko00311", "ko00312", "ko00330", "ko00331", "ko00340", "ko00350", "ko00351", "ko00360", "ko00361", "ko00362", "ko00363", "ko00364", "ko00380", "ko00400", "ko00401", "ko00402", "ko00410", "ko00430", "ko00440", "ko00450", "ko00460", "ko00471", "ko00472", "ko00473", "ko00480", "ko00500", "ko00510", "ko00511", "ko00512", "ko00513", "ko00514", "ko00520", "ko00521", "ko00522", "ko00523", "ko00524", "ko00531", "ko00532", "ko00533", "ko00534", "ko00540", "ko00550", "ko00561", "ko00562", "ko00563", "ko00564", "ko00565", "ko00590", "ko00591", "ko00592", "ko00600", "ko00601", "ko00603", "ko00604", "ko00620", "ko00621", "ko00622", "ko00623", "ko00624", "ko00625", "ko00626", "ko00627", "ko00630", "ko00633", "ko00640", "ko00642", "ko00643", "ko00650", "ko00660", "ko00670", "ko00680", "ko00710", "ko00720", "ko00730", "ko00740", "ko00750", "ko00760", "ko00770", "ko00780", "ko00785", "ko00790", "ko00791", "ko00830", "ko00860", "ko00900", "ko00901", "ko00902", "ko00903", "ko00904", "ko00905", "ko00906", "ko00908", "ko00909", "ko00910", "ko00920", "ko00930", "ko00940", "ko00941", "ko00942", "ko00943", "ko00944", "ko00945", "ko00950", "ko00960", "ko00965", "ko00966", "ko00970", "ko00980", "ko00981", "ko00982", "ko00983", "ko01040", "ko01051", "ko01053", "ko01055", "ko01056", "ko01057", "ko01058", "ko01100", "ko01110", "ko01120", "ko04070"] # List of 2010 IDs for non-metabolic pathways non_metabolic = ["ko02010", "ko02020", "ko02030", "ko02040", "ko02060", "ko03008", "ko03010", "ko03013", "ko03015", "ko03018", "ko03020", "ko03022", "ko03030", "ko03040", "ko03050", "ko03060", "ko03070", "ko03320", "ko03410", "ko03420", "ko03430", "ko03440", "ko03450", "ko04010", "ko04011", "ko04012", "ko04013", "ko04020", "ko04060", "ko04062", "ko04070", "ko04075", "ko04080", "ko04110", "ko04111", "ko04112", 
"ko04113", "ko04114", "ko04115", "ko04120", "ko04122", "ko04130", "ko04140", "ko04141", "ko04142", "ko04144", "ko04145", "ko04146", "ko04150", "ko04210", "ko04260", "ko04270", "ko04310", "ko04320", "ko04330", "ko04340", "ko04350", "ko04360", "ko04370", "ko04380", "ko04510", "ko04512", "ko04514", "ko04520", "ko04530", "ko04540", "ko04610", "ko04612", "ko04614", "ko04620", "ko04621", "ko04622", "ko04623", "ko04626", "ko04630", "ko04640", "ko04650", "ko04660", "ko04662", "ko04664", "ko04666", "ko04670", "ko04672", "ko04710", "ko04711", "ko04712", "ko04720", "ko04722", "ko04730", "ko04740", "ko04742", "ko04744", "ko04745", "ko04810", "ko04910", "ko04912", "ko04914", "ko04916", "ko04920", "ko04930", "ko04940", "ko04950", "ko04960", "ko04961", "ko04962", "ko04964", "ko04966", "ko04970", "ko04971", "ko04972", "ko04973", "ko04974", "ko04975", "ko04976", "ko04977", "ko04978", "ko05010", "ko05012", "ko05014", "ko05016", "ko05020", "ko05100", "ko05110", "ko05111", "ko05120", "ko05130", "ko05131", "ko05140", "ko05142", "ko05143", "ko05144", "ko05145", "ko05146", "ko05150", "ko05152", "ko05160", "ko05162", "ko05200", "ko05210", "ko05211", "ko05212", "ko05213", "ko05214", "ko05215", "ko05216", "ko05217", "ko05218", "ko05219", "ko05220", "ko05221", "ko05222", "ko05223", "ko05310", "ko05320", "ko05322", "ko05323", "ko05330", "ko05332", "ko05340", "ko05410", "ko05412", "ko05414", "ko05416"] #all_kegg = metabolic + non_metabolic #essential all_kegg = ["ko00010", "ko00020", "ko00030", "ko00040", "ko00051", "ko00052", "ko00053", "ko00061", "ko00071", "ko00190", "ko00195", "ko00196", "ko00230", "ko00240", "ko00250", "ko00260", "ko00270", "ko00500", "ko00510", "ko00520", "ko00562", "ko00620", "ko00625", "ko00630", "ko00640", "ko00650", "ko00660", "ko00680", "ko00710", "ko00720", "ko00910", "ko00920", "ko01100", "ko01110", "ko01120", "ko02010", "ko02020", "ko02060", "ko03070", "ko04710"] #for bin 27 #all_kegg = ["ko00010", "ko00020", "ko00030", "ko00190", "ko00195", "ko00620", "ko00630", "ko00640", "ko00650", "ko00660", "ko00680", "ko00720", "ko00910", "ko00920", "ko01100", "ko01110", "ko01120", "ko02010", "ko02020", "ko03070", "ko04122"] #bare set #all_kegg = ["ko00010", "ko00020", "ko01100", "ko01110", "ko01120"] #------------------------------------------------------------------------------- #function declarations #------------------------------------------------------------------------------- #Body print "Running..." if __name__ == '__main__': parser = ArgumentParser(usage = "generate_kgml_graphics.py -i in.file -d \ out.directory", description=__doc__, formatter_class=RawDescriptionHelpFormatter) parser.add_argument("-i", "--input_file", action="store", dest="inputfilename", help="text input file") parser.add_argument("-d", "--output_directory", action="store", dest="outputdirectory", help="text output file") parser.add_argument("-K", "--KEGG_directory", action="store", dest="KEGGdirectory", help="path to KEGG kgml files") options = parser.parse_args() mandatories = ["inputfilename","outputdirectory", "KEGGdirectory"] for m in mandatories: if not options.__dict__[m]: print "\nError: Missing Arguments\n" parser.print_help() exit(-1) outputdirectory = options.outputdirectory inputfilename = options.inputfilename keggdir = options.KEGGdirectory if not os.path.exists(outputdirectory): os.makedirs(outputdirectory) else: print "\nError: Directory exists!\n" parser.print_help() exit(-1) print "Reading in datafile..." 
with open(inputfilename,'U') as infile: combined = pd.read_csv(infile, header=0, sep='\t') combined.columns = ["Phylogeny", "Organism", "Protein Classification"] + combined.columns.tolist()[3:] combined["Protein Classification"] = combined["Protein Classification"].str.replace('^.*\|', '') rpkm_columns = combined.columns[3:] log10_columns = [column_name + '_log10' for column_name in rpkm_columns] combined[log10_columns] = combined[rpkm_columns].applymap(lambda x: log10(float(x)) if float(x) > 0 else 0) bin_list = list(combined.Phylogeny.unique()) bin_list.sort() #cs = ColorSpiral(a=2, b=0.2, v_init=0.85, v_final=0.5, jitter=0.03) print "Generating graphics..." for bin in bin_list: working_df = combined[combined.Phylogeny == bin] os.makedirs(outputdirectory + '/' + str(bin)) #.reindex(index='Protein Classification') for timepoint, label in zip(log10_columns,rpkm_columns): # find rpkm ranges and set color palette min_rpkm = working_df[working_df[timepoint] != 0][timepoint].min() max_rpkm = working_df[working_df[timepoint] != 0][timepoint].max() cutoff_rpkm = working_df[working_df[timepoint] != 0][timepoint].median() color_range = arange(min_rpkm, max_rpkm, (max_rpkm-min_rpkm)/100) color_dict = ColorSpiral.get_color_dict(color_range, a=6, b=0.7, v_init=0.7, v_final=0.55, jitter=0.00) print 'Generating ' + outputdirectory + '/' + str(bin) + '/' + str(bin) + '.' + label for map in all_kegg: outfilename = outputdirectory + '/' + str(bin) + '/' + str(bin) + '.' + label + '.' + map + '.pdf' #print 'Opening ' + keggdir + '/' + map + '.kgml' pathway = KGML_parser.read(open(keggdir + '/' + map + '.kgml', 'U')) kgml_map = KGMLCanvas(pathway, show_maps=False) kgml_map.fontsize = 9 special_maps = ['ko01100','ko01110','ko01120'] if pathway.name.split('path:')[1] in special_maps: entries = [e for e in pathway.orthologs] for entry in entries: ename = entry.name.split('ko:')[1:] ename = [i[:6].lower() for i in ename] erpkm = working_df.loc[working_df["Protein Classification"].isin(ename),label].sum() erpkm = log10(erpkm) if erpkm > 0 else float('-inf') if erpkm < min_rpkm: for g in entry.graphics: g.fgcolor = '#CCCCCC' g.width = .4 else: for g in entry.graphics: g.width = 2 if erpkm > cutoff_rpkm: for g in entry.graphics: g.width = 10 kgml_map.show_colorbar_legend = False kgml_map.import_imagemap = False kgml_map.show_maps = True else: kgml_map.set_colorbar_legend(minmax=['1e%.2f' % min_rpkm,'1e%.2f' % max_rpkm], wh_dims = [60.0, 5.0], xypos= [35.0, 5.0], color_dict=color_dict) orthologs = [e for e in pathway.orthologs] for ortholog in orthologs: oname = ortholog.name.split('ko:')[1:] oname = [i[:6].lower() for i in oname] orpkm = working_df.loc[working_df["Protein Classification"].isin(oname),label].sum() if orpkm != 0: orpkm = log10(orpkm) if orpkm > max_rpkm: orpkm = max_rpkm if orpkm <= 0: orpkm = min_rpkm if bisect.bisect_left(color_range, orpkm) > len(color_range)-1: ocolor = color_dict[color_range[-1]] else: ocolor = color_dict[color_range[bisect.bisect_left(color_range, orpkm)]] for element in ortholog.graphics: element.bgcolor = ocolor # set figure display attributes kgml_map.import_imagemap = True #kgml_map.show_maps = True kgml_map.show_maps = False #kgml_map.show_orthologs = False kgml_map.draw_relations = False kgml_map.show_compounds = False #kgml_map.show_genes = False # And rendering elements as an overlay #kgml_map.show_compounds = True kgml_map.show_genes = True kgml_map.show_orthologs = True # Default settings are for the KGML elements only kgml_map.draw(outfilename) print
"Done!"
mit
eteq/bokeh
bokeh/charts/builder/tests/test_area_builder.py
33
3666
""" This is the Bokeh charts testing interface. """ #----------------------------------------------------------------------------- # Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved. # # Powered by the Bokeh Development Team. # # The full license is in the file LICENSE.txt, distributed with this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- from __future__ import absolute_import from collections import OrderedDict import unittest import numpy as np from numpy.testing import assert_array_equal, assert_array_almost_equal import pandas as pd from bokeh.charts import Area from bokeh.models import DataRange1d, Range1d from bokeh.charts.builder.tests._utils import create_chart #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- class TestAreaBuilder(unittest.TestCase): def test_supported_input(self): xyvalues = OrderedDict( python=[2, 3, 7, 5, 26], pypy=[12, 33, 47, 15, 126], jython=[22, 43, 10, 25, 26], ) # prepare some data to check tests results... zeros = np.zeros(5) x = np.array([4,3,2,1,0,0,1,2,3,4]) y_jython = np.hstack((zeros, np.array(xyvalues['jython']))) y_pypy = np.hstack((zeros, np.array(xyvalues['pypy']))) y_python = np.hstack((zeros, np.array(xyvalues['python']))) data_keys = ['x', 'y_jython', 'y_pypy', 'y_python'] for _xy in [xyvalues, dict(xyvalues), pd.DataFrame(xyvalues)]: area = create_chart(Area, _xy) builder = area._builders[0] self.assertEqual(sorted(builder._groups), sorted(list(xyvalues.keys()))) self.assertListEqual(sorted(builder._data.keys()), data_keys) assert_array_equal(builder._data['x'], x) assert_array_equal(builder._data['y_jython'], y_jython) assert_array_equal(builder._data['y_pypy'], y_pypy) assert_array_equal(builder._data['y_python'], y_python) self.assertIsInstance(area.x_range, DataRange1d) self.assertIsInstance(area.y_range, Range1d) assert_array_almost_equal(area.y_range.start, -12.6, decimal=4) assert_array_almost_equal(area.y_range.end, 138.6, decimal=4) self.assertEqual(builder._source._data, builder._data) data_keys = ['x', 'y_0', 'y_1', 'y_2'] lvalues = [[2, 3, 7, 5, 26], [12, 33, 47, 15, 126], [22, 43, 10, 25, 26]] y_0, y_1, y_2 = y_python, y_pypy, y_jython for _xy in [lvalues, np.array(lvalues)]: area = create_chart(Area, _xy) builder = area._builders[0] self.assertEqual(builder._groups, ['0', '1', '2']) self.assertListEqual(sorted(builder._data.keys()), data_keys) assert_array_equal(builder._data['x'], x) assert_array_equal(builder._data['y_0'], y_0) assert_array_equal(builder._data['y_1'], y_1) assert_array_equal(builder._data['y_2'], y_2) self.assertIsInstance(area.x_range, DataRange1d) self.assertIsInstance(area.y_range, Range1d) assert_array_almost_equal(area.y_range.start, -12.6, decimal=4) assert_array_almost_equal(area.y_range.end, 138.6, decimal=4) self.assertEqual(builder._source._data, builder._data)
bsd-3-clause
huzq/scikit-learn
examples/manifold/plot_swissroll.py
20
1277
""" =================================== Swiss Roll reduction with LLE =================================== An illustration of Swiss Roll reduction with locally linear embedding """ # Author: Fabian Pedregosa -- <[email protected]> # License: BSD 3 clause (C) INRIA 2011 print(__doc__) import matplotlib.pyplot as plt # This import is needed to modify the way figure behaves from mpl_toolkits.mplot3d import Axes3D Axes3D #---------------------------------------------------------------------- # Locally linear embedding of the swiss roll from sklearn import manifold, datasets X, color = datasets.make_swiss_roll(n_samples=1500) print("Computing LLE embedding") X_r, err = manifold.locally_linear_embedding(X, n_neighbors=12, n_components=2) print("Done. Reconstruction error: %g" % err) #---------------------------------------------------------------------- # Plot result fig = plt.figure() ax = fig.add_subplot(211, projection='3d') ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=plt.cm.Spectral) ax.set_title("Original data") ax = fig.add_subplot(212) ax.scatter(X_r[:, 0], X_r[:, 1], c=color, cmap=plt.cm.Spectral) plt.axis('tight') plt.xticks([]), plt.yticks([]) plt.title('Projected data') plt.show()
bsd-3-clause
Caseyftw/astronet
astronet/visualization/base.py
1
19717
''' Created on 26.07.2016 @author: Fabian Gieseke, Cas van den Bogaard ''' #import matplotlib #matplotlib.use('Agg') import os import numpy as np import theano import theano.tensor as T from lasagne.layers import get_output from itertools import product from mpl_toolkits.mplot3d.axes3d import Axes3D from mpl_toolkits.axes_grid1 import make_axes_locatable import matplotlib.pyplot as plt from matplotlib import cm from sklearn_evaluation import plot as sklearnplot from sklearn.metrics import matthews_corrcoef, accuracy_score, precision_score, recall_score, roc_curve import matplotlib.ticker as ticker from nolearn.lasagne.visualize import occlusion_heatmap, plot_conv_activity from sklearn import metrics import seaborn import pandas from .io import store_results, ensure_dir import warnings warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib") def _colorbar_fmt(x, pos): fm = '% *d' % (5, x) return fm # TO DO: # Allow both RGB images and a subplot for each layer. def plot_image(img, ofname, titles=None, figsize=(10,5), mode="2d"): """ Plots and saves a given image, one subplot for each dimension. Parameters ---------- img : array-like The image that is to be plotted. ofname : string Filename for the output image. titles : list, default None A list of strings that are used as title for each of the plotted dimensions. figsize: tuple, default (10,5) Size of the output figure. mode : string Mode of plotting. Either '2d' or '3d'. """ assert mode in ["2d", "3d"] fig = plt.figure(tight_layout=True, figsize=figsize) for i in range(img.shape[0]): if mode == "3d": ax = fig.add_subplot(1, img.shape[0], i + 1, projection='3d') X, Y = np.meshgrid(np.arange(img.shape[1]), np.arange(img.shape[2])) surf = ax.plot_surface(X, Y, img[i, :, :], rstride=1, cstride=1, cmap=cm.coolwarm, linewidth=0, antialiased=False) ax.view_init(15, 15) if titles is not None: ax.set_title(titles[i]) elif mode == "2d": ax = fig.add_subplot(1, img.shape[0], i + 1) ax.xaxis.set_visible(False) ax.yaxis.set_visible(False) if titles is not None: ax.set_title(titles[i]) im = ax.imshow(img[i,:,:]) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(im, cax=cax, format=ticker.FuncFormatter(_colorbar_fmt)) else: raise Exception("Unknown plotting mode: %s" % str(mode)) plt.savefig(ofname, bbox_inches='tight') plt.close() # TO DO: # Add plot_loss and draw_to_file class PlotHandler(): """ Class that helps with the plotting of graphs after testing of the performance. Parameters ---------- plots : list List of tuples, the first value being the name of the plotting method, the second a dictionary of parameters for that method. Example: plots = [ ('confusion', {}), ('roc', {'title': "ROC-curve"}), ] preds : array-like Class predictions that the model has made. preds_proba : array-like Prediction probabilities that the model has made. y_test : array-like True labels for the test patterns. X_test : array-like The test patterns. indices_test : array-like model : AstroWrapper The AstroWrapper model used to make these predictions. odir : string Relative path to the output directory. verbose: integer, default 0 Verbosity level. 
""" def __init__(self, plots, preds, preds_proba, y_test, X_test, indices_test, model, odir, verbose=0): self._methods = { "confusion": self.confusion, "roc": self.roc, "occlusion": self.occlusion, "conv weights": self.conv_weights, "conv activation": self.conv_activation, "misses": self.misses, } self.preds = preds self.preds_proba = preds_proba self.y_test = y_test self.X_test = X_test self.indices_test = indices_test self.model = model self.network = model.model self.odir = odir self.plots = plots self.verbose = verbose self.X_test_trans, self.y_test_trans = self.model.ABI_test.transform(self.X_test, self.y_test) def plot(self): """ Method to calls all selected plot methods. """ for pl in self.plots: self._methods[pl[0]](pl[1], self.odir, self.verbose) def confusion(self,args,odir,verbose): """ Plots and saves the confusion matrix. Only works for classification problems. Parameters ---------- args : dictionary, default {} Contains the key:value pairs for the parameters of this plot: 'clabels': list, default ["Negative", "Positive"] Labels for the negative and positive classes. 'ofname': string, default "confusion.png" Name for the output file. 'figsize': tuple, default (10,10) Size of the output figure. 'cmap': string, default "YlGnBu" Name of the Seaborn color map that should be used. odir : string Relative path to the output directory. verbose : integer, default 0 Verbosity level. """ clabels = args['clabels'] if 'clabels' in args else ["Negative", "Positive"] ofname = args['ofname'] if 'ofname' in args else "confusion.png" figsize = args['figsize'] if 'figsize' in args else (10,10) cmap = args['cmap'] if 'cmap' in args else "YlGnBu" cm = metrics.confusion_matrix(self.y_test, self.preds) df_cm = pandas.DataFrame(cm, index=clabels, columns = clabels) plt.figure(figsize=figsize) seaborn.set(font_scale=6.5) seaborn.heatmap(df_cm, annot=True, fmt="d", linewidths=.5, cmap=cmap, square=True, cbar=False) plt.xlabel("Prediction") plt.ylabel("True Class") plt.savefig(os.path.join(odir,ofname), bbox_inches='tight') plt.close() def roc(self,args,odir,verbose): """ Plots and saves the ROC curve. Only works for classification problems. Parameters ---------- args : dictionary, default {} Contains the key:value pairs for the parameters of this plot: 'pos_label': string or integer, default 1 Value of the label of the positive class 'ofname': string, default "confusion.png" Name for the output file. 'title': string or None, default None Title to be used in the plot. Set to None for no title. 'figsize': tuple, default (10,10) Size of the output figure. odir : string Relative path to the output directory. verbose : integer, default 0 Verbosity level. """ pos_label = args['pos_label'] if 'pos_label' in args else 1 title = args['title'] if 'title' in args else None ofname = args['ofname'] if 'ofname' in args else "roc.png" figsize = args['figsize'] if 'figsize' in args else (10,10) fpr, tpr, thres = roc_curve(self.y_test, self.preds_proba[:,pos_label], pos_label=pos_label) plt.plot(fpr, tpr) if title: plt.title(title) plt.xlabel("FPR") plt.ylabel("TPR") plt.savefig(os.path.join(odir, ofname)) plt.close() def occlusion(self,args,odir,verbose): """ Computes and plots the occlusion heatmap. This requires testing the image once for every pixel and thus may be slow. Parameters ---------- args : dictionary, default {} Contains the key:value pairs for the parameters of this plot: 'image_range': slice, default slice(0,1) The indices of the images for which to compute the occlusion heatmap. 
'ofname': string, default "occlusion" Name for the output files, without extension. 'title': string or None, default None Title to be used in the plot. Set to None for no title. 'figsize': tuple, default (10,10) Size of the output figure. 'square_length': integer, default 5 Length of the sides of the square used in the occlusion. odir : string Relative path to the output directory. verbose : integer, default 0 Verbosity level. """ im_range = args['image_range'] if 'image_range' in args else slice(0,1) title = args['title'] if 'title' in args else None ofname = args['ofname'] if 'ofname' in args else "occlusion" figsize = args['figsize'] if 'figsize' in args else (10,10) length = args['square_length'] if 'square_length' in args else 5 imgs = self.X_test[im_range] labels = self.y_test[im_range] for i in range(len(labels)): hm = occlusion_heatmap(self.network, imgs[i:i+1], labels[i], square_length=length) fig = plt.figure(tight_layout=True, figsize=figsize) for j in range(len(imgs[i])): ax = fig.add_subplot(1, len(imgs[i])+1, j + 1) ax.xaxis.set_visible(False) ax.yaxis.set_visible(False) im = ax.imshow(imgs[i,j,:,:]) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.05) plt.colorbar(im, cax=cax, format=ticker.FuncFormatter(_colorbar_fmt)) ax = fig.add_subplot(1, len(imgs[i])+1, len(imgs[i])+1) ax.xaxis.set_visible(False) ax.yaxis.set_visible(False) im = ax.imshow(hm, vmin=0, vmax=1) divider = make_axes_locatable(ax) cax = divider.append_axes("right", size="5%", pad=0.05) cbar = plt.colorbar(im, cax=cax) cbar.set_ticks([0,0.2,0.4,0.6,0.8,1]) cbar.set_ticklabels([0,0.2,0.4,0.6,0.8,1]) plt.savefig(os.path.join(odir, ofname+str(i)+".png"), bbox_inches='tight') plt.close() def conv_weights(self,args,odir,verbose): """ Plots the weights for a single convolutional layer. Parameters ---------- args : dictionary, default {} Contains the key:value pairs for the parameters of this plot: 'layer': integer, default 1 Index of the layer for which the weights are plotted. 'ofname': string, default "occlusion" Name for the output files, without extension. 'figsize': tuple, default (10,10) Size of the output figure. odir : string Relative path to the output directory. verbose : integer, default 0 Verbosity level. """ layer_i = args['layer'] if 'layer' in args else 1 ofname = args['ofname'] if 'ofname' in args else "weights" figsize = args['figsize'] if 'figsize' in args else (10,10) layer = self.network.layers_[layer_i] W = layer.W.get_value() shape = W.shape nrows = np.ceil(np.sqrt(shape[0])).astype(int) ncols = nrows vmin = np.min(W) vmax = np.max(W) for feature_map in range(shape[1]): fig, axes = plt.subplots(int(nrows), int(ncols), figsize=figsize, squeeze=False) for i, ax in enumerate(axes.flatten()): ax.set_xticks([]) ax.set_yticks([]) ax.axis('off') if i < shape[0]: ax.set_title( str(round(W[i, feature_map].sum(), 3)), fontsize=10) im = ax.imshow(W[i, feature_map], cmap='gray', interpolation='none', vmin=vmin, vmax=vmax) fig.subplots_adjust(right=0.8) cbar_ax = fig.add_axes([0.85, 0.15, 0.05, 0.7]) fig.colorbar(im, cax=cbar_ax) fig.savefig( os.path.join(odir, ofname+str(feature_map)+".png") ) def conv_activation(self,args,odir,verbose): """ Plots the activations of a single convolutional layer for a given image. Parameters ---------- args : dictionary, default {} Contains the key:value pairs for the parameters of this plot: 'layer': integer, default 1 Index of the convolutional layer for which the activations are plotted. 
            'image_index': integer, default 0
                Index of the image for which the activations are plotted.
            'ofname': string, default "activity.png"
                Name for the output file.
            'figsize': tuple, default (10,10)
                Size of the output figure.
        odir : string
            Relative path to the output directory.
        verbose : integer, default 0
            Verbosity level.
        """
        layer_i = args['layer'] if 'layer' in args else 1
        image_i = args['image_index'] if 'image_index' in args else 0
        ofname = args['ofname'] if 'ofname' in args else "activity.png"
        figsize = args['figsize'] if 'figsize' in args else (10, 10)

        x = self.X_test_trans[image_i:image_i+1]
        layer = self.network.layers_[layer_i]

        if x.shape[0] != 1:
            raise ValueError("Only one sample can be plotted at a time.")

        # compile theano function
        xs = T.tensor4('xs').astype(theano.config.floatX)
        get_activity = theano.function([xs], get_output(layer, xs))

        activity = get_activity(x)
        shape = activity.shape
        nrows = np.ceil(np.sqrt(shape[1])).astype(int)
        ncols = nrows

        fig, axes = plt.subplots(nrows + 1, ncols, figsize=figsize,
                                 squeeze=False)
        for i in range(x.shape[1]):
            axes[0, 1 + i].imshow(1 - x[0][i], cmap='Greys',
                                  interpolation='none')
        axes[0, 1].set_title('before', fontsize=30)
        axes[0, 2].set_title('after', fontsize=30)
        axes[0, 3].set_title('diff', fontsize=30)

        for ax in axes.flatten():
            ax.set_xticks([])
            ax.set_yticks([])
            ax.axis('off')

        for i, (r, c) in enumerate(product(range(nrows), range(ncols))):
            if i >= shape[1]:
                break
            ndim = activity[0][i].ndim
            if ndim != 2:
                raise ValueError("Wrong number of dimensions, image data should "
                                 "have 2, instead got {}".format(ndim))
            axes[r + 1, c].imshow(-activity[0][i], cmap='Greys',
                                  interpolation='none')
        fig.savefig(os.path.join(odir, ofname))
        plt.close(fig)

    # TO DO:
    # Make it so that RGB can be plotted in one image, instead of three
    def misses(self, args, odir, verbose):
        """
        Plots all images for which the predicted class is wrong.

        Parameters
        ----------
        args : dictionary, default {}
            Contains the key:value pairs for the parameters of this plot:
            'image labels': list, default ["before", "after", "diff"]
                Titles for the image panels, passed on to plot_image.
            'mode': string, default '2d'
                Plotting mode, passed on to plot_image.
        odir : string
            Relative path to the output directory.
        verbose : integer, default 0
            Verbosity level.
""" titles = args['image labels'] if 'image labels' in args else ["before", "after", "diff"] mode = args['mode'] if 'mode' in args else '2d' orig_indices = np.array(self.indices_test) misclassifications = np.array(range(len(self.y_test))) misclassifications = misclassifications[self.y_test != self.preds] misclassifications_indices = orig_indices[self.y_test != self.preds] if verbose > 0: print("Number of test elements: %i" % len(self.y_test)) print("Misclassifications: %s" % str(misclassifications_indices)) print("Plotting misclassifications ...") for i in xrange(len(misclassifications)): index = misclassifications[i] orig_index = misclassifications_indices[i] ofname = os.path.join(odir, str(self.y_test[index]), str(orig_index) + ".png") ensure_dir(ofname) plot_image(self.X_test[index], ofname, titles=titles, mode=mode) # TO DO: # Test this function, add more metrics or ways to add metrics def assess_performance(preds, y_test, odir, metrics, ofname='results.txt'): results = {} if 'MCC' in metrics: results['MCC'] = matthews_corrcoef(y_test, preds) if 'accuracy' in metrics: results['accuracy'] = accuracy_score(y_test, preds) if 'precision' in metrics: results['precision'] = precision_score(y_test, preds) if 'recall' in metrics: results['recall'] = recall_score(y_test, preds) with open(os.path.join(odir, ofname), 'a') as result_file: for m in metrics: result_file.write(m + ": " + str(results[m])) if verbose>0: print m, ": ", result[m] # TO DO: # Replace this with a plotting function def assess_classification_performance(preds, y_test, odir, plots=None, X_test=None, indices_test=None, preds_proba=None, model=None, verbose=0): """ Method for the calculation of performance metrics and creating of selected plots. Parameters ---------- preds : array-like Class predictions that the model has made. y_test : array-like True labels for the test patterns. odir : string Relative path to the output directory. plots : list or None, default None List of tuples, the first value being the name of the plotting method, the second a dictionary of parameters for that method. Example: plots = [ ('confusion', {}), ('roc', {'title': "ROC-curve"}), ] Nothing is plotted when set to None. X_test : array-like or None, default None The test patterns, if necessary for plotting. indices_test : array-like or None, default None preds_proba : array-like Prediction probabilities that the model has made. model : AstroWrapper The AstroWrapper model used to make these predictions. verbose: integer, default 0 Verbosity level. """ if plots: plot_helper = PlotHelper(plots, preds, preds_proba, y_test, X_test, indices_test, model, odir) plot_helper.plot(odir)
gpl-2.0
cloudlakecho/remote-key-pod
rolling code.py
1
1461
# from February 8 2017 to
# Malibu, California
# San Diego, California

# Source: patent US 6980655
#
# Receiving a rolling code:
#   reverse the order of the binary digits -> zero the most significant
#   digit -> set the initial trinary rolling code to zero -> subtract the
#   next highest power of three from the rolling code ->
#     if the result is bigger than zero: increment the next most
#     significant digit of the trinary rolling code
#     if the result is smaller than or equal to zero: add the next highest
#     power of three back to the rolling code and move on to the next
#     highest power of three
#
# Making the rolling code:
#
# Python 3.5 code

import random

import numpy as np
import matplotlib.pyplot as plt              # not yet used; see TODO in main()
from mpl_toolkits.mplot3d import Axes3D      # noqa: F401 (reserved for 3-D plots)


def run(total_bit):
    """Generate a rolling code of `total_bit` random ASCII letters."""
    rolling_code = []
    for i in range(total_bit):
        if random.random() > 0.5:
            each_digit = random.randint(65, 90)    # uppercase A-Z
        else:
            each_digit = random.randint(97, 122)   # lowercase a-z
        rolling_code.append(chr(each_digit))
    return rolling_code


def main():
    total_bit = 40
    total_num = 40

    rolling_code = run(total_bit)
    # print("The rolling code is %s" % rolling_code)

    group = [run(total_bit) for i in range(total_num)]
    # print(type(group), len(group), len(group[0]))
    group_2D = np.asarray(group).reshape(total_num, total_bit)
    print(group_2D)
    print(type(group_2D[0, 0]))

    # TODO: Change character to RGB color for visualization


if __name__ == '__main__':
    main()
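
# --- Sketch (editor's addition): the binary-to-trinary conversion that the
# header comments describe from patent US 6980655. This is an illustrative
# reading of the patent text, not code verified against real hardware.
def binary_to_trinary(code, n_trits=20):
    """Convert an integer rolling code into base-3 digits, most
    significant trit first, by repeatedly subtracting powers of three."""
    trits = [0] * n_trits
    for i in range(n_trits - 1, -1, -1):
        power = 3 ** i
        # increment this trit for every time the power of three still fits
        while code >= power:
            code -= power
            trits[n_trits - 1 - i] += 1
    return trits


# Example: 43 == 1*27 + 1*9 + 2*3 + 1*1 -> trits [0, 1, 1, 2, 1]
# print(binary_to_trinary(43, n_trits=5))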
apache-2.0
ColumbiaCMB/kid_readout
apps/data_taking_scripts/old_scripts/fast_noise_sweep_upstairs.py
1
3262
import matplotlib from kid_readout.roach import baseband matplotlib.use('agg') import numpy as np import time import sys from kid_readout.utils import data_file,sweeps from kid_readout.analysis.resonator import fit_best_resonator ri = baseband.RoachBasebandWide() ri.initialize() #f0s = np.load('/home/gjones/workspace/apps/f8_fit_resonances.npy') #f0s = np.load('/home/gjones/workspace/apps/first_pass_sc3x3_0813f9.npy') #f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f10_first_pass.npy')#[:4] f0s = np.load('/home/gjones/workspace/apps/sc5x4_0813f12.npy') f0s.sort() #f0s = f0s*0.998 nf = len(f0s) atonce = 4 if nf % atonce > 0: print "extending list of resonators to make a multiple of ",atonce f0s = np.concatenate((f0s,np.arange(1,1+atonce-(nf%atonce))+f0s.max())) #offsets = np.linspace(-4882.8125,4638.671875,20) #offsets = np.concatenate(([-40e3,-20e3],offsets,[20e3,40e3]))/1e6 #offsets = offsets*8 nsamp = 2**20 step = 1 f0binned = np.round(f0s*nsamp/512.0)*512.0/nsamp offset_bins = np.arange(-21,21)*step offsets = offset_bins*512.0/nsamp offsets = np.concatenate(([-40e-3,-20e-3],offsets,[20e-3,40e-3])) print f0s print len(f0s) print offsets*1e6 start = time.time() measured_freqs = sweeps.prepare_sweep(ri,f0binned,offsets,nsamp=nsamp) print "loaded waveforms in", (time.time()-start),"seconds" sys.stdout.flush() time.sleep(1) atten_list = np.linspace(15,46,8)#[30]#[35.5,33.5,46.5,43.5,40.5,37.5] #atten_list = [33.0] for atten in atten_list: df = data_file.DataFile() ri.set_dac_attenuator(atten) sweep_data = sweeps.do_prepared_sweep(ri, nchan_per_step=atonce, reads_per_step=8) df.add_sweep(sweep_data) meas_cfs = [] idxs = [] for m in range(len(f0s)): fr,s21,errors = sweep_data.select_by_freq(f0s[m]) thiscf = f0s[m] res = fit_best_resonator(fr[2:-2],s21[2:-2],errors=errors[2:-2]) delay = res.delay # delays.append(delay) s21 = s21*np.exp(2j*np.pi*res.delay*fr) res = fit_best_resonator(fr,s21,errors=errors) fmin = fr[np.abs(s21).argmin()] print "s21 fmin", fmin, "original guess",thiscf,"this fit", res.f_0,"delay",delay,"resid",res.delay if abs(res.f_0 - thiscf) > 0.1: if abs(fmin - thiscf) > 0.1: print "using original guess" meas_cfs.append(thiscf) else: print "using fmin" meas_cfs.append(fmin) else: print "using this fit" meas_cfs.append(res.f_0) idx = np.unravel_index(abs(measured_freqs - meas_cfs[-1]).argmin(),measured_freqs.shape) idxs.append(idx) print meas_cfs ri.add_tone_freqs(np.array(meas_cfs)) ri.select_bank(ri.tone_bins.shape[0]-1) ri._sync() time.sleep(0.5) nsets = len(meas_cfs)/atonce tsg = None for iset in range(nsets): selection = range(len(meas_cfs))[iset::nsets] ri.select_fft_bins(selection) ri._sync() time.sleep(0.2) dmod,addr = ri.get_data_seconds(30,demod=True) tsg = df.add_timestream_data(dmod, ri, tsg=tsg) df.sync() df.log_hw_state(ri) df.nc.sync() df.nc.close() print "completed in",((time.time()-start)/60.0),"minutes"
bsd-2-clause
bjornstenqvist/faunus
scripts/csvcompare.py
2
1793
#!/usr/bin/env python

import sys

if sys.version_info < (3, 0):
    sys.stdout.write("Sorry, Python 3 or higher required\n")
    sys.exit(1)

import argparse
from math import fabs

import pandas

parser = argparse.ArgumentParser(description='Numerically compare two CSV files')
parser.add_argument('--tol', default=0.02, type=float,
                    help='relative error tolerance (default: 0.02)')
parser.add_argument('--small', default=1e-10, type=float,
                    help='always equal if difference is smaller than this (default: 1e-10)')
parser.add_argument('--quiet', '-q', dest='quiet', action='store_true',
                    help='less output')
parser.add_argument('--csv_sep', default=' ', help='CSV separator (default: " ")')
parser.add_argument('file_ref', help='reference file')
parser.add_argument('file_new', help='new file')
args = parser.parse_args()

returncode = 0


def isapprox(a, b):
    return fabs(a - b) < args.small or fabs(a - b) < args.tol * fabs(a)


def isnumber(val):
    ''' filter to compare only ints and floats '''
    return isinstance(val, float) or isinstance(val, int)


def compare(a, b):
    ''' compare ints and floats to relative tolerance '''
    if isnumber(a) and isnumber(b):
        return isapprox(a, b)
    else:
        return a == b


dfs = [pandas.read_csv(f, sep=args.csv_sep, header=None)
       for f in (args.file_ref, args.file_new)]

if len(dfs) == 2:
    for (ref_col_name, ref_col), (new_col_name, new_col) in zip(
            *(df.iteritems() for df in dfs)):
        for ref_val, new_val in zip(ref_col, new_col):
            if not compare(ref_val, new_val):
                if returncode == 0:
                    returncode = 1
                if not args.quiet:
                    print('mismatch in col {:2d} {} {}'.format(
                        ref_col_name, ref_val, new_val))
else:
    returncode = 2

sys.exit(returncode)
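
# --- Usage sketch (editor's addition) ---
# Invocation from a shell, for two whitespace-separated CSV files:
#
#   ./csvcompare.py --tol 0.05 reference.csv candidate.csv
#   echo $?    # 0: files match, 1: at least one value mismatched
#
# The comparison is relative to the reference value: with the default
# --tol of 0.02, the pair (1.000, 1.019) compares equal while
# (1.000, 1.021) does not; differences below --small always compare equal.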
mit
arjoly/scikit-learn
sklearn/cross_decomposition/cca_.py
209
3150
from .pls_ import _PLS

__all__ = ['CCA']


class CCA(_PLS):
    """CCA Canonical Correlation Analysis.

    CCA inherits from PLS with mode="B" and deflation_mode="canonical".

    Read more in the :ref:`User Guide <cross_decomposition>`.

    Parameters
    ----------
    n_components : int, (default 2)
        Number of components to keep.

    scale : boolean, (default True)
        Whether to scale the data.

    max_iter : an integer, (default 500)
        The maximum number of iterations of the NIPALS inner loop.

    tol : non-negative real, default 1e-06
        The tolerance used in the iterative algorithm.

    copy : boolean
        Whether the deflation should be done on a copy. Leave the default
        value as True unless you don't care about side effects.

    Attributes
    ----------
    x_weights_ : array, [p, n_components]
        X block weights vectors.

    y_weights_ : array, [q, n_components]
        Y block weights vectors.

    x_loadings_ : array, [p, n_components]
        X block loadings vectors.

    y_loadings_ : array, [q, n_components]
        Y block loadings vectors.

    x_scores_ : array, [n_samples, n_components]
        X scores.

    y_scores_ : array, [n_samples, n_components]
        Y scores.

    x_rotations_ : array, [p, n_components]
        X block to latents rotations.

    y_rotations_ : array, [q, n_components]
        Y block to latents rotations.

    n_iter_ : array-like
        Number of iterations of the NIPALS inner loop for each
        component.

    Notes
    -----
    For each component k, find the weights u, v that maximize
    max corr(Xk u, Yk v), such that ``|u| = |v| = 1``

    Note that it maximizes only the correlations between the scores.

    The residual matrix of X (Xk+1) block is obtained by the deflation on the
    current X score: x_score.

    The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current Y score.

    Examples
    --------
    >>> from sklearn.cross_decomposition import CCA
    >>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [3.,5.,4.]]
    >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
    >>> cca = CCA(n_components=1)
    >>> cca.fit(X, Y)
    ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    CCA(copy=True, max_iter=500, n_components=1, scale=True, tol=1e-06)
    >>> X_c, Y_c = cca.transform(X, Y)

    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods,
    with emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.

    In French, but still a reference:
    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.

    See also
    --------
    PLSCanonical
    PLSSVD
    """

    def __init__(self, n_components=2, scale=True,
                 max_iter=500, tol=1e-06, copy=True):
        _PLS.__init__(self, n_components=n_components, scale=scale,
                      deflation_mode="canonical", mode="B",
                      norm_y_weights=True, algorithm="nipals",
                      max_iter=max_iter, tol=tol, copy=copy)
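
# --- Sketch (editor's addition, not part of the scikit-learn source) ---
# After fitting, the canonical correlation of each component is simply the
# correlation between the paired X and Y scores; using the docstring's toy
# data:
#
#   >>> import numpy as np
#   >>> from sklearn.cross_decomposition import CCA
#   >>> X = [[0., 0., 1.], [1., 0., 0.], [2., 2., 2.], [3., 5., 4.]]
#   >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
#   >>> cca = CCA(n_components=1).fit(X, Y)
#   >>> r = np.corrcoef(cca.x_scores_[:, 0], cca.y_scores_[:, 0])[0, 1]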
bsd-3-clause
HyukjinKwon/spark
python/pyspark/pandas/tests/plot/test_series_plot_plotly.py
14
8485
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest from distutils.version import LooseVersion import pprint import pandas as pd import numpy as np from pyspark import pandas as ps from pyspark.pandas.config import set_option, reset_option from pyspark.pandas.utils import name_like_string from pyspark.testing.pandasutils import ( have_plotly, plotly_requirement_message, PandasOnSparkTestCase, TestUtils, ) if have_plotly: from plotly import express import plotly.graph_objs as go @unittest.skipIf(not have_plotly, plotly_requirement_message) @unittest.skipIf( LooseVersion(pd.__version__) < "1.0.0", "pandas<1.0; pandas<1.0 does not support latest plotly and/or 'plotting.backend' option.", ) class SeriesPlotPlotlyTest(PandasOnSparkTestCase, TestUtils): @classmethod def setUpClass(cls): super().setUpClass() pd.set_option("plotting.backend", "plotly") set_option("plotting.backend", "plotly") set_option("plotting.max_rows", 1000) set_option("plotting.sample_ratio", None) @classmethod def tearDownClass(cls): pd.reset_option("plotting.backend") reset_option("plotting.backend") reset_option("plotting.max_rows") reset_option("plotting.sample_ratio") super().tearDownClass() @property def pdf1(self): return pd.DataFrame( {"a": [1, 2, 3, 4, 5, 6, 7, 8, 9, 15, 50]}, index=[0, 1, 3, 5, 6, 8, 9, 9, 9, 10, 10] ) @property def psdf1(self): return ps.from_pandas(self.pdf1) @property def psdf2(self): return ps.range(1002) @property def pdf2(self): return self.psdf2.to_pandas() def test_bar_plot(self): pdf = self.pdf1 psdf = self.psdf1 self.assertEqual(pdf["a"].plot(kind="bar"), psdf["a"].plot(kind="bar")) self.assertEqual(pdf["a"].plot.bar(), psdf["a"].plot.bar()) def test_line_plot(self): pdf = self.pdf1 psdf = self.psdf1 self.assertEqual(pdf["a"].plot(kind="line"), psdf["a"].plot(kind="line")) self.assertEqual(pdf["a"].plot.line(), psdf["a"].plot.line()) def test_barh_plot(self): pdf = self.pdf1 psdf = self.psdf1 self.assertEqual(pdf["a"].plot(kind="barh"), psdf["a"].plot(kind="barh")) def test_area_plot(self): pdf = pd.DataFrame( { "sales": [3, 2, 3, 9, 10, 6], "signups": [5, 5, 6, 12, 14, 13], "visits": [20, 42, 28, 62, 81, 50], }, index=pd.date_range(start="2018/01/01", end="2018/07/01", freq="M"), ) psdf = ps.from_pandas(pdf) self.assertEqual(pdf["sales"].plot(kind="area"), psdf["sales"].plot(kind="area")) self.assertEqual(pdf["sales"].plot.area(), psdf["sales"].plot.area()) # just a sanity check for df.col type self.assertEqual(pdf.sales.plot(kind="area"), psdf.sales.plot(kind="area")) def test_pie_plot(self): psdf = self.psdf1 pdf = psdf.to_pandas() self.assertEqual( psdf["a"].plot(kind="pie"), express.pie(pdf, values=pdf.columns[0], names=pdf.index), ) # TODO: support multi-index columns # columns = pd.MultiIndex.from_tuples([("x", "y")]) # psdf.columns = columns # 
pdf.columns = columns # self.assertEqual( # psdf[("x", "y")].plot(kind="pie"), # express.pie(pdf, values=pdf.iloc[:, 0].to_numpy(), names=pdf.index.to_numpy()), # ) # TODO: support multi-index # psdf = ps.DataFrame( # { # "a": [1, 2, 3, 4, 5, 6, 7, 8, 9, 15, 50], # "b": [2, 3, 4, 5, 7, 9, 10, 15, 34, 45, 49] # }, # index=pd.MultiIndex.from_tuples([("x", "y")] * 11), # ) # pdf = psdf.to_pandas() # self.assertEqual( # psdf["a"].plot(kind="pie"), express.pie(pdf, values=pdf.columns[0], names=pdf.index), # ) def test_hist_plot(self): def check_hist_plot(psser): bins = np.array([1.0, 5.9, 10.8, 15.7, 20.6, 25.5, 30.4, 35.3, 40.2, 45.1, 50.0]) data = np.array([5.0, 4.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]) prev = bins[0] text_bins = [] for b in bins[1:]: text_bins.append("[%s, %s)" % (prev, b)) prev = b text_bins[-1] = text_bins[-1][:-1] + "]" bins = 0.5 * (bins[:-1] + bins[1:]) name_a = name_like_string(psser.name) bars = [ go.Bar( x=bins, y=data, name=name_a, text=text_bins, hovertemplate=("variable=" + name_a + "<br>value=%{text}<br>count=%{y}"), ), ] fig = go.Figure(data=bars, layout=go.Layout(barmode="stack")) fig["layout"]["xaxis"]["title"] = "value" fig["layout"]["yaxis"]["title"] = "count" self.assertEqual( pprint.pformat(psser.plot(kind="hist").to_dict()), pprint.pformat(fig.to_dict()) ) psdf1 = self.psdf1 check_hist_plot(psdf1["a"]) columns = pd.MultiIndex.from_tuples([("x", "y")]) psdf1.columns = columns check_hist_plot(psdf1[("x", "y")]) def test_pox_plot(self): def check_pox_plot(psser): fig = go.Figure() fig.add_trace( go.Box( name=name_like_string(psser.name), q1=[3], median=[6], q3=[9], mean=[10.0], lowerfence=[1], upperfence=[15], y=[[50]], boxpoints="suspectedoutliers", notched=False, ) ) fig["layout"]["xaxis"]["title"] = name_like_string(psser.name) fig["layout"]["yaxis"]["title"] = "value" self.assertEqual( pprint.pformat(psser.plot(kind="box").to_dict()), pprint.pformat(fig.to_dict()) ) psdf1 = self.psdf1 check_pox_plot(psdf1["a"]) columns = pd.MultiIndex.from_tuples([("x", "y")]) psdf1.columns = columns check_pox_plot(psdf1[("x", "y")]) def test_pox_plot_arguments(self): with self.assertRaisesRegex(ValueError, "does not support"): self.psdf1.a.plot.box(boxpoints="all") with self.assertRaisesRegex(ValueError, "does not support"): self.psdf1.a.plot.box(notched=True) self.psdf1.a.plot.box(hovertext="abc") # other arguments should not throw an exception def test_kde_plot(self): psdf = ps.DataFrame({"a": [1, 2, 3, 4, 5]}) pdf = pd.DataFrame( { "Density": [0.05709372, 0.07670272, 0.05709372], "names": ["a", "a", "a"], "index": [-1.0, 3.0, 7.0], } ) actual = psdf.a.plot.kde(bw_method=5, ind=3) expected = express.line(pdf, x="index", y="Density") expected["layout"]["xaxis"]["title"] = None self.assertEqual(pprint.pformat(actual.to_dict()), pprint.pformat(expected.to_dict())) if __name__ == "__main__": from pyspark.pandas.tests.plot.test_series_plot_plotly import * # noqa: F401 try: import xmlrunner # type: ignore[import] testRunner = xmlrunner.XMLTestRunner(output="target/test-reports", verbosity=2) except ImportError: testRunner = None unittest.main(testRunner=testRunner, verbosity=2)
apache-2.0
galtay/neural_learner
make_learning_curves.py
1
3749
"""Make learning curves with the MNIST data. This plots the cost (and/or accuracy) of both the training set and the cross validation set as a function of the number of examples in the training set. This is a useful tool when deciding how to improve a neural netword (e.g. do we need more features, more samples, more hidden layers, ...). Good discussion here, https://www.coursera.org/learn/machine-learning/lecture/Kont7/learning-curves The variable that determines how long this module will take to run is the number of samples in each iteration (specified in the variable `m_samples`) """ import numpy import random import matplotlib.pyplot as plt import nn # load training, validatin, and test data # train: m=50000 images of n=28x28=784 pixels each # valid: m=10000 images of n=28x28=784 pixels each # test: m=10000 images of n=28x28=784 pixels each # each image (and label) is a digit in the range 0-9 #==================================================================== mnist_data = numpy.load('data/mnist_uint8_uint8.npz') train = nn.create_training_dict( mnist_data['train_features'], mnist_data['train_labels']) valid = nn.create_training_dict( mnist_data['valid_features'], mnist_data['valid_labels']) test = nn.create_training_dict( mnist_data['test_features'], mnist_data['test_labels']) # hard code layer sizes and initialize random weights #==================================================================== n_input_nodes = 784 # pixels in image n_hidden_nodes = 25 # variable n_output_nodes = 10 # number of labels layer_sizes = [n_input_nodes, n_hidden_nodes, n_output_nodes] # set initial random weights #==================================================================== random_weights = nn.initialize_random_weights(layer_sizes) weight_shapes = [w.shape for w in random_weights] # make learning curve #==================================================================== lam = 0.0 m_samples = [(i+1)*5000 for i in range(2)] train_accs = [] valid_accs = [] train_Js = [] valid_Js = [] for im in m_samples: # train res = nn.minimize( random_weights, train['Xnorm'][0:im,:], train['y1hot'][0:im,:], lam=lam) trained_weights_flat = res.x trained_weights = nn.unflatten_array(res.x, weight_shapes) # compute cost of training sample J_train = nn.compute_cost_and_grad( trained_weights_flat, train['Xnorm'][0:im,:], train['y1hot'][0:im,:], weight_shapes, lam=lam, cost_only=True) train_Js.append(J_train) # compute accuracy on training sample aa, zz = nn.feed_forward(train['Xnorm'][0:im,:], trained_weights) h = aa[-1] y_predict = numpy.argmax(h, axis=1) accuracy = (train['y'][0:im] == y_predict).astype(numpy.float).mean() train_accs.append(accuracy) # compute cost on validation sample J_valid = nn.compute_cost_and_grad( trained_weights_flat, valid['Xnorm'], valid['y1hot'], weight_shapes, lam=lam, cost_only=True) valid_Js.append(J_valid) # compute accuracy on validation sample aa, zz = nn.feed_forward(valid['Xnorm'], trained_weights) h = aa[-1] y_predict = numpy.argmax(h, axis=1) accuracy = (valid['y'] == y_predict).astype(numpy.float).mean() valid_accs.append(accuracy) print print 'Validation, Training Set Cost (m={}): {}, {}'.format( im, valid_Js[-1], train_Js[-1]) print 'Validation, Training Set Accuracy (m={}): {}, {}'.format( im, valid_accs[-1], train_accs[-1]) print plt.plot(train_Js, lw=3.0, ls='--', color='blue', label='J_train') plt.plot(valid_Js, lw=3.0, ls='-', color='red', label='J_cv') plt.legend(loc='best')
mit
anntzer/scikit-learn
sklearn/feature_selection/_variance_threshold.py
10
3395
# Author: Lars Buitinck # License: 3-clause BSD import numpy as np from ..base import BaseEstimator from ._base import SelectorMixin from ..utils.sparsefuncs import mean_variance_axis, min_max_axis from ..utils.validation import check_is_fitted class VarianceThreshold(SelectorMixin, BaseEstimator): """Feature selector that removes all low-variance features. This feature selection algorithm looks only at the features (X), not the desired outputs (y), and can thus be used for unsupervised learning. Read more in the :ref:`User Guide <variance_threshold>`. Parameters ---------- threshold : float, default=0 Features with a training-set variance lower than this threshold will be removed. The default is to keep all features with non-zero variance, i.e. remove the features that have the same value in all samples. Attributes ---------- variances_ : array, shape (n_features,) Variances of individual features. Notes ----- Allows NaN in the input. Raises ValueError if no feature in X meets the variance threshold. Examples -------- The following dataset has integer features, two of which are the same in every sample. These are removed with the default setting for threshold:: >>> X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]] >>> selector = VarianceThreshold() >>> selector.fit_transform(X) array([[2, 0], [1, 4], [1, 1]]) """ def __init__(self, threshold=0.): self.threshold = threshold def fit(self, X, y=None): """Learn empirical variances from X. Parameters ---------- X : {array-like, sparse matrix}, shape (n_samples, n_features) Sample vectors from which to compute variances. y : any, default=None Ignored. This parameter exists only for compatibility with sklearn.pipeline.Pipeline. Returns ------- self """ X = self._validate_data(X, accept_sparse=('csr', 'csc'), dtype=np.float64, force_all_finite='allow-nan') if hasattr(X, "toarray"): # sparse matrix _, self.variances_ = mean_variance_axis(X, axis=0) if self.threshold == 0: mins, maxes = min_max_axis(X, axis=0) peak_to_peaks = maxes - mins else: self.variances_ = np.nanvar(X, axis=0) if self.threshold == 0: peak_to_peaks = np.ptp(X, axis=0) if self.threshold == 0: # Use peak-to-peak to avoid numeric precision issues # for constant features compare_arr = np.array([self.variances_, peak_to_peaks]) self.variances_ = np.nanmin(compare_arr, axis=0) if np.all(~np.isfinite(self.variances_) | (self.variances_ <= self.threshold)): msg = "No feature in X meets the variance threshold {0:.5f}" if X.shape[0] == 1: msg += " (X contains only one sample)" raise ValueError(msg.format(self.threshold)) return self def _get_support_mask(self): check_is_fitted(self) return self.variances_ > self.threshold def _more_tags(self): return {'allow_nan': True}
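
# --- Usage sketch (editor's addition) ---
# Dropping a zero-variance feature from data that contains NaN, which this
# selector explicitly allows. Import the public name rather than running
# this module directly (the relative imports above need the package
# context):
#
#   >>> import numpy as np
#   >>> from sklearn.feature_selection import VarianceThreshold
#   >>> X = np.array([[0., 2., np.nan],
#   ...               [0., 1., 4.],
#   ...               [0., 1., 1.]])
#   >>> VarianceThreshold().fit_transform(X).shape  # constant column dropped
#   (3, 2)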
bsd-3-clause
nettrom/importance
python/global-predictions.py
1
10540
#!/usr/env/python # -*- coding: utf-8 -*- ''' Script to load a global dataset of articles and generate predictions. Articles will be split into two groups depending on whether we have all available view data or not. For articles that do not, we will calculate the lower end of a confidence interval to use as a view estimate. Copyright (c) 2017 Morten Wang Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' import re import logging import pickle from yaml import load import pandas as pd import numpy as np import scipy.stats as st from sklearn.preprocessing import LabelEncoder from sklearn.ensemble import GradientBoostingClassifier as gbm from sklearn.externals.joblib import Parallel, delayed class GlobalPredictor: def __init__(self): self.config = None self.model = None self.le = None def calc_views(self, pg): ''' Calculate views for a "new" page based on the given group of page data. :param pg: group of page data (page_id, number of views, etc) :type pg: `pandas.GroupBy` ''' ## Index the page group by date, then reindex it by our full ## date range to add NaN-rows, then turn all NaNs into 0. pg.set_index(pd.to_datetime(pg.view_date), inplace=True) pg = pg.reindex(self.date_range).fillna(0) ## From https://stackoverflow.com/questions/39352554/pandas-dataframe-delete-row-with-certain-value-until-that-value-changes ## Remove all rows until the first non-zero row is encountered. ## This works because at some point in the date range, the page was ## created and therefore _must_ have non-zero views at that point. pg = pg.loc[pg[(pg != 0).all(axis=1)].first_valid_index():] ## If we only have two data points, the average views is 0, ## otherwise, it's the lower end of the confidence interval. if len(pg.num_views) <= 2: return(0.0) else: return(st.t.interval(self.config['confidence interval']/100, len(pg.num_views)-1, loc=np.mean(pg.num_views), scale=st.sem(pg.num_views))[0]) def load_datasets(self): ''' Load in the datasets defined in the configuration file, split them into "new" and "old" pages. Make any necessary calculations. Return both datasets. 
        Uses the configuration stored in `self.config` (dict).
        '''

        # read in snapshot
        snapshot = pd.read_table(self.config['snapshot file'])

        # read in dataset
        dataset = pd.read_table(self.config['dataset'])

        # read in clickstream
        clickstream = pd.read_table(self.config['clickstream file'])

        # read in disambiguations
        disambiguations = pd.read_table(self.config['disambiguation file'])

        # read in the "new" page views
        newpage_views = pd.read_table(self.config['new page views'])

        logging.info('loaded all datasets, processing and merging')

        # Log-transform number of inlinks, views, and calculate prop_proj_inlinks
        dataset['log_inlinks'] = np.log10(1 + dataset['num_inlinks'])

        ## Because this is the global dataset, we don't have WikiProjects,
        ## so "prop_proj_inlinks" is always (1 + num_inlinks/(1 + num_inlinks))
        dataset['prop_proj_inlinks'] = 1 + (dataset['num_inlinks'] /
                                            (1 + dataset['num_inlinks']))

        # Calculate the proportion of clicks from articles
        clickstream['prop_from_art'] = np.minimum(
            1.0, clickstream['n_from_art']/(1 + clickstream['n_clicks']))

        # Join the datasets
        # snapshot[dataset[clickstream]]
        res = pd.merge(snapshot,
                       pd.merge(dataset, clickstream, on='page_id'),
                       left_on='art_page_id', right_on='page_id')

        # filter out pages where the talk page is an archive
        res = res[res.talk_is_archive == 0]

        # filter out pages where the article is a redirect
        res = res[res.art_is_redirect == 0]

        # filter out pages where there is no corresponding article
        res = res[res.art_page_id > 0]

        # filter out disambiguations
        res = res[res.art_page_id.isin(disambiguations.page_id) == False]

        # Split out the new pages, calculate views for them, then copy those
        # views back in.
        logging.info('processing new page views')

        ## We need to extend data for "new" pages so that all of them have
        ## data for all days after their first day in the dataset.
        self.date_range = pd.date_range(start=min(newpage_views.view_date),
                                        end=max(newpage_views.view_date))

        ## Calculate confidence intervals for all new page views. Set a view
        ## count to 0 if it's below 0.
        newpage_views = newpage_views.groupby('page_id').apply(self.calc_views)
        newpage_views = newpage_views.to_frame()
        newpage_views.columns = ['num_views']
        newpage_views.num_views = newpage_views.num_views.apply(
            lambda n: np.maximum(n, 0))

        ## Set all with negative views to NaN, create the index,
        ## update, then set all rows with NaN to 0 views.
        ## Consider: df.num_views[df.num_views < 0] = np.nan
        res.loc[(res.num_views < 0), 'num_views'] = np.nan
        res.set_index('art_page_id', inplace=True)
        res.update(newpage_views, overwrite=True)
        res.loc[(pd.isnull(res.num_views) |
                 (res.num_views < 0)), 'num_views'] = 0

        # calculate log views
        res['log_views'] = np.log10(1 + res['num_views'])

        # calculate proportion of active inlinks
        res['prop_act_inlinks'] = np.minimum(
            1.0, res['n_act_links']/(1 + res['num_inlinks']))

        # add rank variables for views and inlinks, and make them percentiles
        res['rank_links'] = res.num_inlinks.rank(method='min')
        res['rank_links_perc'] = res.num_inlinks.rank(method='min', pct=True)
        res['rank_views'] = res.num_views.rank(method='min')
        res['rank_views_perc'] = res.num_views.rank(method='min', pct=True)

        # ok, done
        return(res)

    def predict_ratings(self, dataset):
        '''
        Trim the given dataset down to the right columns, make predictions
        of the importance rating, and also probabilities for each rating.
:param dataset: the dataset to make predictions on :type dataset: `pandas.DataFrame` ''' X = dataset[self.config['predictors']].as_matrix() logging.info('predicting importance ratings') classes = self.model.predict(X) logging.info('predicting rating probabilities') probabilities = self.model.predict_proba(X) dataset['pred_rating'] = pd.Series(classes, index=dataset.index) for i in range(probabilities.shape[1]): col_name = 'proba_{}'.format(self.le.inverse_transform(i)) dataset[col_name] = probabilities[:,i] ## Return the dataset with predictions and probabilities added return(dataset) def make_predictions(self, config_file): ''' Load in the datasets and models defined in the given configuration file, then predict the importance of all articles in the datasets. ''' logging.info('loading the configuration file') # load in the configuration with open(config_file) as infile: self.config = load(infile) logging.info('loading the model') # load in the model with open(self.config['model file'], 'rb') as infile: self.model = pickle.load(infile) logging.info('loading the label encoder') # load in the label encoder with open(self.config['label encoder file'], 'rb') as infile: self.le = pickle.load(infile) logging.info('reading in the datasets') # read in the datasets self.dataset = self.load_datasets() # make predictions for all the old pages and write out a dataset logging.info('making predictions') self.dataset = self.predict_ratings(self.dataset) # reset the index so the page id column exists before writing it out self.dataset.reset_index(inplace=True) self.dataset[self.config['prediction dataset columns']].to_csv( self.config['prediction dataset'], sep='\t', index=False, compression='bz2') return() def main(): import argparse cli_parser = argparse.ArgumentParser( description="script to make predictions for all articles in a Wikipedia edition using the global model" ) # Verbosity option cli_parser.add_argument('-v', '--verbose', action='store_true', help='write informational output') ## YAML configuration file for the global model cli_parser.add_argument('config_file', help='path to the global model YAML configuration file') args = cli_parser.parse_args() if args.verbose: logging.basicConfig(level=logging.INFO) predictor = GlobalPredictor() predictor.make_predictions(args.config_file) return() if __name__ == '__main__': main()
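
# --- Usage sketch (editor's addition) ---
# The script is driven entirely by its YAML configuration; the file name
# below is hypothetical:
#
#   python global-predictions.py -v global-model.yaml
#
# The configuration is expected to define the keys read above, among them
# 'snapshot file', 'dataset', 'clickstream file', 'disambiguation file',
# 'new page views', 'model file', 'label encoder file', 'predictors',
# 'confidence interval', 'prediction dataset columns' and
# 'prediction dataset'.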
mit
microsoft/LightGBM
examples/python-guide/plot_example.py
1
2035
# coding: utf-8 from pathlib import Path import pandas as pd import lightgbm as lgb if lgb.compat.MATPLOTLIB_INSTALLED: import matplotlib.pyplot as plt else: raise ImportError('You need to install matplotlib and restart your session for plot_example.py.') print('Loading data...') # load or create your dataset regression_example_dir = Path(__file__).absolute().parents[1] / 'regression' df_train = pd.read_csv(str(regression_example_dir / 'regression.train'), header=None, sep='\t') df_test = pd.read_csv(str(regression_example_dir / 'regression.test'), header=None, sep='\t') y_train = df_train[0] y_test = df_test[0] X_train = df_train.drop(0, axis=1) X_test = df_test.drop(0, axis=1) # create dataset for lightgbm lgb_train = lgb.Dataset(X_train, y_train) lgb_test = lgb.Dataset(X_test, y_test, reference=lgb_train) # specify your configurations as a dict params = { 'num_leaves': 5, 'metric': ('l1', 'l2'), 'verbose': 0 } evals_result = {} # to record eval results for plotting print('Starting training...') # train gbm = lgb.train(params, lgb_train, num_boost_round=100, valid_sets=[lgb_train, lgb_test], feature_name=[f'f{i + 1}' for i in range(X_train.shape[-1])], categorical_feature=[21], evals_result=evals_result, verbose_eval=10) print('Plotting metrics recorded during training...') ax = lgb.plot_metric(evals_result, metric='l1') plt.show() print('Plotting feature importances...') ax = lgb.plot_importance(gbm, max_num_features=10) plt.show() print('Plotting split value histogram...') ax = lgb.plot_split_value_histogram(gbm, feature='f26', bins='auto') plt.show() print('Plotting 54th tree...') # one tree use categorical feature to split ax = lgb.plot_tree(gbm, tree_index=53, figsize=(15, 15), show_info=['split_gain']) plt.show() print('Plotting 54th tree with graphviz...') graph = lgb.create_tree_digraph(gbm, tree_index=53, name='Tree54') graph.render(view=True)
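
# Editor's note (assumed variant, not from the original example): on a
# headless machine the plt.show() calls above display nothing. Each plotting
# helper returns a matplotlib Axes, so the figures can be written to disk
# instead, e.g.:
#
#   ax = lgb.plot_importance(gbm, max_num_features=10)
#   ax.figure.savefig('importance.png', bbox_inches='tight')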
mit
mxjl620/scikit-learn
sklearn/mixture/tests/test_gmm.py
200
17427
import unittest import copy import sys from nose.tools import assert_true import numpy as np from numpy.testing import (assert_array_equal, assert_array_almost_equal, assert_raises) from scipy import stats from sklearn import mixture from sklearn.datasets.samples_generator import make_spd_matrix from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_raise_message from sklearn.metrics.cluster import adjusted_rand_score from sklearn.externals.six.moves import cStringIO as StringIO rng = np.random.RandomState(0) def test_sample_gaussian(): # Test sample generation from mixture.sample_gaussian where covariance # is diagonal, spherical and full n_features, n_samples = 2, 300 axis = 1 mu = rng.randint(10) * rng.rand(n_features) cv = (rng.rand(n_features) + 1.0) ** 2 samples = mixture.sample_gaussian( mu, cv, covariance_type='diag', n_samples=n_samples) assert_true(np.allclose(samples.mean(axis), mu, atol=1.3)) assert_true(np.allclose(samples.var(axis), cv, atol=1.5)) # the same for spherical covariances cv = (rng.rand() + 1.0) ** 2 samples = mixture.sample_gaussian( mu, cv, covariance_type='spherical', n_samples=n_samples) assert_true(np.allclose(samples.mean(axis), mu, atol=1.5)) assert_true(np.allclose( samples.var(axis), np.repeat(cv, n_features), atol=1.5)) # and for full covariances A = rng.randn(n_features, n_features) cv = np.dot(A.T, A) + np.eye(n_features) samples = mixture.sample_gaussian( mu, cv, covariance_type='full', n_samples=n_samples) assert_true(np.allclose(samples.mean(axis), mu, atol=1.3)) assert_true(np.allclose(np.cov(samples), cv, atol=2.5)) # Numerical stability check: in SciPy 0.12.0 at least, eigh may return # tiny negative values in its second return value. from sklearn.mixture import sample_gaussian x = sample_gaussian([0, 0], [[4, 3], [1, .1]], covariance_type='full', random_state=42) print(x) assert_true(np.isfinite(x).all()) def _naive_lmvnpdf_diag(X, mu, cv): # slow and naive implementation of lmvnpdf ref = np.empty((len(X), len(mu))) stds = np.sqrt(cv) for i, (m, std) in enumerate(zip(mu, stds)): ref[:, i] = np.log(stats.norm.pdf(X, m, std)).sum(axis=1) return ref def test_lmvnpdf_diag(): # test a slow and naive implementation of lmvnpdf and # compare it to the vectorized version (mixture.lmvnpdf) to test # for correctness n_features, n_components, n_samples = 2, 3, 10 mu = rng.randint(10) * rng.rand(n_components, n_features) cv = (rng.rand(n_components, n_features) + 1.0) ** 2 X = rng.randint(10) * rng.rand(n_samples, n_features) ref = _naive_lmvnpdf_diag(X, mu, cv) lpr = mixture.log_multivariate_normal_density(X, mu, cv, 'diag') assert_array_almost_equal(lpr, ref) def test_lmvnpdf_spherical(): n_features, n_components, n_samples = 2, 3, 10 mu = rng.randint(10) * rng.rand(n_components, n_features) spherecv = rng.rand(n_components, 1) ** 2 + 1 X = rng.randint(10) * rng.rand(n_samples, n_features) cv = np.tile(spherecv, (n_features, 1)) reference = _naive_lmvnpdf_diag(X, mu, cv) lpr = mixture.log_multivariate_normal_density(X, mu, spherecv, 'spherical') assert_array_almost_equal(lpr, reference) def test_lmvnpdf_full(): n_features, n_components, n_samples = 2, 3, 10 mu = rng.randint(10) * rng.rand(n_components, n_features) cv = (rng.rand(n_components, n_features) + 1.0) ** 2 X = rng.randint(10) * rng.rand(n_samples, n_features) fullcv = np.array([np.diag(x) for x in cv]) reference = _naive_lmvnpdf_diag(X, mu, cv) lpr = mixture.log_multivariate_normal_density(X, mu, fullcv, 'full') assert_array_almost_equal(lpr, reference) def 
test_lvmpdf_full_cv_non_positive_definite(): n_features, n_samples = 2, 10 rng = np.random.RandomState(0) X = rng.randint(10) * rng.rand(n_samples, n_features) mu = np.mean(X, 0) cv = np.array([[[-1, 0], [0, 1]]]) expected_message = "'covars' must be symmetric, positive-definite" assert_raise_message(ValueError, expected_message, mixture.log_multivariate_normal_density, X, mu, cv, 'full') def test_GMM_attributes(): n_components, n_features = 10, 4 covariance_type = 'diag' g = mixture.GMM(n_components, covariance_type, random_state=rng) weights = rng.rand(n_components) weights = weights / weights.sum() means = rng.randint(-20, 20, (n_components, n_features)) assert_true(g.n_components == n_components) assert_true(g.covariance_type == covariance_type) g.weights_ = weights assert_array_almost_equal(g.weights_, weights) g.means_ = means assert_array_almost_equal(g.means_, means) covars = (0.1 + 2 * rng.rand(n_components, n_features)) ** 2 g.covars_ = covars assert_array_almost_equal(g.covars_, covars) assert_raises(ValueError, g._set_covars, []) assert_raises(ValueError, g._set_covars, np.zeros((n_components - 2, n_features))) assert_raises(ValueError, mixture.GMM, n_components=20, covariance_type='badcovariance_type') class GMMTester(): do_test_eval = True def _setUp(self): self.n_components = 10 self.n_features = 4 self.weights = rng.rand(self.n_components) self.weights = self.weights / self.weights.sum() self.means = rng.randint(-20, 20, (self.n_components, self.n_features)) self.threshold = -0.5 self.I = np.eye(self.n_features) self.covars = { 'spherical': (0.1 + 2 * rng.rand(self.n_components, self.n_features)) ** 2, 'tied': (make_spd_matrix(self.n_features, random_state=0) + 5 * self.I), 'diag': (0.1 + 2 * rng.rand(self.n_components, self.n_features)) ** 2, 'full': np.array([make_spd_matrix(self.n_features, random_state=0) + 5 * self.I for x in range(self.n_components)])} def test_eval(self): if not self.do_test_eval: return # DPGMM does not support setting the means and # covariances before fitting There is no way of fixing this # due to the variational parameters being more expressive than # covariance matrices g = self.model(n_components=self.n_components, covariance_type=self.covariance_type, random_state=rng) # Make sure the means are far apart so responsibilities.argmax() # picks the actual component used to generate the observations. g.means_ = 20 * self.means g.covars_ = self.covars[self.covariance_type] g.weights_ = self.weights gaussidx = np.repeat(np.arange(self.n_components), 5) n_samples = len(gaussidx) X = rng.randn(n_samples, self.n_features) + g.means_[gaussidx] ll, responsibilities = g.score_samples(X) self.assertEqual(len(ll), n_samples) self.assertEqual(responsibilities.shape, (n_samples, self.n_components)) assert_array_almost_equal(responsibilities.sum(axis=1), np.ones(n_samples)) assert_array_equal(responsibilities.argmax(axis=1), gaussidx) def test_sample(self, n=100): g = self.model(n_components=self.n_components, covariance_type=self.covariance_type, random_state=rng) # Make sure the means are far apart so responsibilities.argmax() # picks the actual component used to generate the observations. 
g.means_ = 20 * self.means g.covars_ = np.maximum(self.covars[self.covariance_type], 0.1) g.weights_ = self.weights samples = g.sample(n) self.assertEqual(samples.shape, (n, self.n_features)) def test_train(self, params='wmc'): g = mixture.GMM(n_components=self.n_components, covariance_type=self.covariance_type) g.weights_ = self.weights g.means_ = self.means g.covars_ = 20 * self.covars[self.covariance_type] # Create a training set by sampling from the predefined distribution. X = g.sample(n_samples=100) g = self.model(n_components=self.n_components, covariance_type=self.covariance_type, random_state=rng, min_covar=1e-1, n_iter=1, init_params=params) g.fit(X) # Do one training iteration at a time so we can keep track of # the log likelihood to make sure that it increases after each # iteration. trainll = [] for _ in range(5): g.params = params g.init_params = '' g.fit(X) trainll.append(self.score(g, X)) g.n_iter = 10 g.init_params = '' g.params = params g.fit(X) # finish fitting # Note that the log likelihood will sometimes decrease by a # very small amount after it has more or less converged due to # the addition of min_covar to the covariance (to prevent # underflow). This is why the threshold is set to -0.5 # instead of 0. delta_min = np.diff(trainll).min() self.assertTrue( delta_min > self.threshold, "The min nll increase is %f which is lower than the admissible" " threshold of %f, for model %s. The likelihoods are %s." % (delta_min, self.threshold, self.covariance_type, trainll)) def test_train_degenerate(self, params='wmc'): # Train on degenerate data with 0 in some dimensions # Create a training set by sampling from the predefined distribution. X = rng.randn(100, self.n_features) X.T[1:] = 0 g = self.model(n_components=2, covariance_type=self.covariance_type, random_state=rng, min_covar=1e-3, n_iter=5, init_params=params) g.fit(X) trainll = g.score(X) self.assertTrue(np.sum(np.abs(trainll / 100 / X.shape[1])) < 5) def test_train_1d(self, params='wmc'): # Train on 1-D data # Create a training set by sampling from the predefined distribution. 
X = rng.randn(100, 1) # X.T[1:] = 0 g = self.model(n_components=2, covariance_type=self.covariance_type, random_state=rng, min_covar=1e-7, n_iter=5, init_params=params) g.fit(X) trainll = g.score(X) if isinstance(g, mixture.DPGMM): self.assertTrue(np.sum(np.abs(trainll / 100)) < 5) else: self.assertTrue(np.sum(np.abs(trainll / 100)) < 2) def score(self, g, X): return g.score(X).sum() class TestGMMWithSphericalCovars(unittest.TestCase, GMMTester): covariance_type = 'spherical' model = mixture.GMM setUp = GMMTester._setUp class TestGMMWithDiagonalCovars(unittest.TestCase, GMMTester): covariance_type = 'diag' model = mixture.GMM setUp = GMMTester._setUp class TestGMMWithTiedCovars(unittest.TestCase, GMMTester): covariance_type = 'tied' model = mixture.GMM setUp = GMMTester._setUp class TestGMMWithFullCovars(unittest.TestCase, GMMTester): covariance_type = 'full' model = mixture.GMM setUp = GMMTester._setUp def test_multiple_init(): # Test that multiple inits does not much worse than a single one X = rng.randn(30, 5) X[:10] += 2 g = mixture.GMM(n_components=2, covariance_type='spherical', random_state=rng, min_covar=1e-7, n_iter=5) train1 = g.fit(X).score(X).sum() g.n_init = 5 train2 = g.fit(X).score(X).sum() assert_true(train2 >= train1 - 1.e-2) def test_n_parameters(): # Test that the right number of parameters is estimated n_samples, n_dim, n_components = 7, 5, 2 X = rng.randn(n_samples, n_dim) n_params = {'spherical': 13, 'diag': 21, 'tied': 26, 'full': 41} for cv_type in ['full', 'tied', 'diag', 'spherical']: g = mixture.GMM(n_components=n_components, covariance_type=cv_type, random_state=rng, min_covar=1e-7, n_iter=1) g.fit(X) assert_true(g._n_parameters() == n_params[cv_type]) def test_1d_1component(): # Test all of the covariance_types return the same BIC score for # 1-dimensional, 1 component fits. n_samples, n_dim, n_components = 100, 1, 1 X = rng.randn(n_samples, n_dim) g_full = mixture.GMM(n_components=n_components, covariance_type='full', random_state=rng, min_covar=1e-7, n_iter=1) g_full.fit(X) g_full_bic = g_full.bic(X) for cv_type in ['tied', 'diag', 'spherical']: g = mixture.GMM(n_components=n_components, covariance_type=cv_type, random_state=rng, min_covar=1e-7, n_iter=1) g.fit(X) assert_array_almost_equal(g.bic(X), g_full_bic) def assert_fit_predict_correct(model, X): model2 = copy.deepcopy(model) predictions_1 = model.fit(X).predict(X) predictions_2 = model2.fit_predict(X) assert adjusted_rand_score(predictions_1, predictions_2) == 1.0 def test_fit_predict(): """ test that gmm.fit_predict is equivalent to gmm.fit + gmm.predict """ lrng = np.random.RandomState(101) n_samples, n_dim, n_comps = 100, 2, 2 mu = np.array([[8, 8]]) component_0 = lrng.randn(n_samples, n_dim) component_1 = lrng.randn(n_samples, n_dim) + mu X = np.vstack((component_0, component_1)) for m_constructor in (mixture.GMM, mixture.VBGMM, mixture.DPGMM): model = m_constructor(n_components=n_comps, covariance_type='full', min_covar=1e-7, n_iter=5, random_state=np.random.RandomState(0)) assert_fit_predict_correct(model, X) model = mixture.GMM(n_components=n_comps, n_iter=0) z = model.fit_predict(X) assert np.all(z == 0), "Quick Initialization Failed!" 
def test_aic(): # Test the aic and bic criteria n_samples, n_dim, n_components = 50, 3, 2 X = rng.randn(n_samples, n_dim) SGH = 0.5 * (X.var() + np.log(2 * np.pi)) # standard gaussian entropy for cv_type in ['full', 'tied', 'diag', 'spherical']: g = mixture.GMM(n_components=n_components, covariance_type=cv_type, random_state=rng, min_covar=1e-7) g.fit(X) aic = 2 * n_samples * SGH * n_dim + 2 * g._n_parameters() bic = (2 * n_samples * SGH * n_dim + np.log(n_samples) * g._n_parameters()) bound = n_dim * 3. / np.sqrt(n_samples) assert_true(np.abs(g.aic(X) - aic) / n_samples < bound) assert_true(np.abs(g.bic(X) - bic) / n_samples < bound) def check_positive_definite_covars(covariance_type): r"""Test that covariance matrices do not become non positive definite Due to the accumulation of round-off errors, the computation of the covariance matrices during the learning phase could lead to non-positive definite covariance matrices. Namely the use of the formula: .. math:: C = (\sum_i w_i x_i x_i^T) - \mu \mu^T instead of: .. math:: C = \sum_i w_i (x_i - \mu)(x_i - \mu)^T while mathematically equivalent, was observed a ``LinAlgError`` exception, when computing a ``GMM`` with full covariance matrices and fixed mean. This function ensures that some later optimization will not introduce the problem again. """ rng = np.random.RandomState(1) # we build a dataset with 2 2d component. The components are unbalanced # (respective weights 0.9 and 0.1) X = rng.randn(100, 2) X[-10:] += (3, 3) # Shift the 10 last points gmm = mixture.GMM(2, params="wc", covariance_type=covariance_type, min_covar=1e-3) # This is a non-regression test for issue #2640. The following call used # to trigger: # numpy.linalg.linalg.LinAlgError: 2-th leading minor not positive definite gmm.fit(X) if covariance_type == "diag" or covariance_type == "spherical": assert_greater(gmm.covars_.min(), 0) else: if covariance_type == "tied": covs = [gmm.covars_] else: covs = gmm.covars_ for c in covs: assert_greater(np.linalg.det(c), 0) def test_positive_definite_covars(): # Check positive definiteness for all covariance types for covariance_type in ["full", "tied", "diag", "spherical"]: yield check_positive_definite_covars, covariance_type def test_verbose_first_level(): # Create sample data X = rng.randn(30, 5) X[:10] += 2 g = mixture.GMM(n_components=2, n_init=2, verbose=1) old_stdout = sys.stdout sys.stdout = StringIO() try: g.fit(X) finally: sys.stdout = old_stdout def test_verbose_second_level(): # Create sample data X = rng.randn(30, 5) X[:10] += 2 g = mixture.GMM(n_components=2, n_init=2, verbose=2) old_stdout = sys.stdout sys.stdout = StringIO() try: g.fit(X) finally: sys.stdout = old_stdout
bsd-3-clause
person142/scipy
scipy/spatial/kdtree.py
4
38301
# Copyright Anne M. Archibald 2008 # Released under the scipy license import numpy as np from heapq import heappush, heappop import scipy.sparse __all__ = ['minkowski_distance_p', 'minkowski_distance', 'distance_matrix', 'Rectangle', 'KDTree'] def minkowski_distance_p(x, y, p=2): """ Compute the pth power of the L**p distance between two arrays. For efficiency, this function computes the L**p distance but does not extract the pth root. If `p` is 1 or infinity, this is equal to the actual L**p distance. Parameters ---------- x : (M, K) array_like Input array. y : (N, K) array_like Input array. p : float, 1 <= p <= infinity Which Minkowski p-norm to use. Examples -------- >>> from scipy.spatial import minkowski_distance_p >>> minkowski_distance_p([[0,0],[0,0]], [[1,1],[0,1]]) array([2, 1]) """ x = np.asarray(x) y = np.asarray(y) # Find smallest common datatype with float64 (return type of this function) - addresses #10262. # Don't just cast to float64 for complex input case. common_datatype = np.promote_types(np.promote_types(x.dtype, y.dtype), 'float64') # Make sure x and y are NumPy arrays of correct datatype. x = x.astype(common_datatype) y = y.astype(common_datatype) if p == np.inf: return np.amax(np.abs(y-x), axis=-1) elif p == 1: return np.sum(np.abs(y-x), axis=-1) else: return np.sum(np.abs(y-x)**p, axis=-1) def minkowski_distance(x, y, p=2): """ Compute the L**p distance between two arrays. Parameters ---------- x : (M, K) array_like Input array. y : (N, K) array_like Input array. p : float, 1 <= p <= infinity Which Minkowski p-norm to use. Examples -------- >>> from scipy.spatial import minkowski_distance >>> minkowski_distance([[0,0],[0,0]], [[1,1],[0,1]]) array([ 1.41421356, 1. ]) """ x = np.asarray(x) y = np.asarray(y) if p == np.inf or p == 1: return minkowski_distance_p(x, y, p) else: return minkowski_distance_p(x, y, p)**(1./p) class Rectangle(object): """Hyperrectangle class. Represents a Cartesian product of intervals. """ def __init__(self, maxes, mins): """Construct a hyperrectangle.""" self.maxes = np.maximum(maxes,mins).astype(float) self.mins = np.minimum(maxes,mins).astype(float) self.m, = self.maxes.shape def __repr__(self): return "<Rectangle %s>" % list(zip(self.mins, self.maxes)) def volume(self): """Total volume.""" return np.prod(self.maxes-self.mins) def split(self, d, split): """ Produce two hyperrectangles by splitting. In general, if you need to compute maximum and minimum distances to the children, it can be done more efficiently by updating the maximum and minimum distances to the parent. Parameters ---------- d : int Axis to split hyperrectangle along. split : float Position along axis `d` to split at. """ mid = np.copy(self.maxes) mid[d] = split less = Rectangle(self.mins, mid) mid = np.copy(self.mins) mid[d] = split greater = Rectangle(mid, self.maxes) return less, greater def min_distance_point(self, x, p=2.): """ Return the minimum distance between input and points in the hyperrectangle. Parameters ---------- x : array_like Input. p : float, optional Input. """ return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-x,x-self.maxes)),p) def max_distance_point(self, x, p=2.): """ Return the maximum distance between input and points in the hyperrectangle. Parameters ---------- x : array_like Input array. p : float, optional Input. """ return minkowski_distance(0, np.maximum(self.maxes-x,x-self.mins),p) def min_distance_rectangle(self, other, p=2.): """ Compute the minimum distance between points in the two hyperrectangles. 
Parameters ---------- other : hyperrectangle Input. p : float Input. """ return minkowski_distance(0, np.maximum(0,np.maximum(self.mins-other.maxes,other.mins-self.maxes)),p) def max_distance_rectangle(self, other, p=2.): """ Compute the maximum distance between points in the two hyperrectangles. Parameters ---------- other : hyperrectangle Input. p : float, optional Input. """ return minkowski_distance(0, np.maximum(self.maxes-other.mins,other.maxes-self.mins),p) class KDTree(object): """ kd-tree for quick nearest-neighbor lookup This class provides an index into a set of k-D points which can be used to rapidly look up the nearest neighbors of any point. Parameters ---------- data : (N,K) array_like The data points to be indexed. This array is not copied, and so modifying this data will result in bogus results. leafsize : int, optional The number of points at which the algorithm switches over to brute-force. Has to be positive. Raises ------ RuntimeError The maximum recursion limit can be exceeded for large data sets. If this happens, either increase the value for the `leafsize` parameter or increase the recursion limit by:: >>> import sys >>> sys.setrecursionlimit(10000) See Also -------- cKDTree : Implementation of `KDTree` in Cython Notes ----- The algorithm used is described in Maneewongvatana and Mount 1999. The general idea is that the kd-tree is a binary tree, each of whose nodes represents an axis-aligned hyperrectangle. Each node specifies an axis and splits the set of points based on whether their coordinate along that axis is greater than or less than a particular value. During construction, the axis and splitting point are chosen by the "sliding midpoint" rule, which ensures that the cells do not all become long and thin. The tree can be queried for the r closest neighbors of any given point (optionally returning only those within some maximum distance of the point). It can also be queried, with a substantial gain in efficiency, for the r approximate closest neighbors. For large dimensions (20 is already large) do not expect this to run significantly faster than brute force. High-dimensional nearest-neighbor queries are a substantial open problem in computer science. The tree also supports all-neighbors queries, both with arrays of points and with other kd-trees. These do use a reasonably efficient algorithm, but the kd-tree is not necessarily the best data structure for this sort of calculation. 
""" def __init__(self, data, leafsize=10): self.data = np.asarray(data) self.n, self.m = np.shape(self.data) self.leafsize = int(leafsize) if self.leafsize < 1: raise ValueError("leafsize must be at least 1") self.maxes = np.amax(self.data,axis=0) self.mins = np.amin(self.data,axis=0) self.tree = self.__build(np.arange(self.n), self.maxes, self.mins) class node(object): def __lt__(self, other): return id(self) < id(other) def __gt__(self, other): return id(self) > id(other) def __le__(self, other): return id(self) <= id(other) def __ge__(self, other): return id(self) >= id(other) def __eq__(self, other): return id(self) == id(other) class leafnode(node): def __init__(self, idx): self.idx = idx self.children = len(idx) class innernode(node): def __init__(self, split_dim, split, less, greater): self.split_dim = split_dim self.split = split self.less = less self.greater = greater self.children = less.children+greater.children def __build(self, idx, maxes, mins): if len(idx) <= self.leafsize: return KDTree.leafnode(idx) else: data = self.data[idx] # maxes = np.amax(data,axis=0) # mins = np.amin(data,axis=0) d = np.argmax(maxes-mins) maxval = maxes[d] minval = mins[d] if maxval == minval: # all points are identical; warn user? return KDTree.leafnode(idx) data = data[:,d] # sliding midpoint rule; see Maneewongvatana and Mount 1999 # for arguments that this is a good idea. split = (maxval+minval)/2 less_idx = np.nonzero(data <= split)[0] greater_idx = np.nonzero(data > split)[0] if len(less_idx) == 0: split = np.amin(data) less_idx = np.nonzero(data <= split)[0] greater_idx = np.nonzero(data > split)[0] if len(greater_idx) == 0: split = np.amax(data) less_idx = np.nonzero(data < split)[0] greater_idx = np.nonzero(data >= split)[0] if len(less_idx) == 0: # _still_ zero? 
all must have the same value if not np.all(data == data[0]): raise ValueError("Troublesome data array: %s" % data) split = data[0] less_idx = np.arange(len(data)-1) greater_idx = np.array([len(data)-1]) lessmaxes = np.copy(maxes) lessmaxes[d] = split greatermins = np.copy(mins) greatermins[d] = split return KDTree.innernode(d, split, self.__build(idx[less_idx],lessmaxes,mins), self.__build(idx[greater_idx],maxes,greatermins)) def __query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf): side_distances = np.maximum(0,np.maximum(x-self.maxes,self.mins-x)) if p != np.inf: side_distances **= p min_distance = np.sum(side_distances) else: min_distance = np.amax(side_distances) # priority queue for chasing nodes # entries are: # minimum distance between the cell and the target # distances between the nearest side of the cell and the target # the head node of the cell q = [(min_distance, tuple(side_distances), self.tree)] # priority queue for the nearest neighbors # furthest known neighbor first # entries are (-distance**p, i) neighbors = [] if eps == 0: epsfac = 1 elif p == np.inf: epsfac = 1/(1+eps) else: epsfac = 1/(1+eps)**p if p != np.inf and distance_upper_bound != np.inf: distance_upper_bound = distance_upper_bound**p while q: min_distance, side_distances, node = heappop(q) if isinstance(node, KDTree.leafnode): # brute-force data = self.data[node.idx] ds = minkowski_distance_p(data,x[np.newaxis,:],p) for i in range(len(ds)): if ds[i] < distance_upper_bound: if len(neighbors) == k: heappop(neighbors) heappush(neighbors, (-ds[i], node.idx[i])) if len(neighbors) == k: distance_upper_bound = -neighbors[0][0] else: # we don't push cells that are too far onto the queue at all, # but since the distance_upper_bound decreases, we might get # here even if the cell's too far if min_distance > distance_upper_bound*epsfac: # since this is the nearest cell, we're done, bail out break # compute minimum distances to the children and push them on if x[node.split_dim] < node.split: near, far = node.less, node.greater else: near, far = node.greater, node.less # near child is at the same distance as the current node heappush(q,(min_distance, side_distances, near)) # far child is further by an amount depending only # on the split value sd = list(side_distances) if p == np.inf: min_distance = max(min_distance, abs(node.split-x[node.split_dim])) elif p == 1: sd[node.split_dim] = np.abs(node.split-x[node.split_dim]) min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim] else: sd[node.split_dim] = np.abs(node.split-x[node.split_dim])**p min_distance = min_distance - side_distances[node.split_dim] + sd[node.split_dim] # far child might be too far, if so, don't bother pushing it if min_distance <= distance_upper_bound*epsfac: heappush(q,(min_distance, tuple(sd), far)) if p == np.inf: return sorted([(-d,i) for (d,i) in neighbors]) else: return sorted([((-d)**(1./p),i) for (d,i) in neighbors]) def query(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf): """ Query the kd-tree for nearest neighbors Parameters ---------- x : array_like, last dimension self.m An array of points to query. k : int, optional The number of nearest neighbors to return. eps : nonnegative float, optional Return approximate nearest neighbors; the kth returned value is guaranteed to be no further than (1+eps) times the distance to the real kth nearest neighbor. p : float, 1<=p<=infinity, optional Which Minkowski p-norm to use. 
1 is the sum-of-absolute-values "Manhattan" distance 2 is the usual Euclidean distance infinity is the maximum-coordinate-difference distance distance_upper_bound : nonnegative float, optional Return only neighbors within this distance. This is used to prune tree searches, so if you are doing a series of nearest-neighbor queries, it may help to supply the distance to the nearest neighbor of the most recent point. Returns ------- d : float or array of floats The distances to the nearest neighbors. If x has shape tuple+(self.m,), then d has shape tuple if k is one, or tuple+(k,) if k is larger than one. Missing neighbors (e.g. when k > n or distance_upper_bound is given) are indicated with infinite distances. If k is None, then d is an object array of shape tuple, containing lists of distances. In either case the hits are sorted by distance (nearest first). i : integer or array of integers The locations of the neighbors in self.data. i is the same shape as d. Examples -------- >>> from scipy import spatial >>> x, y = np.mgrid[0:5, 2:8] >>> tree = spatial.KDTree(list(zip(x.ravel(), y.ravel()))) >>> tree.data array([[0, 2], [0, 3], [0, 4], [0, 5], [0, 6], [0, 7], [1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 2], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 2], [3, 3], [3, 4], [3, 5], [3, 6], [3, 7], [4, 2], [4, 3], [4, 4], [4, 5], [4, 6], [4, 7]]) >>> pts = np.array([[0, 0], [2.1, 2.9]]) >>> tree.query(pts) (array([ 2. , 0.14142136]), array([ 0, 13])) >>> tree.query(pts[0]) (2.0, 0) """ x = np.asarray(x) if np.shape(x)[-1] != self.m: raise ValueError("x must consist of vectors of length %d but has shape %s" % (self.m, np.shape(x))) if p < 1: raise ValueError("Only p-norms with 1<=p<=infinity permitted") retshape = np.shape(x)[:-1] if retshape != (): if k is None: dd = np.empty(retshape,dtype=object) ii = np.empty(retshape,dtype=object) elif k > 1: dd = np.empty(retshape+(k,),dtype=float) dd.fill(np.inf) ii = np.empty(retshape+(k,),dtype=int) ii.fill(self.n) elif k == 1: dd = np.empty(retshape,dtype=float) dd.fill(np.inf) ii = np.empty(retshape,dtype=int) ii.fill(self.n) else: raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None") for c in np.ndindex(retshape): hits = self.__query(x[c], k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound) if k is None: dd[c] = [d for (d,i) in hits] ii[c] = [i for (d,i) in hits] elif k > 1: for j in range(len(hits)): dd[c+(j,)], ii[c+(j,)] = hits[j] elif k == 1: if len(hits) > 0: dd[c], ii[c] = hits[0] else: dd[c] = np.inf ii[c] = self.n return dd, ii else: hits = self.__query(x, k=k, eps=eps, p=p, distance_upper_bound=distance_upper_bound) if k is None: return [d for (d,i) in hits], [i for (d,i) in hits] elif k == 1: if len(hits) > 0: return hits[0] else: return np.inf, self.n elif k > 1: dd = np.empty(k,dtype=float) dd.fill(np.inf) ii = np.empty(k,dtype=int) ii.fill(self.n) for j in range(len(hits)): dd[j], ii[j] = hits[j] return dd, ii else: raise ValueError("Requested %s nearest neighbors; acceptable numbers are integers greater than or equal to one, or None") def __query_ball_point(self, x, r, p=2., eps=0): R = Rectangle(self.maxes, self.mins) def traverse_checking(node, rect): if rect.min_distance_point(x, p) > r / (1. + eps): return [] elif rect.max_distance_point(x, p) < r * (1. 
+ eps): return traverse_no_checking(node) elif isinstance(node, KDTree.leafnode): d = self.data[node.idx] return node.idx[minkowski_distance(d, x, p) <= r].tolist() else: less, greater = rect.split(node.split_dim, node.split) return traverse_checking(node.less, less) + \ traverse_checking(node.greater, greater) def traverse_no_checking(node): if isinstance(node, KDTree.leafnode): return node.idx.tolist() else: return traverse_no_checking(node.less) + \ traverse_no_checking(node.greater) return traverse_checking(self.tree, R) def query_ball_point(self, x, r, p=2., eps=0): """Find all points within distance r of point(s) x. Parameters ---------- x : array_like, shape tuple + (self.m,) The point or points to search for neighbors of. r : positive float The radius of points to return. p : float, optional Which Minkowski p-norm to use. Should be in the range [1, inf]. eps : nonnegative float, optional Approximate search. Branches of the tree are not explored if their nearest points are further than ``r / (1 + eps)``, and branches are added in bulk if their furthest points are nearer than ``r * (1 + eps)``. Returns ------- results : list or array of lists If `x` is a single point, returns a list of the indices of the neighbors of `x`. If `x` is an array of points, returns an object array of shape tuple containing lists of neighbors. Notes ----- If you have many points whose neighbors you want to find, you may save substantial amounts of time by putting them in a KDTree and using query_ball_tree. Examples -------- >>> from scipy import spatial >>> x, y = np.mgrid[0:5, 0:5] >>> points = np.c_[x.ravel(), y.ravel()] >>> tree = spatial.KDTree(points) >>> tree.query_ball_point([2, 0], 1) [5, 10, 11, 15] Query multiple points and plot the results: >>> import matplotlib.pyplot as plt >>> points = np.asarray(points) >>> plt.plot(points[:,0], points[:,1], '.') >>> for results in tree.query_ball_point(([2, 0], [3, 3]), 1): ... nearby_points = points[results] ... plt.plot(nearby_points[:,0], nearby_points[:,1], 'o') >>> plt.margins(0.1, 0.1) >>> plt.show() """ x = np.asarray(x) if x.shape[-1] != self.m: raise ValueError("Searching for a %d-dimensional point in a " "%d-dimensional KDTree" % (x.shape[-1], self.m)) if len(x.shape) == 1: return self.__query_ball_point(x, r, p, eps) else: retshape = x.shape[:-1] result = np.empty(retshape, dtype=object) for c in np.ndindex(retshape): result[c] = self.__query_ball_point(x[c], r, p=p, eps=eps) return result def query_ball_tree(self, other, r, p=2., eps=0): """Find all pairs of points whose distance is at most r Parameters ---------- other : KDTree instance The tree containing points to search against. r : float The maximum distance, has to be positive. p : float, optional Which Minkowski norm to use. `p` has to meet the condition ``1 <= p <= infinity``. eps : float, optional Approximate search. Branches of the tree are not explored if their nearest points are further than ``r/(1+eps)``, and branches are added in bulk if their furthest points are nearer than ``r * (1+eps)``. `eps` has to be non-negative. Returns ------- results : list of lists For each element ``self.data[i]`` of this tree, ``results[i]`` is a list of the indices of its neighbors in ``other.data``. 
""" results = [[] for i in range(self.n)] def traverse_checking(node1, rect1, node2, rect2): if rect1.min_distance_rectangle(rect2, p) > r/(1.+eps): return elif rect1.max_distance_rectangle(rect2, p) < r*(1.+eps): traverse_no_checking(node1, node2) elif isinstance(node1, KDTree.leafnode): if isinstance(node2, KDTree.leafnode): d = other.data[node2.idx] for i in node1.idx: results[i] += node2.idx[minkowski_distance(d,self.data[i],p) <= r].tolist() else: less, greater = rect2.split(node2.split_dim, node2.split) traverse_checking(node1,rect1,node2.less,less) traverse_checking(node1,rect1,node2.greater,greater) elif isinstance(node2, KDTree.leafnode): less, greater = rect1.split(node1.split_dim, node1.split) traverse_checking(node1.less,less,node2,rect2) traverse_checking(node1.greater,greater,node2,rect2) else: less1, greater1 = rect1.split(node1.split_dim, node1.split) less2, greater2 = rect2.split(node2.split_dim, node2.split) traverse_checking(node1.less,less1,node2.less,less2) traverse_checking(node1.less,less1,node2.greater,greater2) traverse_checking(node1.greater,greater1,node2.less,less2) traverse_checking(node1.greater,greater1,node2.greater,greater2) def traverse_no_checking(node1, node2): if isinstance(node1, KDTree.leafnode): if isinstance(node2, KDTree.leafnode): for i in node1.idx: results[i] += node2.idx.tolist() else: traverse_no_checking(node1, node2.less) traverse_no_checking(node1, node2.greater) else: traverse_no_checking(node1.less, node2) traverse_no_checking(node1.greater, node2) traverse_checking(self.tree, Rectangle(self.maxes, self.mins), other.tree, Rectangle(other.maxes, other.mins)) return results def query_pairs(self, r, p=2., eps=0): """ Find all pairs of points within a distance. Parameters ---------- r : positive float The maximum distance. p : float, optional Which Minkowski norm to use. `p` has to meet the condition ``1 <= p <= infinity``. eps : float, optional Approximate search. Branches of the tree are not explored if their nearest points are further than ``r/(1+eps)``, and branches are added in bulk if their furthest points are nearer than ``r * (1+eps)``. `eps` has to be non-negative. Returns ------- results : set Set of pairs ``(i,j)``, with ``i < j``, for which the corresponding positions are close. 
""" results = set() def traverse_checking(node1, rect1, node2, rect2): if rect1.min_distance_rectangle(rect2, p) > r/(1.+eps): return elif rect1.max_distance_rectangle(rect2, p) < r*(1.+eps): traverse_no_checking(node1, node2) elif isinstance(node1, KDTree.leafnode): if isinstance(node2, KDTree.leafnode): # Special care to avoid duplicate pairs if id(node1) == id(node2): d = self.data[node2.idx] for i in node1.idx: for j in node2.idx[minkowski_distance(d,self.data[i],p) <= r]: if i < j: results.add((i,j)) else: d = self.data[node2.idx] for i in node1.idx: for j in node2.idx[minkowski_distance(d,self.data[i],p) <= r]: if i < j: results.add((i,j)) elif j < i: results.add((j,i)) else: less, greater = rect2.split(node2.split_dim, node2.split) traverse_checking(node1,rect1,node2.less,less) traverse_checking(node1,rect1,node2.greater,greater) elif isinstance(node2, KDTree.leafnode): less, greater = rect1.split(node1.split_dim, node1.split) traverse_checking(node1.less,less,node2,rect2) traverse_checking(node1.greater,greater,node2,rect2) else: less1, greater1 = rect1.split(node1.split_dim, node1.split) less2, greater2 = rect2.split(node2.split_dim, node2.split) traverse_checking(node1.less,less1,node2.less,less2) traverse_checking(node1.less,less1,node2.greater,greater2) # Avoid traversing (node1.less, node2.greater) and # (node1.greater, node2.less) (it's the same node pair twice # over, which is the source of the complication in the # original KDTree.query_pairs) if id(node1) != id(node2): traverse_checking(node1.greater,greater1,node2.less,less2) traverse_checking(node1.greater,greater1,node2.greater,greater2) def traverse_no_checking(node1, node2): if isinstance(node1, KDTree.leafnode): if isinstance(node2, KDTree.leafnode): # Special care to avoid duplicate pairs if id(node1) == id(node2): for i in node1.idx: for j in node2.idx: if i < j: results.add((i,j)) else: for i in node1.idx: for j in node2.idx: if i < j: results.add((i,j)) elif j < i: results.add((j,i)) else: traverse_no_checking(node1, node2.less) traverse_no_checking(node1, node2.greater) else: # Avoid traversing (node1.less, node2.greater) and # (node1.greater, node2.less) (it's the same node pair twice # over, which is the source of the complication in the # original KDTree.query_pairs) if id(node1) == id(node2): traverse_no_checking(node1.less, node2.less) traverse_no_checking(node1.less, node2.greater) traverse_no_checking(node1.greater, node2.greater) else: traverse_no_checking(node1.less, node2) traverse_no_checking(node1.greater, node2) traverse_checking(self.tree, Rectangle(self.maxes, self.mins), self.tree, Rectangle(self.maxes, self.mins)) return results def count_neighbors(self, other, r, p=2.): """ Count how many nearby pairs can be formed. Count the number of pairs (x1,x2) can be formed, with x1 drawn from self and x2 drawn from ``other``, and where ``distance(x1, x2, p) <= r``. This is the "two-point correlation" described in Gray and Moore 2000, "N-body problems in statistical learning", and the code here is based on their algorithm. Parameters ---------- other : KDTree instance The other tree to draw points from. r : float or one-dimensional array of floats The radius to produce a count for. Multiple radii are searched with a single tree traversal. p : float, 1<=p<=infinity, optional Which Minkowski p-norm to use Returns ------- result : int or 1-D array of ints The number of pairs. Note that this is internally stored in a numpy int, and so may overflow if very large (2e9). 
""" def traverse(node1, rect1, node2, rect2, idx): min_r = rect1.min_distance_rectangle(rect2,p) max_r = rect1.max_distance_rectangle(rect2,p) c_greater = r[idx] > max_r result[idx[c_greater]] += node1.children*node2.children idx = idx[(min_r <= r[idx]) & (r[idx] <= max_r)] if len(idx) == 0: return if isinstance(node1,KDTree.leafnode): if isinstance(node2,KDTree.leafnode): ds = minkowski_distance(self.data[node1.idx][:,np.newaxis,:], other.data[node2.idx][np.newaxis,:,:], p).ravel() ds.sort() result[idx] += np.searchsorted(ds,r[idx],side='right') else: less, greater = rect2.split(node2.split_dim, node2.split) traverse(node1, rect1, node2.less, less, idx) traverse(node1, rect1, node2.greater, greater, idx) else: if isinstance(node2,KDTree.leafnode): less, greater = rect1.split(node1.split_dim, node1.split) traverse(node1.less, less, node2, rect2, idx) traverse(node1.greater, greater, node2, rect2, idx) else: less1, greater1 = rect1.split(node1.split_dim, node1.split) less2, greater2 = rect2.split(node2.split_dim, node2.split) traverse(node1.less,less1,node2.less,less2,idx) traverse(node1.less,less1,node2.greater,greater2,idx) traverse(node1.greater,greater1,node2.less,less2,idx) traverse(node1.greater,greater1,node2.greater,greater2,idx) R1 = Rectangle(self.maxes, self.mins) R2 = Rectangle(other.maxes, other.mins) if np.shape(r) == (): r = np.array([r]) result = np.zeros(1,dtype=int) traverse(self.tree, R1, other.tree, R2, np.arange(1)) return result[0] elif len(np.shape(r)) == 1: r = np.asarray(r) n, = r.shape result = np.zeros(n,dtype=int) traverse(self.tree, R1, other.tree, R2, np.arange(n)) return result else: raise ValueError("r must be either a single value or a one-dimensional array of values") def sparse_distance_matrix(self, other, max_distance, p=2.): """ Compute a sparse distance matrix Computes a distance matrix between two KDTrees, leaving as zero any distance greater than max_distance. Parameters ---------- other : KDTree max_distance : positive float p : float, optional Returns ------- result : dok_matrix Sparse matrix representing the results in "dictionary of keys" format. """ result = scipy.sparse.dok_matrix((self.n,other.n)) def traverse(node1, rect1, node2, rect2): if rect1.min_distance_rectangle(rect2, p) > max_distance: return elif isinstance(node1, KDTree.leafnode): if isinstance(node2, KDTree.leafnode): for i in node1.idx: for j in node2.idx: d = minkowski_distance(self.data[i],other.data[j],p) if d <= max_distance: result[i,j] = d else: less, greater = rect2.split(node2.split_dim, node2.split) traverse(node1,rect1,node2.less,less) traverse(node1,rect1,node2.greater,greater) elif isinstance(node2, KDTree.leafnode): less, greater = rect1.split(node1.split_dim, node1.split) traverse(node1.less,less,node2,rect2) traverse(node1.greater,greater,node2,rect2) else: less1, greater1 = rect1.split(node1.split_dim, node1.split) less2, greater2 = rect2.split(node2.split_dim, node2.split) traverse(node1.less,less1,node2.less,less2) traverse(node1.less,less1,node2.greater,greater2) traverse(node1.greater,greater1,node2.less,less2) traverse(node1.greater,greater1,node2.greater,greater2) traverse(self.tree, Rectangle(self.maxes, self.mins), other.tree, Rectangle(other.maxes, other.mins)) return result def distance_matrix(x, y, p=2, threshold=1000000): """ Compute the distance matrix. Returns the matrix of all pair-wise distances. Parameters ---------- x : (M, K) array_like Matrix of M vectors in K dimensions. y : (N, K) array_like Matrix of N vectors in K dimensions. 
p : float, 1 <= p <= infinity Which Minkowski p-norm to use. threshold : positive int If ``M * N * K`` > `threshold`, algorithm uses a Python loop instead of large temporary arrays. Returns ------- result : (M, N) ndarray Matrix containing the distance from every vector in `x` to every vector in `y`. Examples -------- >>> from scipy.spatial import distance_matrix >>> distance_matrix([[0,0],[0,1]], [[1,0],[1,1]]) array([[ 1. , 1.41421356], [ 1.41421356, 1. ]]) """ x = np.asarray(x) m, k = x.shape y = np.asarray(y) n, kk = y.shape if k != kk: raise ValueError("x contains %d-dimensional vectors but y contains %d-dimensional vectors" % (k, kk)) if m*n*k <= threshold: return minkowski_distance(x[:,np.newaxis,:],y[np.newaxis,:,:],p) else: result = np.empty((m,n),dtype=float) # FIXME: figure out the best dtype if m < n: for i in range(m): result[i,:] = minkowski_distance(x[i],y,p) else: for j in range(n): result[:,j] = minkowski_distance(x,y[j],p) return result
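# Illustrative usage sketch (not part of the original module): a minimal
# demonstration of the KDTree API defined above, assuming only numpy and the
# classes in this file. The expected results noted in the comments follow
# directly from the toy coordinates.
if __name__ == "__main__":
    points = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [2.0, 2.0]])
    tree = KDTree(points, leafsize=2)

    # Single nearest neighbor of a query point near the origin.
    d, i = tree.query([0.1, 0.1], k=1)            # d ~= 0.1414, i == 0

    # All points within radius 1.5 of the origin: indices 0, 1 and 2.
    ball = tree.query_ball_point([0.0, 0.0], r=1.5)

    # Pairs of points at most sqrt(2) apart: {(0, 1), (0, 2), (1, 2)}.
    pairs = tree.query_pairs(r=np.sqrt(2) + 1e-9)

    # Dense pairwise distances between two halves of the point set.
    dm = distance_matrix(points[:2], points[2:])
    print(d, i, sorted(ball), sorted(pairs), dm.shape)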
bsd-3-clause
WilliamLubega/PGD-Computer-Science-_Project
william/server/Server.py
2
1648
# coding: utf-8

# In[ ]:

import socket
import sys
import numpy as np
import matplotlib.pyplot as plt
import caffe
import cv2
import detectobjects as det
import createdb as cdb

from convnet import ConvNetClassifier

opts = {'img_dir': '/some/where/images',
        'model_dir': '/some/where/models/models/',
        'annotation_dir': '/some/where/annotation',
        'model': '2C-1FC-O',
        'threshold': 0.5,
        'overlapThreshold': 0.3,
        'lim': 0,
        'prob': det.MAX,
        'pos': det.MAX,
        'gauss': 1,
        'mean': np.array([162.83]),
        'input_scale': None,
        'raw_scale': 255,
        'image_dims': (50, 50),
        'channel_swap': None,
        'probs_area': 40,
        'step': 5
        }

net = ConvNetClassifier(opts)
trainfiles, valfiles, testfiles = cdb.create_sets(opts['img_dir'])

s = socket.socket()
s.bind(('', xxxx))  # remember to change the port and server address appropriately
s.listen(10)

i = 1  # connection counter (currently unused)

while True:
    sc, address = s.accept()
    print(address)

    # Open in binary mode; to be renamed later to a nomenclature specific
    # to the Android phone.
    f = open("transmit.jpg", 'wb')
    l = sc.recv(1024)
    while l:
        f.write(l)
        l = sc.recv(1024)
    f.close()

    img = cv2.imread("transmit.jpg", cv2.IMREAD_GRAYSCALE)

    # imfile = opts['img_dir'] + testfiles[1]
    imfile = '/some/where/transmit.jpg'
    found = det.detect(imfile, net, opts)
    # shape = np.shape(img)[1]

    # Send back a response: for now, the received parasite locations.
    sc.send(str(found[1]))
    sc.close()

s.close()
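# Hypothetical client-side sketch (not part of the original file): one way a
# phone app or test script could stream an image to the server above. The
# host, port and function name are placeholders; the port must match the one
# passed to s.bind().
import socket

def send_image(path, host='192.168.0.10', port=9999):
    c = socket.socket()
    c.connect((host, port))
    with open(path, 'rb') as f:
        chunk = f.read(1024)
        while chunk:
            c.sendall(chunk)
            chunk = f.read(1024)
    # Half-close the connection so the server's recv() loop sees end-of-stream
    # and can write the file, run detection, and send its reply back.
    c.shutdown(socket.SHUT_WR)
    reply = c.recv(4096)
    c.close()
    return reply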
mit
ElDeveloper/scikit-learn
sklearn/ensemble/tests/test_forest.py
3
41612
""" Testing for the forest module (sklearn.ensemble.forest). """ # Authors: Gilles Louppe, # Brian Holt, # Andreas Mueller, # Arnaud Joly # License: BSD 3 clause import pickle from collections import defaultdict from itertools import combinations from itertools import product import numpy as np from scipy.misc import comb from scipy.sparse import csr_matrix from scipy.sparse import csc_matrix from scipy.sparse import coo_matrix from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_false, assert_true from sklearn.utils.testing import assert_less, assert_greater from sklearn.utils.testing import assert_greater_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_warns from sklearn.utils.testing import ignore_warnings from sklearn import datasets from sklearn.decomposition import TruncatedSVD from sklearn.ensemble import ExtraTreesClassifier from sklearn.ensemble import ExtraTreesRegressor from sklearn.ensemble import RandomForestClassifier from sklearn.ensemble import RandomForestRegressor from sklearn.ensemble import RandomTreesEmbedding from sklearn.model_selection import GridSearchCV from sklearn.svm import LinearSVC from sklearn.utils.fixes import bincount from sklearn.utils.validation import check_random_state from sklearn.tree.tree import SPARSE_SPLITTERS # toy sample X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]] y = [-1, -1, -1, 1, 1, 1] T = [[-1, -1], [2, 2], [3, 2]] true_result = [-1, 1, 1] # also load the iris dataset # and randomly permute it iris = datasets.load_iris() rng = check_random_state(0) perm = rng.permutation(iris.target.size) iris.data = iris.data[perm] iris.target = iris.target[perm] # also load the boston dataset # and randomly permute it boston = datasets.load_boston() perm = rng.permutation(boston.target.size) boston.data = boston.data[perm] boston.target = boston.target[perm] # also make a hastie_10_2 dataset hastie_X, hastie_y = datasets.make_hastie_10_2(n_samples=20, random_state=1) hastie_X = hastie_X.astype(np.float32) FOREST_CLASSIFIERS = { "ExtraTreesClassifier": ExtraTreesClassifier, "RandomForestClassifier": RandomForestClassifier, } FOREST_REGRESSORS = { "ExtraTreesRegressor": ExtraTreesRegressor, "RandomForestRegressor": RandomForestRegressor, } FOREST_TRANSFORMERS = { "RandomTreesEmbedding": RandomTreesEmbedding, } FOREST_ESTIMATORS = dict() FOREST_ESTIMATORS.update(FOREST_CLASSIFIERS) FOREST_ESTIMATORS.update(FOREST_REGRESSORS) FOREST_ESTIMATORS.update(FOREST_TRANSFORMERS) def check_classification_toy(name): """Check classification on a toy dataset.""" ForestClassifier = FOREST_CLASSIFIERS[name] clf = ForestClassifier(n_estimators=10, random_state=1) clf.fit(X, y) assert_array_equal(clf.predict(T), true_result) assert_equal(10, len(clf)) clf = ForestClassifier(n_estimators=10, max_features=1, random_state=1) clf.fit(X, y) assert_array_equal(clf.predict(T), true_result) assert_equal(10, len(clf)) # also test apply leaf_indices = clf.apply(X) assert_equal(leaf_indices.shape, (len(X), clf.n_estimators)) def test_classification_toy(): for name in FOREST_CLASSIFIERS: yield check_classification_toy, name def check_iris_criterion(name, criterion): # Check consistency on dataset iris. 
ForestClassifier = FOREST_CLASSIFIERS[name] clf = ForestClassifier(n_estimators=10, criterion=criterion, random_state=1) clf.fit(iris.data, iris.target) score = clf.score(iris.data, iris.target) assert_greater(score, 0.9, "Failed with criterion %s and score = %f" % (criterion, score)) clf = ForestClassifier(n_estimators=10, criterion=criterion, max_features=2, random_state=1) clf.fit(iris.data, iris.target) score = clf.score(iris.data, iris.target) assert_greater(score, 0.5, "Failed with criterion %s and score = %f" % (criterion, score)) def test_iris(): for name, criterion in product(FOREST_CLASSIFIERS, ("gini", "entropy")): yield check_iris_criterion, name, criterion def check_boston_criterion(name, criterion): # Check consistency on dataset boston house prices. ForestRegressor = FOREST_REGRESSORS[name] clf = ForestRegressor(n_estimators=5, criterion=criterion, random_state=1) clf.fit(boston.data, boston.target) score = clf.score(boston.data, boston.target) assert_greater(score, 0.95, "Failed with max_features=None, criterion %s " "and score = %f" % (criterion, score)) clf = ForestRegressor(n_estimators=5, criterion=criterion, max_features=6, random_state=1) clf.fit(boston.data, boston.target) score = clf.score(boston.data, boston.target) assert_greater(score, 0.95, "Failed with max_features=6, criterion %s " "and score = %f" % (criterion, score)) def test_boston(): for name, criterion in product(FOREST_REGRESSORS, ("mse", )): yield check_boston_criterion, name, criterion def check_regressor_attributes(name): # Regression models should not have a classes_ attribute. r = FOREST_REGRESSORS[name](random_state=0) assert_false(hasattr(r, "classes_")) assert_false(hasattr(r, "n_classes_")) r.fit([[1, 2, 3], [4, 5, 6]], [1, 2]) assert_false(hasattr(r, "classes_")) assert_false(hasattr(r, "n_classes_")) def test_regressor_attributes(): for name in FOREST_REGRESSORS: yield check_regressor_attributes, name def check_probability(name): # Predict probabilities. ForestClassifier = FOREST_CLASSIFIERS[name] with np.errstate(divide="ignore"): clf = ForestClassifier(n_estimators=10, random_state=1, max_features=1, max_depth=1) clf.fit(iris.data, iris.target) assert_array_almost_equal(np.sum(clf.predict_proba(iris.data), axis=1), np.ones(iris.data.shape[0])) assert_array_almost_equal(clf.predict_proba(iris.data), np.exp(clf.predict_log_proba(iris.data))) def test_probability(): for name in FOREST_CLASSIFIERS: yield check_probability, name def check_importances(name, criterion, X, y): ForestEstimator = FOREST_ESTIMATORS[name] est = ForestEstimator(n_estimators=20, criterion=criterion, random_state=0) est.fit(X, y) importances = est.feature_importances_ n_important = np.sum(importances > 0.1) assert_equal(importances.shape[0], 10) assert_equal(n_important, 3) # XXX: Remove this test in 0.19 after transform support to estimators # is removed. 
X_new = assert_warns( DeprecationWarning, est.transform, X, threshold="mean") assert_less(0 < X_new.shape[1], X.shape[1]) # Check with parallel importances = est.feature_importances_ est.set_params(n_jobs=2) importances_parrallel = est.feature_importances_ assert_array_almost_equal(importances, importances_parrallel) # Check with sample weights sample_weight = check_random_state(0).randint(1, 10, len(X)) est = ForestEstimator(n_estimators=20, random_state=0, criterion=criterion) est.fit(X, y, sample_weight=sample_weight) importances = est.feature_importances_ assert_true(np.all(importances >= 0.0)) for scale in [0.5, 10, 100]: est = ForestEstimator(n_estimators=20, random_state=0, criterion=criterion) est.fit(X, y, sample_weight=scale * sample_weight) importances_bis = est.feature_importances_ assert_less(np.abs(importances - importances_bis).mean(), 0.001) def test_importances(): X, y = datasets.make_classification(n_samples=500, n_features=10, n_informative=3, n_redundant=0, n_repeated=0, shuffle=False, random_state=0) for name, criterion in product(FOREST_CLASSIFIERS, ["gini", "entropy"]): yield check_importances, name, criterion, X, y for name, criterion in product(FOREST_REGRESSORS, ["mse", "friedman_mse"]): yield check_importances, name, criterion, X, y def test_importances_asymptotic(): # Check whether variable importances of totally randomized trees # converge towards their theoretical values (See Louppe et al, # Understanding variable importances in forests of randomized trees, 2013). def binomial(k, n): return 0 if k < 0 or k > n else comb(int(n), int(k), exact=True) def entropy(samples): n_samples = len(samples) entropy = 0. for count in bincount(samples): p = 1. * count / n_samples if p > 0: entropy -= p * np.log2(p) return entropy def mdi_importance(X_m, X, y): n_samples, n_features = X.shape features = list(range(n_features)) features.pop(X_m) values = [np.unique(X[:, i]) for i in range(n_features)] imp = 0. for k in range(n_features): # Weight of each B of size k coef = 1. / (binomial(k, n_features) * (n_features - k)) # For all B of size k for B in combinations(features, k): # For all values B=b for b in product(*[values[B[j]] for j in range(k)]): mask_b = np.ones(n_samples, dtype=np.bool) for j in range(k): mask_b &= X[:, B[j]] == b[j] X_, y_ = X[mask_b, :], y[mask_b] n_samples_b = len(X_) if n_samples_b > 0: children = [] for xi in values[X_m]: mask_xi = X_[:, X_m] == xi children.append(y_[mask_xi]) imp += (coef * (1. 
* n_samples_b / n_samples) # P(B=b) * (entropy(y_) - sum([entropy(c) * len(c) / n_samples_b for c in children]))) return imp data = np.array([[0, 0, 1, 0, 0, 1, 0, 1], [1, 0, 1, 1, 1, 0, 1, 2], [1, 0, 1, 1, 0, 1, 1, 3], [0, 1, 1, 1, 0, 1, 0, 4], [1, 1, 0, 1, 0, 1, 1, 5], [1, 1, 0, 1, 1, 1, 1, 6], [1, 0, 1, 0, 0, 1, 0, 7], [1, 1, 1, 1, 1, 1, 1, 8], [1, 1, 1, 1, 0, 1, 1, 9], [1, 1, 1, 0, 1, 1, 1, 0]]) X, y = np.array(data[:, :7], dtype=np.bool), data[:, 7] n_features = X.shape[1] # Compute true importances true_importances = np.zeros(n_features) for i in range(n_features): true_importances[i] = mdi_importance(i, X, y) # Estimate importances with totally randomized trees clf = ExtraTreesClassifier(n_estimators=500, max_features=1, criterion="entropy", random_state=0).fit(X, y) importances = sum(tree.tree_.compute_feature_importances(normalize=False) for tree in clf.estimators_) / clf.n_estimators # Check correctness assert_almost_equal(entropy(y), sum(importances)) assert_less(np.abs(true_importances - importances).mean(), 0.01) def check_unfitted_feature_importances(name): assert_raises(ValueError, getattr, FOREST_ESTIMATORS[name](random_state=0), "feature_importances_") def test_unfitted_feature_importances(): for name in FOREST_ESTIMATORS: yield check_unfitted_feature_importances, name def check_oob_score(name, X, y, n_estimators=20): # Check that oob prediction is a good estimation of the generalization # error. # Proper behavior est = FOREST_ESTIMATORS[name](oob_score=True, random_state=0, n_estimators=n_estimators, bootstrap=True) n_samples = X.shape[0] est.fit(X[:n_samples // 2, :], y[:n_samples // 2]) test_score = est.score(X[n_samples // 2:, :], y[n_samples // 2:]) if name in FOREST_CLASSIFIERS: assert_less(abs(test_score - est.oob_score_), 0.1) else: assert_greater(test_score, est.oob_score_) assert_greater(est.oob_score_, .8) # Check warning if not enough estimators with np.errstate(divide="ignore", invalid="ignore"): est = FOREST_ESTIMATORS[name](oob_score=True, random_state=0, n_estimators=1, bootstrap=True) assert_warns(UserWarning, est.fit, X, y) def test_oob_score(): for name in FOREST_CLASSIFIERS: yield check_oob_score, name, iris.data, iris.target # csc matrix yield check_oob_score, name, csc_matrix(iris.data), iris.target # non-contiguous targets in classification yield check_oob_score, name, iris.data, iris.target * 2 + 1 for name in FOREST_REGRESSORS: yield check_oob_score, name, boston.data, boston.target, 50 # csc matrix yield check_oob_score, name, csc_matrix(boston.data), boston.target, 50 def check_oob_score_raise_error(name): ForestEstimator = FOREST_ESTIMATORS[name] if name in FOREST_TRANSFORMERS: for oob_score in [True, False]: assert_raises(TypeError, ForestEstimator, oob_score=oob_score) assert_raises(NotImplementedError, ForestEstimator()._set_oob_score, X, y) else: # Unfitted / no bootstrap / no oob_score for oob_score, bootstrap in [(True, False), (False, True), (False, False)]: est = ForestEstimator(oob_score=oob_score, bootstrap=bootstrap, random_state=0) assert_false(hasattr(est, "oob_score_")) # No bootstrap assert_raises(ValueError, ForestEstimator(oob_score=True, bootstrap=False).fit, X, y) def test_oob_score_raise_error(): for name in FOREST_ESTIMATORS: yield check_oob_score_raise_error, name def check_gridsearch(name): forest = FOREST_CLASSIFIERS[name]() clf = GridSearchCV(forest, {'n_estimators': (1, 2), 'max_depth': (1, 2)}) clf.fit(iris.data, iris.target) def test_gridsearch(): # Check that base trees can be grid-searched. 
for name in FOREST_CLASSIFIERS: yield check_gridsearch, name def check_parallel(name, X, y): """Check parallel computations in classification""" ForestEstimator = FOREST_ESTIMATORS[name] forest = ForestEstimator(n_estimators=10, n_jobs=3, random_state=0) forest.fit(X, y) assert_equal(len(forest), 10) forest.set_params(n_jobs=1) y1 = forest.predict(X) forest.set_params(n_jobs=2) y2 = forest.predict(X) assert_array_almost_equal(y1, y2, 3) def test_parallel(): for name in FOREST_CLASSIFIERS: yield check_parallel, name, iris.data, iris.target for name in FOREST_REGRESSORS: yield check_parallel, name, boston.data, boston.target def check_pickle(name, X, y): # Check pickability. ForestEstimator = FOREST_ESTIMATORS[name] obj = ForestEstimator(random_state=0) obj.fit(X, y) score = obj.score(X, y) pickle_object = pickle.dumps(obj) obj2 = pickle.loads(pickle_object) assert_equal(type(obj2), obj.__class__) score2 = obj2.score(X, y) assert_equal(score, score2) def test_pickle(): for name in FOREST_CLASSIFIERS: yield check_pickle, name, iris.data[::2], iris.target[::2] for name in FOREST_REGRESSORS: yield check_pickle, name, boston.data[::2], boston.target[::2] def check_multioutput(name): # Check estimators on multi-output problems. X_train = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [-2, 1], [-1, 1], [-1, 2], [2, -1], [1, -1], [1, -2]] y_train = [[-1, 0], [-1, 0], [-1, 0], [1, 1], [1, 1], [1, 1], [-1, 2], [-1, 2], [-1, 2], [1, 3], [1, 3], [1, 3]] X_test = [[-1, -1], [1, 1], [-1, 1], [1, -1]] y_test = [[-1, 0], [1, 1], [-1, 2], [1, 3]] est = FOREST_ESTIMATORS[name](random_state=0, bootstrap=False) y_pred = est.fit(X_train, y_train).predict(X_test) assert_array_almost_equal(y_pred, y_test) if name in FOREST_CLASSIFIERS: with np.errstate(divide="ignore"): proba = est.predict_proba(X_test) assert_equal(len(proba), 2) assert_equal(proba[0].shape, (4, 2)) assert_equal(proba[1].shape, (4, 4)) log_proba = est.predict_log_proba(X_test) assert_equal(len(log_proba), 2) assert_equal(log_proba[0].shape, (4, 2)) assert_equal(log_proba[1].shape, (4, 4)) def test_multioutput(): for name in FOREST_CLASSIFIERS: yield check_multioutput, name for name in FOREST_REGRESSORS: yield check_multioutput, name def check_classes_shape(name): # Test that n_classes_ and classes_ have proper shape. ForestClassifier = FOREST_CLASSIFIERS[name] # Classification, single output clf = ForestClassifier(random_state=0).fit(X, y) assert_equal(clf.n_classes_, 2) assert_array_equal(clf.classes_, [-1, 1]) # Classification, multi-output _y = np.vstack((y, np.array(y) * 2)).T clf = ForestClassifier(random_state=0).fit(X, _y) assert_array_equal(clf.n_classes_, [2, 2]) assert_array_equal(clf.classes_, [[-1, 1], [-2, 2]]) def test_classes_shape(): for name in FOREST_CLASSIFIERS: yield check_classes_shape, name def test_random_trees_dense_type(): # Test that the `sparse_output` parameter of RandomTreesEmbedding # works by returning a dense array. # Create the RTE with sparse=False hasher = RandomTreesEmbedding(n_estimators=10, sparse_output=False) X, y = datasets.make_circles(factor=0.5) X_transformed = hasher.fit_transform(X) # Assert that type is ndarray, not scipy.sparse.csr.csr_matrix assert_equal(type(X_transformed), np.ndarray) def test_random_trees_dense_equal(): # Test that the `sparse_output` parameter of RandomTreesEmbedding # works by returning the same array for both argument values. 
# Create the RTEs hasher_dense = RandomTreesEmbedding(n_estimators=10, sparse_output=False, random_state=0) hasher_sparse = RandomTreesEmbedding(n_estimators=10, sparse_output=True, random_state=0) X, y = datasets.make_circles(factor=0.5) X_transformed_dense = hasher_dense.fit_transform(X) X_transformed_sparse = hasher_sparse.fit_transform(X) # Assert that dense and sparse hashers have same array. assert_array_equal(X_transformed_sparse.toarray(), X_transformed_dense) def test_random_hasher(): # test random forest hashing on circles dataset # make sure that it is linearly separable. # even after projected to two SVD dimensions # Note: Not all random_states produce perfect results. hasher = RandomTreesEmbedding(n_estimators=30, random_state=1) X, y = datasets.make_circles(factor=0.5) X_transformed = hasher.fit_transform(X) # test fit and transform: hasher = RandomTreesEmbedding(n_estimators=30, random_state=1) assert_array_equal(hasher.fit(X).transform(X).toarray(), X_transformed.toarray()) # one leaf active per data point per forest assert_equal(X_transformed.shape[0], X.shape[0]) assert_array_equal(X_transformed.sum(axis=1), hasher.n_estimators) svd = TruncatedSVD(n_components=2) X_reduced = svd.fit_transform(X_transformed) linear_clf = LinearSVC() linear_clf.fit(X_reduced, y) assert_equal(linear_clf.score(X_reduced, y), 1.) def test_random_hasher_sparse_data(): X, y = datasets.make_multilabel_classification(random_state=0) hasher = RandomTreesEmbedding(n_estimators=30, random_state=1) X_transformed = hasher.fit_transform(X) X_transformed_sparse = hasher.fit_transform(csc_matrix(X)) assert_array_equal(X_transformed_sparse.toarray(), X_transformed.toarray()) def test_parallel_train(): rng = check_random_state(12321) n_samples, n_features = 80, 30 X_train = rng.randn(n_samples, n_features) y_train = rng.randint(0, 2, n_samples) clfs = [ RandomForestClassifier(n_estimators=20, n_jobs=n_jobs, random_state=12345).fit(X_train, y_train) for n_jobs in [1, 2, 3, 8, 16, 32] ] X_test = rng.randn(n_samples, n_features) probas = [clf.predict_proba(X_test) for clf in clfs] for proba1, proba2 in zip(probas, probas[1:]): assert_array_almost_equal(proba1, proba2) def test_distribution(): rng = check_random_state(12321) # Single variable with 4 values X = rng.randint(0, 4, size=(1000, 1)) y = rng.rand(1000) n_trees = 500 clf = ExtraTreesRegressor(n_estimators=n_trees, random_state=42).fit(X, y) uniques = defaultdict(int) for tree in clf.estimators_: tree = "".join(("%d,%d/" % (f, int(t)) if f >= 0 else "-") for f, t in zip(tree.tree_.feature, tree.tree_.threshold)) uniques[tree] += 1 uniques = sorted([(1. * count / n_trees, tree) for tree, count in uniques.items()]) # On a single variable problem where X_0 has 4 equiprobable values, there # are 5 ways to build a random tree. The more compact (0,1/0,0/--0,2/--) of # them has probability 1/3 while the 4 others have probability 1/6. assert_equal(len(uniques), 5) assert_greater(0.20, uniques[0][0]) # Rough approximation of 1/6. 
assert_greater(0.20, uniques[1][0]) assert_greater(0.20, uniques[2][0]) assert_greater(0.20, uniques[3][0]) assert_greater(uniques[4][0], 0.3) assert_equal(uniques[4][1], "0,1/0,0/--0,2/--") # Two variables, one with 2 values, one with 3 values X = np.empty((1000, 2)) X[:, 0] = np.random.randint(0, 2, 1000) X[:, 1] = np.random.randint(0, 3, 1000) y = rng.rand(1000) clf = ExtraTreesRegressor(n_estimators=100, max_features=1, random_state=1).fit(X, y) uniques = defaultdict(int) for tree in clf.estimators_: tree = "".join(("%d,%d/" % (f, int(t)) if f >= 0 else "-") for f, t in zip(tree.tree_.feature, tree.tree_.threshold)) uniques[tree] += 1 uniques = [(count, tree) for tree, count in uniques.items()] assert_equal(len(uniques), 8) def check_max_leaf_nodes_max_depth(name): X, y = hastie_X, hastie_y # Test precedence of max_leaf_nodes over max_depth. ForestEstimator = FOREST_ESTIMATORS[name] est = ForestEstimator(max_depth=1, max_leaf_nodes=4, n_estimators=1, random_state=0).fit(X, y) assert_greater(est.estimators_[0].tree_.max_depth, 1) est = ForestEstimator(max_depth=1, n_estimators=1, random_state=0).fit(X, y) assert_equal(est.estimators_[0].tree_.max_depth, 1) def test_max_leaf_nodes_max_depth(): for name in FOREST_ESTIMATORS: yield check_max_leaf_nodes_max_depth, name def check_min_samples_split(name): X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] # test boundary value assert_raises(ValueError, ForestEstimator(min_samples_split=-1).fit, X, y) assert_raises(ValueError, ForestEstimator(min_samples_split=0).fit, X, y) assert_raises(ValueError, ForestEstimator(min_samples_split=1.1).fit, X, y) est = ForestEstimator(min_samples_split=10, n_estimators=1, random_state=0) est.fit(X, y) node_idx = est.estimators_[0].tree_.children_left != -1 node_samples = est.estimators_[0].tree_.n_node_samples[node_idx] assert_greater(np.min(node_samples), len(X) * 0.5 - 1, "Failed with {0}".format(name)) est = ForestEstimator(min_samples_split=0.5, n_estimators=1, random_state=0) est.fit(X, y) node_idx = est.estimators_[0].tree_.children_left != -1 node_samples = est.estimators_[0].tree_.n_node_samples[node_idx] assert_greater(np.min(node_samples), len(X) * 0.5 - 1, "Failed with {0}".format(name)) def test_min_samples_split(): for name in FOREST_ESTIMATORS: yield check_min_samples_split, name def check_min_samples_leaf(name): X, y = hastie_X, hastie_y # Test if leaves contain more than leaf_count training examples ForestEstimator = FOREST_ESTIMATORS[name] # test boundary value assert_raises(ValueError, ForestEstimator(min_samples_leaf=-1).fit, X, y) assert_raises(ValueError, ForestEstimator(min_samples_leaf=0).fit, X, y) est = ForestEstimator(min_samples_leaf=5, n_estimators=1, random_state=0) est.fit(X, y) out = est.estimators_[0].tree_.apply(X) node_counts = bincount(out) # drop inner nodes leaf_count = node_counts[node_counts != 0] assert_greater(np.min(leaf_count), 4, "Failed with {0}".format(name)) est = ForestEstimator(min_samples_leaf=0.25, n_estimators=1, random_state=0) est.fit(X, y) out = est.estimators_[0].tree_.apply(X) node_counts = np.bincount(out) # drop inner nodes leaf_count = node_counts[node_counts != 0] assert_greater(np.min(leaf_count), len(X) * 0.25 - 1, "Failed with {0}".format(name)) def test_min_samples_leaf(): for name in FOREST_ESTIMATORS: yield check_min_samples_leaf, name def check_min_weight_fraction_leaf(name): X, y = hastie_X, hastie_y # Test if leaves contain at least min_weight_fraction_leaf of the # training set ForestEstimator = FOREST_ESTIMATORS[name] 
rng = np.random.RandomState(0) weights = rng.rand(X.shape[0]) total_weight = np.sum(weights) # test both DepthFirstTreeBuilder and BestFirstTreeBuilder # by setting max_leaf_nodes for frac in np.linspace(0, 0.5, 6): est = ForestEstimator(min_weight_fraction_leaf=frac, n_estimators=1, random_state=0) if "RandomForest" in name: est.bootstrap = False est.fit(X, y, sample_weight=weights) out = est.estimators_[0].tree_.apply(X) node_weights = bincount(out, weights=weights) # drop inner nodes leaf_weights = node_weights[node_weights != 0] assert_greater_equal( np.min(leaf_weights), total_weight * est.min_weight_fraction_leaf, "Failed with {0} " "min_weight_fraction_leaf={1}".format( name, est.min_weight_fraction_leaf)) def test_min_weight_fraction_leaf(): for name in FOREST_ESTIMATORS: yield check_min_weight_fraction_leaf, name def check_sparse_input(name, X, X_sparse, y): ForestEstimator = FOREST_ESTIMATORS[name] dense = ForestEstimator(random_state=0, max_depth=2).fit(X, y) sparse = ForestEstimator(random_state=0, max_depth=2).fit(X_sparse, y) assert_array_almost_equal(sparse.apply(X), dense.apply(X)) if name in FOREST_CLASSIFIERS or name in FOREST_REGRESSORS: assert_array_almost_equal(sparse.predict(X), dense.predict(X)) assert_array_almost_equal(sparse.feature_importances_, dense.feature_importances_) if name in FOREST_CLASSIFIERS: assert_array_almost_equal(sparse.predict_proba(X), dense.predict_proba(X)) assert_array_almost_equal(sparse.predict_log_proba(X), dense.predict_log_proba(X)) if name in FOREST_TRANSFORMERS: assert_array_almost_equal(sparse.transform(X).toarray(), dense.transform(X).toarray()) assert_array_almost_equal(sparse.fit_transform(X).toarray(), dense.fit_transform(X).toarray()) def test_sparse_input(): X, y = datasets.make_multilabel_classification(random_state=0, n_samples=50) for name, sparse_matrix in product(FOREST_ESTIMATORS, (csr_matrix, csc_matrix, coo_matrix)): yield check_sparse_input, name, X, sparse_matrix(X), y def check_memory_layout(name, dtype): # Check that it works no matter the memory layout est = FOREST_ESTIMATORS[name](random_state=0, bootstrap=False) # Nothing X = np.asarray(iris.data, dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # C-order X = np.asarray(iris.data, order="C", dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # F-order X = np.asarray(iris.data, order="F", dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # Contiguous X = np.ascontiguousarray(iris.data, dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) if est.base_estimator.splitter in SPARSE_SPLITTERS: # csr matrix X = csr_matrix(iris.data, dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # csc_matrix X = csc_matrix(iris.data, dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # coo_matrix X = coo_matrix(iris.data, dtype=dtype) y = iris.target assert_array_equal(est.fit(X, y).predict(X), y) # Strided X = np.asarray(iris.data[::3], dtype=dtype) y = iris.target[::3] assert_array_equal(est.fit(X, y).predict(X), y) def test_memory_layout(): for name, dtype in product(FOREST_CLASSIFIERS, [np.float64, np.float32]): yield check_memory_layout, name, dtype for name, dtype in product(FOREST_REGRESSORS, [np.float64, np.float32]): yield check_memory_layout, name, dtype @ignore_warnings def check_1d_input(name, X, X_2d, y): ForestEstimator = FOREST_ESTIMATORS[name] assert_raises(ValueError, ForestEstimator(n_estimators=1, 
random_state=0).fit, X, y) est = ForestEstimator(random_state=0) est.fit(X_2d, y) if name in FOREST_CLASSIFIERS or name in FOREST_REGRESSORS: assert_raises(ValueError, est.predict, X) @ignore_warnings def test_1d_input(): X = iris.data[:, 0] X_2d = iris.data[:, 0].reshape((-1, 1)) y = iris.target for name in FOREST_ESTIMATORS: yield check_1d_input, name, X, X_2d, y def check_class_weights(name): # Check class_weights resemble sample_weights behavior. ForestClassifier = FOREST_CLASSIFIERS[name] # Iris is balanced, so no effect expected for using 'balanced' weights clf1 = ForestClassifier(random_state=0) clf1.fit(iris.data, iris.target) clf2 = ForestClassifier(class_weight='balanced', random_state=0) clf2.fit(iris.data, iris.target) assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_) # Make a multi-output problem with three copies of Iris iris_multi = np.vstack((iris.target, iris.target, iris.target)).T # Create user-defined weights that should balance over the outputs clf3 = ForestClassifier(class_weight=[{0: 2., 1: 2., 2: 1.}, {0: 2., 1: 1., 2: 2.}, {0: 1., 1: 2., 2: 2.}], random_state=0) clf3.fit(iris.data, iris_multi) assert_almost_equal(clf2.feature_importances_, clf3.feature_importances_) # Check against multi-output "balanced" which should also have no effect clf4 = ForestClassifier(class_weight='balanced', random_state=0) clf4.fit(iris.data, iris_multi) assert_almost_equal(clf3.feature_importances_, clf4.feature_importances_) # Inflate importance of class 1, check against user-defined weights sample_weight = np.ones(iris.target.shape) sample_weight[iris.target == 1] *= 100 class_weight = {0: 1., 1: 100., 2: 1.} clf1 = ForestClassifier(random_state=0) clf1.fit(iris.data, iris.target, sample_weight) clf2 = ForestClassifier(class_weight=class_weight, random_state=0) clf2.fit(iris.data, iris.target) assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_) # Check that sample_weight and class_weight are multiplicative clf1 = ForestClassifier(random_state=0) clf1.fit(iris.data, iris.target, sample_weight ** 2) clf2 = ForestClassifier(class_weight=class_weight, random_state=0) clf2.fit(iris.data, iris.target, sample_weight) assert_almost_equal(clf1.feature_importances_, clf2.feature_importances_) def test_class_weights(): for name in FOREST_CLASSIFIERS: yield check_class_weights, name def check_class_weight_balanced_and_bootstrap_multi_output(name): # Test class_weight works for multi-output""" ForestClassifier = FOREST_CLASSIFIERS[name] _y = np.vstack((y, np.array(y) * 2)).T clf = ForestClassifier(class_weight='balanced', random_state=0) clf.fit(X, _y) clf = ForestClassifier(class_weight=[{-1: 0.5, 1: 1.}, {-2: 1., 2: 1.}], random_state=0) clf.fit(X, _y) # smoke test for subsample and balanced subsample clf = ForestClassifier(class_weight='balanced_subsample', random_state=0) clf.fit(X, _y) clf = ForestClassifier(class_weight='subsample', random_state=0) ignore_warnings(clf.fit)(X, _y) def test_class_weight_balanced_and_bootstrap_multi_output(): for name in FOREST_CLASSIFIERS: yield check_class_weight_balanced_and_bootstrap_multi_output, name def check_class_weight_errors(name): # Test if class_weight raises errors and warnings when expected. 
ForestClassifier = FOREST_CLASSIFIERS[name] _y = np.vstack((y, np.array(y) * 2)).T # Invalid preset string clf = ForestClassifier(class_weight='the larch', random_state=0) assert_raises(ValueError, clf.fit, X, y) assert_raises(ValueError, clf.fit, X, _y) # Warning warm_start with preset clf = ForestClassifier(class_weight='auto', warm_start=True, random_state=0) assert_warns(UserWarning, clf.fit, X, y) assert_warns(UserWarning, clf.fit, X, _y) # Not a list or preset for multi-output clf = ForestClassifier(class_weight=1, random_state=0) assert_raises(ValueError, clf.fit, X, _y) # Incorrect length list for multi-output clf = ForestClassifier(class_weight=[{-1: 0.5, 1: 1.}], random_state=0) assert_raises(ValueError, clf.fit, X, _y) def test_class_weight_errors(): for name in FOREST_CLASSIFIERS: yield check_class_weight_errors, name def check_warm_start(name, random_state=42): # Test if fitting incrementally with warm start gives a forest of the # right size and the same results as a normal fit. X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] clf_ws = None for n_estimators in [5, 10]: if clf_ws is None: clf_ws = ForestEstimator(n_estimators=n_estimators, random_state=random_state, warm_start=True) else: clf_ws.set_params(n_estimators=n_estimators) clf_ws.fit(X, y) assert_equal(len(clf_ws), n_estimators) clf_no_ws = ForestEstimator(n_estimators=10, random_state=random_state, warm_start=False) clf_no_ws.fit(X, y) assert_equal(set([tree.random_state for tree in clf_ws]), set([tree.random_state for tree in clf_no_ws])) assert_array_equal(clf_ws.apply(X), clf_no_ws.apply(X), err_msg="Failed with {0}".format(name)) def test_warm_start(): for name in FOREST_ESTIMATORS: yield check_warm_start, name def check_warm_start_clear(name): # Test if fit clears state and grows a new forest when warm_start==False. X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] clf = ForestEstimator(n_estimators=5, max_depth=1, warm_start=False, random_state=1) clf.fit(X, y) clf_2 = ForestEstimator(n_estimators=5, max_depth=1, warm_start=True, random_state=2) clf_2.fit(X, y) # inits state clf_2.set_params(warm_start=False, random_state=1) clf_2.fit(X, y) # clears old state and equals clf assert_array_almost_equal(clf_2.apply(X), clf.apply(X)) def test_warm_start_clear(): for name in FOREST_ESTIMATORS: yield check_warm_start_clear, name def check_warm_start_smaller_n_estimators(name): # Test if warm start second fit with smaller n_estimators raises error. X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] clf = ForestEstimator(n_estimators=5, max_depth=1, warm_start=True) clf.fit(X, y) clf.set_params(n_estimators=4) assert_raises(ValueError, clf.fit, X, y) def test_warm_start_smaller_n_estimators(): for name in FOREST_ESTIMATORS: yield check_warm_start_smaller_n_estimators, name def check_warm_start_equal_n_estimators(name): # Test if warm start with equal n_estimators does nothing and returns the # same forest and raises a warning. X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] clf = ForestEstimator(n_estimators=5, max_depth=3, warm_start=True, random_state=1) clf.fit(X, y) clf_2 = ForestEstimator(n_estimators=5, max_depth=3, warm_start=True, random_state=1) clf_2.fit(X, y) # Now clf_2 equals clf. clf_2.set_params(random_state=2) assert_warns(UserWarning, clf_2.fit, X, y) # If we had fit the trees again we would have got a different forest as we # changed the random state. 
assert_array_equal(clf.apply(X), clf_2.apply(X)) def test_warm_start_equal_n_estimators(): for name in FOREST_ESTIMATORS: yield check_warm_start_equal_n_estimators, name def check_warm_start_oob(name): # Test that the warm start computes oob score when asked. X, y = hastie_X, hastie_y ForestEstimator = FOREST_ESTIMATORS[name] # Use 15 estimators to avoid 'some inputs do not have OOB scores' warning. clf = ForestEstimator(n_estimators=15, max_depth=3, warm_start=False, random_state=1, bootstrap=True, oob_score=True) clf.fit(X, y) clf_2 = ForestEstimator(n_estimators=5, max_depth=3, warm_start=False, random_state=1, bootstrap=True, oob_score=False) clf_2.fit(X, y) clf_2.set_params(warm_start=True, oob_score=True, n_estimators=15) clf_2.fit(X, y) assert_true(hasattr(clf_2, 'oob_score_')) assert_equal(clf.oob_score_, clf_2.oob_score_) # Test that oob_score is computed even if we don't need to train # additional trees. clf_3 = ForestEstimator(n_estimators=15, max_depth=3, warm_start=True, random_state=1, bootstrap=True, oob_score=False) clf_3.fit(X, y) assert_true(not(hasattr(clf_3, 'oob_score_'))) clf_3.set_params(oob_score=True) ignore_warnings(clf_3.fit)(X, y) assert_equal(clf.oob_score_, clf_3.oob_score_) def test_warm_start_oob(): for name in FOREST_CLASSIFIERS: yield check_warm_start_oob, name for name in FOREST_REGRESSORS: yield check_warm_start_oob, name def test_dtype_convert(n_classes=15): classifier = RandomForestClassifier(random_state=0, bootstrap=False) X = np.eye(n_classes) y = [ch for ch in 'ABCDEFGHIJKLMNOPQRSTU'[:n_classes]] result = classifier.fit(X, y).predict(X) assert_array_equal(classifier.classes_, y) assert_array_equal(result, y) def check_decision_path(name): X, y = hastie_X, hastie_y n_samples = X.shape[0] ForestEstimator = FOREST_ESTIMATORS[name] est = ForestEstimator(n_estimators=5, max_depth=1, warm_start=False, random_state=1) est.fit(X, y) indicator, n_nodes_ptr = est.decision_path(X) assert_equal(indicator.shape[1], n_nodes_ptr[-1]) assert_equal(indicator.shape[0], n_samples) assert_array_equal(np.diff(n_nodes_ptr), [e.tree_.node_count for e in est.estimators_]) # Assert that leaves index are correct leaves = est.apply(X) for est_id in range(leaves.shape[1]): leave_indicator = [indicator[i, n_nodes_ptr[est_id] + j] for i, j in enumerate(leaves[:, est_id])] assert_array_almost_equal(leave_indicator, np.ones(shape=n_samples)) def test_decision_path(): for name in FOREST_CLASSIFIERS: yield check_decision_path, name for name in FOREST_REGRESSORS: yield check_decision_path, name
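# Illustrative sketch (not part of the test suite): the warm-start behaviour
# that check_warm_start() above exercises, shown on a toy problem. It assumes
# only the imports already present at the top of this module.
def demo_warm_start():
    X_demo, y_demo = datasets.make_classification(n_samples=100,
                                                  random_state=0)
    clf = RandomForestClassifier(n_estimators=5, warm_start=True,
                                 random_state=42)
    clf.fit(X_demo, y_demo)            # grows the first 5 trees
    clf.set_params(n_estimators=10)
    clf.fit(X_demo, y_demo)            # adds 5 more trees, keeping the first 5
    assert len(clf.estimators_) == 10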
bsd-3-clause
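The decision_path checks at the end of that file rely on one sparse indicator laid out over the concatenated node space of all trees. A small sketch of that layout, again assuming scikit-learn's public forest API and added only for illustration:

import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier

X, y = make_classification(n_samples=50, random_state=0)
est = RandomForestClassifier(n_estimators=3, max_depth=2, random_state=0).fit(X, y)

# n_nodes_ptr[i]:n_nodes_ptr[i+1] are the indicator columns belonging to tree i.
indicator, n_nodes_ptr = est.decision_path(X)
assert indicator.shape == (X.shape[0], n_nodes_ptr[-1])

# apply() gives per-tree leaf ids; offsetting by n_nodes_ptr recovers the
# global column of the leaf each sample lands in, which must be marked 1.
leaves = est.apply(X)
assert indicator[0, n_nodes_ptr[0] + leaves[0, 0]] == 1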
pratapvardhan/pandas
pandas/tests/reshape/test_pivot.py
2
81063
# -*- coding: utf-8 -*- from datetime import datetime, date, timedelta import pytest import numpy as np from collections import OrderedDict import pandas as pd from pandas import (DataFrame, Series, Index, MultiIndex, Grouper, date_range, concat, Categorical) from pandas.core.reshape.pivot import pivot_table, crosstab from pandas.compat import range, product import pandas.util.testing as tm from pandas.api.types import CategoricalDtype as CDT @pytest.fixture(params=[True, False]) def dropna(request): return request.param class TestPivotTable(object): def setup_method(self, method): self.data = DataFrame({'A': ['foo', 'foo', 'foo', 'foo', 'bar', 'bar', 'bar', 'bar', 'foo', 'foo', 'foo'], 'B': ['one', 'one', 'one', 'two', 'one', 'one', 'one', 'two', 'two', 'two', 'one'], 'C': ['dull', 'dull', 'shiny', 'dull', 'dull', 'shiny', 'shiny', 'dull', 'shiny', 'shiny', 'shiny'], 'D': np.random.randn(11), 'E': np.random.randn(11), 'F': np.random.randn(11)}) def test_pivot_table(self): index = ['A', 'B'] columns = 'C' table = pivot_table(self.data, values='D', index=index, columns=columns) table2 = self.data.pivot_table( values='D', index=index, columns=columns) tm.assert_frame_equal(table, table2) # this works pivot_table(self.data, values='D', index=index) if len(index) > 1: assert table.index.names == tuple(index) else: assert table.index.name == index[0] if len(columns) > 1: assert table.columns.names == columns else: assert table.columns.name == columns[0] expected = self.data.groupby( index + [columns])['D'].agg(np.mean).unstack() tm.assert_frame_equal(table, expected) def test_pivot_table_nocols(self): df = DataFrame({'rows': ['a', 'b', 'c'], 'cols': ['x', 'y', 'z'], 'values': [1, 2, 3]}) rs = df.pivot_table(columns='cols', aggfunc=np.sum) xp = df.pivot_table(index='cols', aggfunc=np.sum).T tm.assert_frame_equal(rs, xp) rs = df.pivot_table(columns='cols', aggfunc={'values': 'mean'}) xp = df.pivot_table(index='cols', aggfunc={'values': 'mean'}).T tm.assert_frame_equal(rs, xp) def test_pivot_table_dropna(self): df = DataFrame({'amount': {0: 60000, 1: 100000, 2: 50000, 3: 30000}, 'customer': {0: 'A', 1: 'A', 2: 'B', 3: 'C'}, 'month': {0: 201307, 1: 201309, 2: 201308, 3: 201310}, 'product': {0: 'a', 1: 'b', 2: 'c', 3: 'd'}, 'quantity': {0: 2000000, 1: 500000, 2: 1000000, 3: 1000000}}) pv_col = df.pivot_table('quantity', 'month', [ 'customer', 'product'], dropna=False) pv_ind = df.pivot_table( 'quantity', ['customer', 'product'], 'month', dropna=False) m = MultiIndex.from_tuples([('A', 'a'), ('A', 'b'), ('A', 'c'), ('A', 'd'), ('B', 'a'), ('B', 'b'), ('B', 'c'), ('B', 'd'), ('C', 'a'), ('C', 'b'), ('C', 'c'), ('C', 'd')], names=['customer', 'product']) tm.assert_index_equal(pv_col.columns, m) tm.assert_index_equal(pv_ind.index, m) def test_pivot_table_categorical(self): cat1 = Categorical(["a", "a", "b", "b"], categories=["a", "b", "z"], ordered=True) cat2 = Categorical(["c", "d", "c", "d"], categories=["c", "d", "y"], ordered=True) df = DataFrame({"A": cat1, "B": cat2, "values": [1, 2, 3, 4]}) result = pd.pivot_table(df, values='values', index=['A', 'B'], dropna=True) exp_index = pd.MultiIndex.from_arrays( [cat1, cat2], names=['A', 'B']) expected = DataFrame( {'values': [1, 2, 3, 4]}, index=exp_index) tm.assert_frame_equal(result, expected) def test_pivot_table_dropna_categoricals(self, dropna): # GH 15193 categories = ['a', 'b', 'c', 'd'] df = DataFrame({'A': ['a', 'a', 'a', 'b', 'b', 'b', 'c', 'c', 'c'], 'B': [1, 2, 3, 1, 2, 3, 1, 2, 3], 'C': range(0, 9)}) df['A'] = df['A'].astype(CDT(categories, 
ordered=False)) result = df.pivot_table(index='B', columns='A', values='C', dropna=dropna) expected_columns = Series(['a', 'b', 'c'], name='A') expected_columns = expected_columns.astype( CDT(categories, ordered=False)) expected_index = Series([1, 2, 3], name='B') expected = DataFrame([[0, 3, 6], [1, 4, 7], [2, 5, 8]], index=expected_index, columns=expected_columns,) if not dropna: # add back the non observed to compare expected = expected.reindex( columns=Categorical(categories)).astype('float') tm.assert_frame_equal(result, expected) def test_pivot_with_non_observable_dropna(self, dropna): # gh-21133 df = pd.DataFrame( {'A': pd.Categorical([np.nan, 'low', 'high', 'low', 'high'], categories=['low', 'high'], ordered=True), 'B': range(5)}) result = df.pivot_table(index='A', values='B', dropna=dropna) expected = pd.DataFrame( {'B': [2, 3]}, index=pd.Index( pd.Categorical.from_codes([0, 1], categories=['low', 'high'], ordered=True), name='A')) tm.assert_frame_equal(result, expected) # gh-21378 df = pd.DataFrame( {'A': pd.Categorical(['left', 'low', 'high', 'low', 'high'], categories=['low', 'high', 'left'], ordered=True), 'B': range(5)}) result = df.pivot_table(index='A', values='B', dropna=dropna) expected = pd.DataFrame( {'B': [2, 3, 0]}, index=pd.Index( pd.Categorical.from_codes([0, 1, 2], categories=['low', 'high', 'left'], ordered=True), name='A')) tm.assert_frame_equal(result, expected) def test_pass_array(self): result = self.data.pivot_table( 'D', index=self.data.A, columns=self.data.C) expected = self.data.pivot_table('D', index='A', columns='C') tm.assert_frame_equal(result, expected) def test_pass_function(self): result = self.data.pivot_table('D', index=lambda x: x // 5, columns=self.data.C) expected = self.data.pivot_table('D', index=self.data.index // 5, columns='C') tm.assert_frame_equal(result, expected) def test_pivot_table_multiple(self): index = ['A', 'B'] columns = 'C' table = pivot_table(self.data, index=index, columns=columns) expected = self.data.groupby(index + [columns]).agg(np.mean).unstack() tm.assert_frame_equal(table, expected) def test_pivot_dtypes(self): # can convert dtypes f = DataFrame({'a': ['cat', 'bat', 'cat', 'bat'], 'v': [ 1, 2, 3, 4], 'i': ['a', 'b', 'a', 'b']}) assert f.dtypes['v'] == 'int64' z = pivot_table(f, values='v', index=['a'], columns=[ 'i'], fill_value=0, aggfunc=np.sum) result = z.get_dtype_counts() expected = Series(dict(int64=2)) tm.assert_series_equal(result, expected) # cannot convert dtypes f = DataFrame({'a': ['cat', 'bat', 'cat', 'bat'], 'v': [ 1.5, 2.5, 3.5, 4.5], 'i': ['a', 'b', 'a', 'b']}) assert f.dtypes['v'] == 'float64' z = pivot_table(f, values='v', index=['a'], columns=[ 'i'], fill_value=0, aggfunc=np.mean) result = z.get_dtype_counts() expected = Series(dict(float64=2)) tm.assert_series_equal(result, expected) @pytest.mark.parametrize('columns,values', [('bool1', ['float1', 'float2']), ('bool1', ['float1', 'float2', 'bool1']), ('bool2', ['float1', 'float2', 'bool1'])]) def test_pivot_preserve_dtypes(self, columns, values): # GH 7142 regression test v = np.arange(5, dtype=np.float64) df = DataFrame({'float1': v, 'float2': v + 2.0, 'bool1': v <= 2, 'bool2': v <= 3}) df_res = df.reset_index().pivot_table( index='index', columns=columns, values=values) result = dict(df_res.dtypes) expected = {col: np.dtype('O') if col[0].startswith('b') else np.dtype('float64') for col in df_res} assert result == expected def test_pivot_no_values(self): # GH 14380 idx = pd.DatetimeIndex(['2011-01-01', '2011-02-01', '2011-01-02', '2011-01-01', 
'2011-01-02']) df = pd.DataFrame({'A': [1, 2, 3, 4, 5]}, index=idx) res = df.pivot_table(index=df.index.month, columns=df.index.day) exp_columns = pd.MultiIndex.from_tuples([('A', 1), ('A', 2)]) exp = pd.DataFrame([[2.5, 4.0], [2.0, np.nan]], index=[1, 2], columns=exp_columns) tm.assert_frame_equal(res, exp) df = pd.DataFrame({'A': [1, 2, 3, 4, 5], 'dt': pd.date_range('2011-01-01', freq='D', periods=5)}, index=idx) res = df.pivot_table(index=df.index.month, columns=pd.Grouper(key='dt', freq='M')) exp_columns = pd.MultiIndex.from_tuples([('A', pd.Timestamp('2011-01-31'))]) exp_columns.names = [None, 'dt'] exp = pd.DataFrame([3.25, 2.0], index=[1, 2], columns=exp_columns) tm.assert_frame_equal(res, exp) res = df.pivot_table(index=pd.Grouper(freq='A'), columns=pd.Grouper(key='dt', freq='M')) exp = pd.DataFrame([3], index=pd.DatetimeIndex(['2011-12-31']), columns=exp_columns) tm.assert_frame_equal(res, exp) def test_pivot_multi_values(self): result = pivot_table(self.data, values=['D', 'E'], index='A', columns=['B', 'C'], fill_value=0) expected = pivot_table(self.data.drop(['F'], axis=1), index='A', columns=['B', 'C'], fill_value=0) tm.assert_frame_equal(result, expected) def test_pivot_multi_functions(self): f = lambda func: pivot_table(self.data, values=['D', 'E'], index=['A', 'B'], columns='C', aggfunc=func) result = f([np.mean, np.std]) means = f(np.mean) stds = f(np.std) expected = concat([means, stds], keys=['mean', 'std'], axis=1) tm.assert_frame_equal(result, expected) # margins not supported?? f = lambda func: pivot_table(self.data, values=['D', 'E'], index=['A', 'B'], columns='C', aggfunc=func, margins=True) result = f([np.mean, np.std]) means = f(np.mean) stds = f(np.std) expected = concat([means, stds], keys=['mean', 'std'], axis=1) tm.assert_frame_equal(result, expected) def test_pivot_index_with_nan(self): # GH 3588 nan = np.nan df = DataFrame({'a': ['R1', 'R2', nan, 'R4'], 'b': ['C1', 'C2', 'C3', 'C4'], 'c': [10, 15, 17, 20]}) result = df.pivot('a', 'b', 'c') expected = DataFrame([[nan, nan, 17, nan], [10, nan, nan, nan], [nan, 15, nan, nan], [nan, nan, nan, 20]], index=Index([nan, 'R1', 'R2', 'R4'], name='a'), columns=Index(['C1', 'C2', 'C3', 'C4'], name='b')) tm.assert_frame_equal(result, expected) tm.assert_frame_equal(df.pivot('b', 'a', 'c'), expected.T) # GH9491 df = DataFrame({'a': pd.date_range('2014-02-01', periods=6, freq='D'), 'c': 100 + np.arange(6)}) df['b'] = df['a'] - pd.Timestamp('2014-02-02') df.loc[1, 'a'] = df.loc[3, 'a'] = nan df.loc[1, 'b'] = df.loc[4, 'b'] = nan pv = df.pivot('a', 'b', 'c') assert pv.notna().values.sum() == len(df) for _, row in df.iterrows(): assert pv.loc[row['a'], row['b']] == row['c'] tm.assert_frame_equal(df.pivot('b', 'a', 'c'), pv.T) def test_pivot_with_tz(self): # GH 5878 df = DataFrame({'dt1': [datetime(2013, 1, 1, 9, 0), datetime(2013, 1, 2, 9, 0), datetime(2013, 1, 1, 9, 0), datetime(2013, 1, 2, 9, 0)], 'dt2': [datetime(2014, 1, 1, 9, 0), datetime(2014, 1, 1, 9, 0), datetime(2014, 1, 2, 9, 0), datetime(2014, 1, 2, 9, 0)], 'data1': np.arange(4, dtype='int64'), 'data2': np.arange(4, dtype='int64')}) df['dt1'] = df['dt1'].apply(lambda d: pd.Timestamp(d, tz='US/Pacific')) df['dt2'] = df['dt2'].apply(lambda d: pd.Timestamp(d, tz='Asia/Tokyo')) exp_col1 = Index(['data1', 'data1', 'data2', 'data2']) exp_col2 = pd.DatetimeIndex(['2014/01/01 09:00', '2014/01/02 09:00'] * 2, name='dt2', tz='Asia/Tokyo') exp_col = pd.MultiIndex.from_arrays([exp_col1, exp_col2]) expected = DataFrame([[0, 2, 0, 2], [1, 3, 1, 3]], 
index=pd.DatetimeIndex(['2013/01/01 09:00', '2013/01/02 09:00'], name='dt1', tz='US/Pacific'), columns=exp_col) pv = df.pivot(index='dt1', columns='dt2') tm.assert_frame_equal(pv, expected) expected = DataFrame([[0, 2], [1, 3]], index=pd.DatetimeIndex(['2013/01/01 09:00', '2013/01/02 09:00'], name='dt1', tz='US/Pacific'), columns=pd.DatetimeIndex(['2014/01/01 09:00', '2014/01/02 09:00'], name='dt2', tz='Asia/Tokyo')) pv = df.pivot(index='dt1', columns='dt2', values='data1') tm.assert_frame_equal(pv, expected) def test_pivot_periods(self): df = DataFrame({'p1': [pd.Period('2013-01-01', 'D'), pd.Period('2013-01-02', 'D'), pd.Period('2013-01-01', 'D'), pd.Period('2013-01-02', 'D')], 'p2': [pd.Period('2013-01', 'M'), pd.Period('2013-01', 'M'), pd.Period('2013-02', 'M'), pd.Period('2013-02', 'M')], 'data1': np.arange(4, dtype='int64'), 'data2': np.arange(4, dtype='int64')}) exp_col1 = Index(['data1', 'data1', 'data2', 'data2']) exp_col2 = pd.PeriodIndex(['2013-01', '2013-02'] * 2, name='p2', freq='M') exp_col = pd.MultiIndex.from_arrays([exp_col1, exp_col2]) expected = DataFrame([[0, 2, 0, 2], [1, 3, 1, 3]], index=pd.PeriodIndex(['2013-01-01', '2013-01-02'], name='p1', freq='D'), columns=exp_col) pv = df.pivot(index='p1', columns='p2') tm.assert_frame_equal(pv, expected) expected = DataFrame([[0, 2], [1, 3]], index=pd.PeriodIndex(['2013-01-01', '2013-01-02'], name='p1', freq='D'), columns=pd.PeriodIndex(['2013-01', '2013-02'], name='p2', freq='M')) pv = df.pivot(index='p1', columns='p2', values='data1') tm.assert_frame_equal(pv, expected) @pytest.mark.parametrize('values', [ ['baz', 'zoo'], np.array(['baz', 'zoo']), pd.Series(['baz', 'zoo']), pd.Index(['baz', 'zoo']) ]) def test_pivot_with_list_like_values(self, values): # issue #17160 df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', 'two'], 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], 'baz': [1, 2, 3, 4, 5, 6], 'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) result = df.pivot(index='foo', columns='bar', values=values) data = [[1, 2, 3, 'x', 'y', 'z'], [4, 5, 6, 'q', 'w', 't']] index = Index(data=['one', 'two'], name='foo') columns = MultiIndex(levels=[['baz', 'zoo'], ['A', 'B', 'C']], labels=[[0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 1, 2]], names=[None, 'bar']) expected = DataFrame(data=data, index=index, columns=columns, dtype='object') tm.assert_frame_equal(result, expected) @pytest.mark.parametrize('values', [ ['bar', 'baz'], np.array(['bar', 'baz']), pd.Series(['bar', 'baz']), pd.Index(['bar', 'baz']) ]) def test_pivot_with_list_like_values_nans(self, values): # issue #17160 df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', 'two'], 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], 'baz': [1, 2, 3, 4, 5, 6], 'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) result = df.pivot(index='zoo', columns='foo', values=values) data = [[np.nan, 'A', np.nan, 4], [np.nan, 'C', np.nan, 6], [np.nan, 'B', np.nan, 5], ['A', np.nan, 1, np.nan], ['B', np.nan, 2, np.nan], ['C', np.nan, 3, np.nan]] index = Index(data=['q', 't', 'w', 'x', 'y', 'z'], name='zoo') columns = MultiIndex(levels=[['bar', 'baz'], ['one', 'two']], labels=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[None, 'foo']) expected = DataFrame(data=data, index=index, columns=columns, dtype='object') tm.assert_frame_equal(result, expected) @pytest.mark.xfail(reason='MultiIndexed unstack with tuple names fails' 'with KeyError #19966') def test_pivot_with_multiindex(self): # issue #17160 index = Index(data=[0, 1, 2, 3, 4, 5]) data = [['one', 'A', 1, 'x'], ['one', 'B', 2, 'y'], ['one', 'C', 3, 'z'], ['two', 'A', 4, 'q'], 
['two', 'B', 5, 'w'], ['two', 'C', 6, 't']] columns = MultiIndex(levels=[['bar', 'baz'], ['first', 'second']], labels=[[0, 0, 1, 1], [0, 1, 0, 1]]) df = DataFrame(data=data, index=index, columns=columns, dtype='object') result = df.pivot(index=('bar', 'first'), columns=('bar', 'second'), values=('baz', 'first')) data = {'A': Series([1, 4], index=['one', 'two']), 'B': Series([2, 5], index=['one', 'two']), 'C': Series([3, 6], index=['one', 'two'])} expected = DataFrame(data) tm.assert_frame_equal(result, expected) def test_pivot_with_tuple_of_values(self): # issue #17160 df = pd.DataFrame({'foo': ['one', 'one', 'one', 'two', 'two', 'two'], 'bar': ['A', 'B', 'C', 'A', 'B', 'C'], 'baz': [1, 2, 3, 4, 5, 6], 'zoo': ['x', 'y', 'z', 'q', 'w', 't']}) with pytest.raises(KeyError): # tuple is seen as a single column name df.pivot(index='zoo', columns='foo', values=('bar', 'baz')) def test_margins(self): def _check_output(result, values_col, index=['A', 'B'], columns=['C'], margins_col='All'): col_margins = result.loc[result.index[:-1], margins_col] expected_col_margins = self.data.groupby(index)[values_col].mean() tm.assert_series_equal(col_margins, expected_col_margins, check_names=False) assert col_margins.name == margins_col result = result.sort_index() index_margins = result.loc[(margins_col, '')].iloc[:-1] expected_ix_margins = self.data.groupby(columns)[values_col].mean() tm.assert_series_equal(index_margins, expected_ix_margins, check_names=False) assert index_margins.name == (margins_col, '') grand_total_margins = result.loc[(margins_col, ''), margins_col] expected_total_margins = self.data[values_col].mean() assert grand_total_margins == expected_total_margins # column specified result = self.data.pivot_table(values='D', index=['A', 'B'], columns='C', margins=True, aggfunc=np.mean) _check_output(result, 'D') # Set a different margins_name (not 'All') result = self.data.pivot_table(values='D', index=['A', 'B'], columns='C', margins=True, aggfunc=np.mean, margins_name='Totals') _check_output(result, 'D', margins_col='Totals') # no column specified table = self.data.pivot_table(index=['A', 'B'], columns='C', margins=True, aggfunc=np.mean) for value_col in table.columns.levels[0]: _check_output(table[value_col], value_col) # no col # to help with a buglet self.data.columns = [k * 2 for k in self.data.columns] table = self.data.pivot_table(index=['AA', 'BB'], margins=True, aggfunc=np.mean) for value_col in table.columns: totals = table.loc[('All', ''), value_col] assert totals == self.data[value_col].mean() # no rows rtable = self.data.pivot_table(columns=['AA', 'BB'], margins=True, aggfunc=np.mean) assert isinstance(rtable, Series) table = self.data.pivot_table(index=['AA', 'BB'], margins=True, aggfunc='mean') for item in ['DD', 'EE', 'FF']: totals = table.loc[('All', ''), item] assert totals == self.data[item].mean() # issue number #8349: pivot_table with margins and dictionary aggfunc data = [ {'JOB': 'Worker', 'NAME': 'Bob', 'YEAR': 2013, 'MONTH': 12, 'DAYS': 3, 'SALARY': 17}, {'JOB': 'Employ', 'NAME': 'Mary', 'YEAR': 2013, 'MONTH': 12, 'DAYS': 5, 'SALARY': 23}, {'JOB': 'Worker', 'NAME': 'Bob', 'YEAR': 2014, 'MONTH': 1, 'DAYS': 10, 'SALARY': 100}, {'JOB': 'Worker', 'NAME': 'Bob', 'YEAR': 2014, 'MONTH': 1, 'DAYS': 11, 'SALARY': 110}, {'JOB': 'Employ', 'NAME': 'Mary', 'YEAR': 2014, 'MONTH': 1, 'DAYS': 15, 'SALARY': 200}, {'JOB': 'Worker', 'NAME': 'Bob', 'YEAR': 2014, 'MONTH': 2, 'DAYS': 8, 'SALARY': 80}, {'JOB': 'Employ', 'NAME': 'Mary', 'YEAR': 2014, 'MONTH': 2, 'DAYS': 5, 'SALARY': 190}, ] df 
= DataFrame(data) df = df.set_index(['JOB', 'NAME', 'YEAR', 'MONTH'], drop=False, append=False) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): result = df.pivot_table(index=['JOB', 'NAME'], columns=['YEAR', 'MONTH'], values=['DAYS', 'SALARY'], aggfunc={'DAYS': 'mean', 'SALARY': 'sum'}, margins=True) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): expected = df.pivot_table(index=['JOB', 'NAME'], columns=['YEAR', 'MONTH'], values=['DAYS'], aggfunc='mean', margins=True) tm.assert_frame_equal(result['DAYS'], expected['DAYS']) with tm.assert_produces_warning(FutureWarning, check_stacklevel=False): expected = df.pivot_table(index=['JOB', 'NAME'], columns=['YEAR', 'MONTH'], values=['SALARY'], aggfunc='sum', margins=True) tm.assert_frame_equal(result['SALARY'], expected['SALARY']) def test_margins_dtype(self): # GH 17013 df = self.data.copy() df[['D', 'E', 'F']] = np.arange(len(df) * 3).reshape(len(df), 3) mi_val = list(product(['bar', 'foo'], ['one', 'two'])) + [('All', '')] mi = MultiIndex.from_tuples(mi_val, names=('A', 'B')) expected = DataFrame({'dull': [12, 21, 3, 9, 45], 'shiny': [33, 0, 36, 51, 120]}, index=mi).rename_axis('C', axis=1) expected['All'] = expected['dull'] + expected['shiny'] result = df.pivot_table(values='D', index=['A', 'B'], columns='C', margins=True, aggfunc=np.sum, fill_value=0) tm.assert_frame_equal(expected, result) @pytest.mark.xfail(reason='GH 17035 (len of floats is casted back to ' 'floats)') def test_margins_dtype_len(self): mi_val = list(product(['bar', 'foo'], ['one', 'two'])) + [('All', '')] mi = MultiIndex.from_tuples(mi_val, names=('A', 'B')) expected = DataFrame({'dull': [1, 1, 2, 1, 5], 'shiny': [2, 0, 2, 2, 6]}, index=mi).rename_axis('C', axis=1) expected['All'] = expected['dull'] + expected['shiny'] result = self.data.pivot_table(values='D', index=['A', 'B'], columns='C', margins=True, aggfunc=len, fill_value=0) tm.assert_frame_equal(expected, result) def test_pivot_integer_columns(self): # caused by upstream bug in unstack d = date.min data = list(product(['foo', 'bar'], ['A', 'B', 'C'], ['x1', 'x2'], [d + timedelta(i) for i in range(20)], [1.0])) df = DataFrame(data) table = df.pivot_table(values=4, index=[0, 1, 3], columns=[2]) df2 = df.rename(columns=str) table2 = df2.pivot_table( values='4', index=['0', '1', '3'], columns=['2']) tm.assert_frame_equal(table, table2, check_names=False) def test_pivot_no_level_overlap(self): # GH #1181 data = DataFrame({'a': ['a', 'a', 'a', 'a', 'b', 'b', 'b', 'b'] * 2, 'b': [0, 0, 0, 0, 1, 1, 1, 1] * 2, 'c': (['foo'] * 4 + ['bar'] * 4) * 2, 'value': np.random.randn(16)}) table = data.pivot_table('value', index='a', columns=['b', 'c']) grouped = data.groupby(['a', 'b', 'c'])['value'].mean() expected = grouped.unstack('b').unstack('c').dropna(axis=1, how='all') tm.assert_frame_equal(table, expected) def test_pivot_columns_lexsorted(self): n = 10000 dtype = np.dtype([ ("Index", object), ("Symbol", object), ("Year", int), ("Month", int), ("Day", int), ("Quantity", int), ("Price", float), ]) products = np.array([ ('SP500', 'ADBE'), ('SP500', 'NVDA'), ('SP500', 'ORCL'), ('NDQ100', 'AAPL'), ('NDQ100', 'MSFT'), ('NDQ100', 'GOOG'), ('FTSE', 'DGE.L'), ('FTSE', 'TSCO.L'), ('FTSE', 'GSK.L'), ], dtype=[('Index', object), ('Symbol', object)]) items = np.empty(n, dtype=dtype) iproduct = np.random.randint(0, len(products), n) items['Index'] = products['Index'][iproduct] items['Symbol'] = products['Symbol'][iproduct] dr = pd.date_range(date(2000, 1, 1), date(2010, 12, 31)) dates = 
dr[np.random.randint(0, len(dr), n)] items['Year'] = dates.year items['Month'] = dates.month items['Day'] = dates.day items['Price'] = np.random.lognormal(4.0, 2.0, n) df = DataFrame(items) pivoted = df.pivot_table('Price', index=['Month', 'Day'], columns=['Index', 'Symbol', 'Year'], aggfunc='mean') assert pivoted.columns.is_monotonic def test_pivot_complex_aggfunc(self): f = OrderedDict([('D', ['std']), ('E', ['sum'])]) expected = self.data.groupby(['A', 'B']).agg(f).unstack('B') result = self.data.pivot_table(index='A', columns='B', aggfunc=f) tm.assert_frame_equal(result, expected) def test_margins_no_values_no_cols(self): # Regression test on pivot table: no values or cols passed. result = self.data[['A', 'B']].pivot_table( index=['A', 'B'], aggfunc=len, margins=True) result_list = result.tolist() assert sum(result_list[:-1]) == result_list[-1] def test_margins_no_values_two_rows(self): # Regression test on pivot table: no values passed but rows are a # multi-index result = self.data[['A', 'B', 'C']].pivot_table( index=['A', 'B'], columns='C', aggfunc=len, margins=True) assert result.All.tolist() == [3.0, 1.0, 4.0, 3.0, 11.0] def test_margins_no_values_one_row_one_col(self): # Regression test on pivot table: no values passed but row and col # defined result = self.data[['A', 'B']].pivot_table( index='A', columns='B', aggfunc=len, margins=True) assert result.All.tolist() == [4.0, 7.0, 11.0] def test_margins_no_values_two_row_two_cols(self): # Regression test on pivot table: no values passed but rows and cols # are multi-indexed self.data['D'] = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k'] result = self.data[['A', 'B', 'C', 'D']].pivot_table( index=['A', 'B'], columns=['C', 'D'], aggfunc=len, margins=True) assert result.All.tolist() == [3.0, 1.0, 4.0, 3.0, 11.0] def test_pivot_table_with_margins_set_margin_name(self): # see gh-3335 for margin_name in ['foo', 'one', 666, None, ['a', 'b']]: with pytest.raises(ValueError): # multi-index index pivot_table(self.data, values='D', index=['A', 'B'], columns=['C'], margins=True, margins_name=margin_name) with pytest.raises(ValueError): # multi-index column pivot_table(self.data, values='D', index=['C'], columns=['A', 'B'], margins=True, margins_name=margin_name) with pytest.raises(ValueError): # non-multi-index index/column pivot_table(self.data, values='D', index=['A'], columns=['B'], margins=True, margins_name=margin_name) def test_pivot_timegrouper(self): df = DataFrame({ 'Branch': 'A A A A A A A B'.split(), 'Buyer': 'Carl Mark Carl Carl Joe Joe Joe Carl'.split(), 'Quantity': [1, 3, 5, 1, 8, 1, 9, 3], 'Date': [datetime(2013, 1, 1), datetime(2013, 1, 1), datetime(2013, 10, 1), datetime(2013, 10, 2), datetime(2013, 10, 1), datetime(2013, 10, 2), datetime(2013, 12, 2), datetime(2013, 12, 2), ]}).set_index('Date') expected = DataFrame(np.array([10, 18, 3], dtype='int64') .reshape(1, 3), index=[datetime(2013, 12, 31)], columns='Carl Joe Mark'.split()) expected.index.name = 'Date' expected.columns.name = 'Buyer' result = pivot_table(df, index=Grouper(freq='A'), columns='Buyer', values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected) result = pivot_table(df, index='Buyer', columns=Grouper(freq='A'), values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) expected = DataFrame(np.array([1, np.nan, 3, 9, 18, np.nan]) .reshape(2, 3), index=[datetime(2013, 1, 1), datetime(2013, 7, 1)], columns='Carl Joe Mark'.split()) expected.index.name = 'Date' expected.columns.name = 'Buyer' result = pivot_table(df, 
index=Grouper(freq='6MS'), columns='Buyer', values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected) result = pivot_table(df, index='Buyer', columns=Grouper(freq='6MS'), values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) # passing the name df = df.reset_index() result = pivot_table(df, index=Grouper(freq='6MS', key='Date'), columns='Buyer', values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected) result = pivot_table(df, index='Buyer', columns=Grouper(freq='6MS', key='Date'), values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) pytest.raises(KeyError, lambda: pivot_table( df, index=Grouper(freq='6MS', key='foo'), columns='Buyer', values='Quantity', aggfunc=np.sum)) pytest.raises(KeyError, lambda: pivot_table( df, index='Buyer', columns=Grouper(freq='6MS', key='foo'), values='Quantity', aggfunc=np.sum)) # passing the level df = df.set_index('Date') result = pivot_table(df, index=Grouper(freq='6MS', level='Date'), columns='Buyer', values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected) result = pivot_table(df, index='Buyer', columns=Grouper(freq='6MS', level='Date'), values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) pytest.raises(ValueError, lambda: pivot_table( df, index=Grouper(freq='6MS', level='foo'), columns='Buyer', values='Quantity', aggfunc=np.sum)) pytest.raises(ValueError, lambda: pivot_table( df, index='Buyer', columns=Grouper(freq='6MS', level='foo'), values='Quantity', aggfunc=np.sum)) # double grouper df = DataFrame({ 'Branch': 'A A A A A A A B'.split(), 'Buyer': 'Carl Mark Carl Carl Joe Joe Joe Carl'.split(), 'Quantity': [1, 3, 5, 1, 8, 1, 9, 3], 'Date': [datetime(2013, 11, 1, 13, 0), datetime(2013, 9, 1, 13, 5), datetime(2013, 10, 1, 20, 0), datetime(2013, 10, 2, 10, 0), datetime(2013, 11, 1, 20, 0), datetime(2013, 10, 2, 10, 0), datetime(2013, 10, 2, 12, 0), datetime(2013, 12, 5, 14, 0)], 'PayDay': [datetime(2013, 10, 4, 0, 0), datetime(2013, 10, 15, 13, 5), datetime(2013, 9, 5, 20, 0), datetime(2013, 11, 2, 10, 0), datetime(2013, 10, 7, 20, 0), datetime(2013, 9, 5, 10, 0), datetime(2013, 12, 30, 12, 0), datetime(2013, 11, 20, 14, 0), ]}) result = pivot_table(df, index=Grouper(freq='M', key='Date'), columns=Grouper(freq='M', key='PayDay'), values='Quantity', aggfunc=np.sum) expected = DataFrame(np.array([np.nan, 3, np.nan, np.nan, 6, np.nan, 1, 9, np.nan, 9, np.nan, np.nan, np.nan, np.nan, 3, np.nan]).reshape(4, 4), index=[datetime(2013, 9, 30), datetime(2013, 10, 31), datetime(2013, 11, 30), datetime(2013, 12, 31)], columns=[datetime(2013, 9, 30), datetime(2013, 10, 31), datetime(2013, 11, 30), datetime(2013, 12, 31)]) expected.index.name = 'Date' expected.columns.name = 'PayDay' tm.assert_frame_equal(result, expected) result = pivot_table(df, index=Grouper(freq='M', key='PayDay'), columns=Grouper(freq='M', key='Date'), values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) tuples = [(datetime(2013, 9, 30), datetime(2013, 10, 31)), (datetime(2013, 10, 31), datetime(2013, 9, 30)), (datetime(2013, 10, 31), datetime(2013, 11, 30)), (datetime(2013, 10, 31), datetime(2013, 12, 31)), (datetime(2013, 11, 30), datetime(2013, 10, 31)), (datetime(2013, 12, 31), datetime(2013, 11, 30)), ] idx = MultiIndex.from_tuples(tuples, names=['Date', 'PayDay']) expected = DataFrame(np.array([3, np.nan, 6, np.nan, 1, np.nan, 9, np.nan, 9, np.nan, np.nan, 3]).reshape(6, 2), index=idx, columns=['A', 'B']) expected.columns.name = 'Branch' result = 
pivot_table( df, index=[Grouper(freq='M', key='Date'), Grouper(freq='M', key='PayDay')], columns=['Branch'], values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected) result = pivot_table(df, index=['Branch'], columns=[Grouper(freq='M', key='Date'), Grouper(freq='M', key='PayDay')], values='Quantity', aggfunc=np.sum) tm.assert_frame_equal(result, expected.T) def test_pivot_datetime_tz(self): dates1 = ['2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00', '2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00'] dates2 = ['2013-01-01 15:00:00', '2013-01-01 15:00:00', '2013-01-01 15:00:00', '2013-02-01 15:00:00', '2013-02-01 15:00:00', '2013-02-01 15:00:00'] df = DataFrame({'label': ['a', 'a', 'a', 'b', 'b', 'b'], 'dt1': dates1, 'dt2': dates2, 'value1': np.arange(6, dtype='int64'), 'value2': [1, 2] * 3}) df['dt1'] = df['dt1'].apply(lambda d: pd.Timestamp(d, tz='US/Pacific')) df['dt2'] = df['dt2'].apply(lambda d: pd.Timestamp(d, tz='Asia/Tokyo')) exp_idx = pd.DatetimeIndex(['2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00'], tz='US/Pacific', name='dt1') exp_col1 = Index(['value1', 'value1']) exp_col2 = Index(['a', 'b'], name='label') exp_col = MultiIndex.from_arrays([exp_col1, exp_col2]) expected = DataFrame([[0, 3], [1, 4], [2, 5]], index=exp_idx, columns=exp_col) result = pivot_table(df, index=['dt1'], columns=[ 'label'], values=['value1']) tm.assert_frame_equal(result, expected) exp_col1 = Index(['sum', 'sum', 'sum', 'sum', 'mean', 'mean', 'mean', 'mean']) exp_col2 = Index(['value1', 'value1', 'value2', 'value2'] * 2) exp_col3 = pd.DatetimeIndex(['2013-01-01 15:00:00', '2013-02-01 15:00:00'] * 4, tz='Asia/Tokyo', name='dt2') exp_col = MultiIndex.from_arrays([exp_col1, exp_col2, exp_col3]) expected = DataFrame(np.array([[0, 3, 1, 2, 0, 3, 1, 2], [1, 4, 2, 1, 1, 4, 2, 1], [2, 5, 1, 2, 2, 5, 1, 2]], dtype='int64'), index=exp_idx, columns=exp_col) result = pivot_table(df, index=['dt1'], columns=['dt2'], values=['value1', 'value2'], aggfunc=[np.sum, np.mean]) tm.assert_frame_equal(result, expected) def test_pivot_dtaccessor(self): # GH 8103 dates1 = ['2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00', '2011-07-19 07:00:00', '2011-07-19 08:00:00', '2011-07-19 09:00:00'] dates2 = ['2013-01-01 15:00:00', '2013-01-01 15:00:00', '2013-01-01 15:00:00', '2013-02-01 15:00:00', '2013-02-01 15:00:00', '2013-02-01 15:00:00'] df = DataFrame({'label': ['a', 'a', 'a', 'b', 'b', 'b'], 'dt1': dates1, 'dt2': dates2, 'value1': np.arange(6, dtype='int64'), 'value2': [1, 2] * 3}) df['dt1'] = df['dt1'].apply(lambda d: pd.Timestamp(d)) df['dt2'] = df['dt2'].apply(lambda d: pd.Timestamp(d)) result = pivot_table(df, index='label', columns=df['dt1'].dt.hour, values='value1') exp_idx = Index(['a', 'b'], name='label') expected = DataFrame({7: [0, 3], 8: [1, 4], 9: [2, 5]}, index=exp_idx, columns=Index([7, 8, 9], name='dt1')) tm.assert_frame_equal(result, expected) result = pivot_table(df, index=df['dt2'].dt.month, columns=df['dt1'].dt.hour, values='value1') expected = DataFrame({7: [0, 3], 8: [1, 4], 9: [2, 5]}, index=Index([1, 2], name='dt2'), columns=Index([7, 8, 9], name='dt1')) tm.assert_frame_equal(result, expected) result = pivot_table(df, index=df['dt2'].dt.year.values, columns=[df['dt1'].dt.hour, df['dt2'].dt.month], values='value1') exp_col = MultiIndex.from_arrays( [[7, 7, 8, 8, 9, 9], [1, 2] * 3], names=['dt1', 'dt2']) expected = DataFrame(np.array([[0, 3, 1, 4, 2, 5]], dtype='int64'), index=[2013], columns=exp_col) 
tm.assert_frame_equal(result, expected) result = pivot_table(df, index=np.array(['X', 'X', 'X', 'X', 'Y', 'Y']), columns=[df['dt1'].dt.hour, df['dt2'].dt.month], values='value1') expected = DataFrame(np.array([[0, 3, 1, np.nan, 2, np.nan], [np.nan, np.nan, np.nan, 4, np.nan, 5]]), index=['X', 'Y'], columns=exp_col) tm.assert_frame_equal(result, expected) def test_daily(self): rng = date_range('1/1/2000', '12/31/2004', freq='D') ts = Series(np.random.randn(len(rng)), index=rng) annual = pivot_table(DataFrame(ts), index=ts.index.year, columns=ts.index.dayofyear) annual.columns = annual.columns.droplevel(0) doy = np.asarray(ts.index.dayofyear) for i in range(1, 367): subset = ts[doy == i] subset.index = subset.index.year result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) assert result.name == i def test_monthly(self): rng = date_range('1/1/2000', '12/31/2004', freq='M') ts = Series(np.random.randn(len(rng)), index=rng) annual = pivot_table(pd.DataFrame(ts), index=ts.index.year, columns=ts.index.month) annual.columns = annual.columns.droplevel(0) month = ts.index.month for i in range(1, 13): subset = ts[month == i] subset.index = subset.index.year result = annual[i].dropna() tm.assert_series_equal(result, subset, check_names=False) assert result.name == i def test_pivot_table_with_iterator_values(self): # GH 12017 aggs = {'D': 'sum', 'E': 'mean'} pivot_values_list = pd.pivot_table( self.data, index=['A'], values=list(aggs.keys()), aggfunc=aggs, ) pivot_values_keys = pd.pivot_table( self.data, index=['A'], values=aggs.keys(), aggfunc=aggs, ) tm.assert_frame_equal(pivot_values_keys, pivot_values_list) agg_values_gen = (value for value in aggs.keys()) pivot_values_gen = pd.pivot_table( self.data, index=['A'], values=agg_values_gen, aggfunc=aggs, ) tm.assert_frame_equal(pivot_values_gen, pivot_values_list) def test_pivot_table_margins_name_with_aggfunc_list(self): # GH 13354 margins_name = 'Weekly' costs = pd.DataFrame( {'item': ['bacon', 'cheese', 'bacon', 'cheese'], 'cost': [2.5, 4.5, 3.2, 3.3], 'day': ['M', 'M', 'T', 'T']} ) table = costs.pivot_table( index="item", columns="day", margins=True, margins_name=margins_name, aggfunc=[np.mean, max] ) ix = pd.Index( ['bacon', 'cheese', margins_name], dtype='object', name='item' ) tups = [('mean', 'cost', 'M'), ('mean', 'cost', 'T'), ('mean', 'cost', margins_name), ('max', 'cost', 'M'), ('max', 'cost', 'T'), ('max', 'cost', margins_name)] cols = pd.MultiIndex.from_tuples(tups, names=[None, None, 'day']) expected = pd.DataFrame(table.values, index=ix, columns=cols) tm.assert_frame_equal(table, expected) @pytest.mark.xfail(reason='GH 17035 (np.mean of ints is casted back to ' 'ints)') def test_categorical_margins(self, observed): # GH 10989 df = pd.DataFrame({'x': np.arange(8), 'y': np.arange(8) // 4, 'z': np.arange(8) % 2}) expected = pd.DataFrame([[1.0, 2.0, 1.5], [5, 6, 5.5], [3, 4, 3.5]]) expected.index = Index([0, 1, 'All'], name='y') expected.columns = Index([0, 1, 'All'], name='z') table = df.pivot_table('x', 'y', 'z', dropna=observed, margins=True) tm.assert_frame_equal(table, expected) @pytest.mark.xfail(reason='GH 17035 (np.mean of ints is casted back to ' 'ints)') def test_categorical_margins_category(self, observed): df = pd.DataFrame({'x': np.arange(8), 'y': np.arange(8) // 4, 'z': np.arange(8) % 2}) expected = pd.DataFrame([[1.0, 2.0, 1.5], [5, 6, 5.5], [3, 4, 3.5]]) expected.index = Index([0, 1, 'All'], name='y') expected.columns = Index([0, 1, 'All'], name='z') df.y = df.y.astype('category') df.z = 
df.z.astype('category') table = df.pivot_table('x', 'y', 'z', dropna=observed, margins=True) tm.assert_frame_equal(table, expected) def test_categorical_aggfunc(self, observed): # GH 9534 df = pd.DataFrame({"C1": ["A", "B", "C", "C"], "C2": ["a", "a", "b", "b"], "V": [1, 2, 3, 4]}) df["C1"] = df["C1"].astype("category") result = df.pivot_table("V", index="C1", columns="C2", dropna=observed, aggfunc="count") expected_index = pd.CategoricalIndex(['A', 'B', 'C'], categories=['A', 'B', 'C'], ordered=False, name='C1') expected_columns = pd.Index(['a', 'b'], name='C2') expected_data = np.array([[1., np.nan], [1., np.nan], [np.nan, 2.]]) expected = pd.DataFrame(expected_data, index=expected_index, columns=expected_columns) tm.assert_frame_equal(result, expected) def test_categorical_pivot_index_ordering(self, observed): # GH 8731 df = pd.DataFrame({'Sales': [100, 120, 220], 'Month': ['January', 'January', 'January'], 'Year': [2013, 2014, 2013]}) months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] df['Month'] = df['Month'].astype('category').cat.set_categories(months) result = df.pivot_table(values='Sales', index='Month', columns='Year', dropna=observed, aggfunc='sum') expected_columns = pd.Int64Index([2013, 2014], name='Year') expected_index = pd.CategoricalIndex(['January'], categories=months, ordered=False, name='Month') expected = pd.DataFrame([[320, 120]], index=expected_index, columns=expected_columns) if not observed: result = result.dropna().astype(np.int64) tm.assert_frame_equal(result, expected) def test_pivot_table_not_series(self): # GH 4386 # pivot_table always returns a DataFrame # when values is not list like and columns is None # and aggfunc is not instance of list df = DataFrame({'col1': [3, 4, 5], 'col2': ['C', 'D', 'E'], 'col3': [1, 3, 9]}) result = df.pivot_table('col1', index=['col3', 'col2'], aggfunc=np.sum) m = MultiIndex.from_arrays([[1, 3, 9], ['C', 'D', 'E']], names=['col3', 'col2']) expected = DataFrame([3, 4, 5], index=m, columns=['col1']) tm.assert_frame_equal(result, expected) result = df.pivot_table( 'col1', index='col3', columns='col2', aggfunc=np.sum ) expected = DataFrame([[3, np.NaN, np.NaN], [np.NaN, 4, np.NaN], [np.NaN, np.NaN, 5]], index=Index([1, 3, 9], name='col3'), columns=Index(['C', 'D', 'E'], name='col2')) tm.assert_frame_equal(result, expected) result = df.pivot_table('col1', index='col3', aggfunc=[np.sum]) m = MultiIndex.from_arrays([['sum'], ['col1']]) expected = DataFrame([3, 4, 5], index=Index([1, 3, 9], name='col3'), columns=m) tm.assert_frame_equal(result, expected) def test_pivot_margins_name_unicode(self): # issue #13292 greek = u'\u0394\u03bf\u03ba\u03b9\u03bc\u03ae' frame = pd.DataFrame({'foo': [1, 2, 3]}) table = pd.pivot_table(frame, index=['foo'], aggfunc=len, margins=True, margins_name=greek) index = pd.Index([1, 2, 3, greek], dtype='object', name='foo') expected = pd.DataFrame(index=index) tm.assert_frame_equal(table, expected) def test_pivot_string_as_func(self): # GH #18713 # for correctness purposes data = DataFrame({'A': ['foo', 'foo', 'foo', 'foo', 'bar', 'bar', 'bar', 'bar', 'foo', 'foo', 'foo'], 'B': ['one', 'one', 'one', 'two', 'one', 'one', 'one', 'two', 'two', 'two', 'one'], 'C': range(11)}) result = pivot_table(data, index='A', columns='B', aggfunc='sum') mi = MultiIndex(levels=[['C'], ['one', 'two']], labels=[[0, 0], [0, 1]], names=[None, 'B']) expected = DataFrame({('C', 'one'): {'bar': 15, 'foo': 13}, ('C', 'two'): {'bar': 7, 'foo': 20}}, 
columns=mi).rename_axis('A') tm.assert_frame_equal(result, expected) result = pivot_table(data, index='A', columns='B', aggfunc=['sum', 'mean']) mi = MultiIndex(levels=[['sum', 'mean'], ['C'], ['one', 'two']], labels=[[0, 0, 1, 1], [0, 0, 0, 0], [0, 1, 0, 1]], names=[None, None, 'B']) expected = DataFrame({('mean', 'C', 'one'): {'bar': 5.0, 'foo': 3.25}, ('mean', 'C', 'two'): {'bar': 7.0, 'foo': 6.666666666666667}, ('sum', 'C', 'one'): {'bar': 15, 'foo': 13}, ('sum', 'C', 'two'): {'bar': 7, 'foo': 20}}, columns=mi).rename_axis('A') tm.assert_frame_equal(result, expected) @pytest.mark.parametrize('f, f_numpy', [('sum', np.sum), ('mean', np.mean), ('std', np.std), (['sum', 'mean'], [np.sum, np.mean]), (['sum', 'std'], [np.sum, np.std]), (['std', 'mean'], [np.std, np.mean])]) def test_pivot_string_func_vs_func(self, f, f_numpy): # GH #18713 # for consistency purposes result = pivot_table(self.data, index='A', columns='B', aggfunc=f) expected = pivot_table(self.data, index='A', columns='B', aggfunc=f_numpy) tm.assert_frame_equal(result, expected) class TestCrosstab(object): def setup_method(self, method): df = DataFrame({'A': ['foo', 'foo', 'foo', 'foo', 'bar', 'bar', 'bar', 'bar', 'foo', 'foo', 'foo'], 'B': ['one', 'one', 'one', 'two', 'one', 'one', 'one', 'two', 'two', 'two', 'one'], 'C': ['dull', 'dull', 'shiny', 'dull', 'dull', 'shiny', 'shiny', 'dull', 'shiny', 'shiny', 'shiny'], 'D': np.random.randn(11), 'E': np.random.randn(11), 'F': np.random.randn(11)}) self.df = df.append(df, ignore_index=True) def test_crosstab_single(self): df = self.df result = crosstab(df['A'], df['C']) expected = df.groupby(['A', 'C']).size().unstack() tm.assert_frame_equal(result, expected.fillna(0).astype(np.int64)) def test_crosstab_multiple(self): df = self.df result = crosstab(df['A'], [df['B'], df['C']]) expected = df.groupby(['A', 'B', 'C']).size() expected = expected.unstack( 'B').unstack('C').fillna(0).astype(np.int64) tm.assert_frame_equal(result, expected) result = crosstab([df['B'], df['C']], df['A']) expected = df.groupby(['B', 'C', 'A']).size() expected = expected.unstack('A').fillna(0).astype(np.int64) tm.assert_frame_equal(result, expected) def test_crosstab_ndarray(self): a = np.random.randint(0, 5, size=100) b = np.random.randint(0, 3, size=100) c = np.random.randint(0, 10, size=100) df = DataFrame({'a': a, 'b': b, 'c': c}) result = crosstab(a, [b, c], rownames=['a'], colnames=('b', 'c')) expected = crosstab(df['a'], [df['b'], df['c']]) tm.assert_frame_equal(result, expected) result = crosstab([b, c], a, colnames=['a'], rownames=('b', 'c')) expected = crosstab([df['b'], df['c']], df['a']) tm.assert_frame_equal(result, expected) # assign arbitrary names result = crosstab(self.df['A'].values, self.df['C'].values) assert result.index.name == 'row_0' assert result.columns.name == 'col_0' def test_crosstab_non_aligned(self): # GH 17005 a = pd.Series([0, 1, 1], index=['a', 'b', 'c']) b = pd.Series([3, 4, 3, 4, 3], index=['a', 'b', 'c', 'd', 'f']) c = np.array([3, 4, 3]) expected = pd.DataFrame([[1, 0], [1, 1]], index=Index([0, 1], name='row_0'), columns=Index([3, 4], name='col_0')) result = crosstab(a, b) tm.assert_frame_equal(result, expected) result = crosstab(a, c) tm.assert_frame_equal(result, expected) def test_crosstab_margins(self): a = np.random.randint(0, 7, size=100) b = np.random.randint(0, 3, size=100) c = np.random.randint(0, 5, size=100) df = DataFrame({'a': a, 'b': b, 'c': c}) result = crosstab(a, [b, c], rownames=['a'], colnames=('b', 'c'), margins=True) assert result.index.names == 
('a',) assert result.columns.names == ['b', 'c'] all_cols = result['All', ''] exp_cols = df.groupby(['a']).size().astype('i8') # to keep index.name exp_margin = Series([len(df)], index=Index(['All'], name='a')) exp_cols = exp_cols.append(exp_margin) exp_cols.name = ('All', '') tm.assert_series_equal(all_cols, exp_cols) all_rows = result.loc['All'] exp_rows = df.groupby(['b', 'c']).size().astype('i8') exp_rows = exp_rows.append(Series([len(df)], index=[('All', '')])) exp_rows.name = 'All' exp_rows = exp_rows.reindex(all_rows.index) exp_rows = exp_rows.fillna(0).astype(np.int64) tm.assert_series_equal(all_rows, exp_rows) def test_crosstab_margins_set_margin_name(self): # GH 15972 a = np.random.randint(0, 7, size=100) b = np.random.randint(0, 3, size=100) c = np.random.randint(0, 5, size=100) df = DataFrame({'a': a, 'b': b, 'c': c}) result = crosstab(a, [b, c], rownames=['a'], colnames=('b', 'c'), margins=True, margins_name='TOTAL') assert result.index.names == ('a',) assert result.columns.names == ['b', 'c'] all_cols = result['TOTAL', ''] exp_cols = df.groupby(['a']).size().astype('i8') # to keep index.name exp_margin = Series([len(df)], index=Index(['TOTAL'], name='a')) exp_cols = exp_cols.append(exp_margin) exp_cols.name = ('TOTAL', '') tm.assert_series_equal(all_cols, exp_cols) all_rows = result.loc['TOTAL'] exp_rows = df.groupby(['b', 'c']).size().astype('i8') exp_rows = exp_rows.append(Series([len(df)], index=[('TOTAL', '')])) exp_rows.name = 'TOTAL' exp_rows = exp_rows.reindex(all_rows.index) exp_rows = exp_rows.fillna(0).astype(np.int64) tm.assert_series_equal(all_rows, exp_rows) for margins_name in [666, None, ['a', 'b']]: with pytest.raises(ValueError): crosstab(a, [b, c], rownames=['a'], colnames=('b', 'c'), margins=True, margins_name=margins_name) def test_crosstab_pass_values(self): a = np.random.randint(0, 7, size=100) b = np.random.randint(0, 3, size=100) c = np.random.randint(0, 5, size=100) values = np.random.randn(100) table = crosstab([a, b], c, values, aggfunc=np.sum, rownames=['foo', 'bar'], colnames=['baz']) df = DataFrame({'foo': a, 'bar': b, 'baz': c, 'values': values}) expected = df.pivot_table('values', index=['foo', 'bar'], columns='baz', aggfunc=np.sum) tm.assert_frame_equal(table, expected) def test_crosstab_dropna(self): # GH 3820 a = np.array(['foo', 'foo', 'foo', 'bar', 'bar', 'foo', 'foo'], dtype=object) b = np.array(['one', 'one', 'two', 'one', 'two', 'two', 'two'], dtype=object) c = np.array(['dull', 'dull', 'dull', 'dull', 'dull', 'shiny', 'shiny'], dtype=object) res = pd.crosstab(a, [b, c], rownames=['a'], colnames=['b', 'c'], dropna=False) m = MultiIndex.from_tuples([('one', 'dull'), ('one', 'shiny'), ('two', 'dull'), ('two', 'shiny')], names=['b', 'c']) tm.assert_index_equal(res.columns, m) def test_crosstab_no_overlap(self): # GS 10291 s1 = pd.Series([1, 2, 3], index=[1, 2, 3]) s2 = pd.Series([4, 5, 6], index=[4, 5, 6]) actual = crosstab(s1, s2) expected = pd.DataFrame() tm.assert_frame_equal(actual, expected) def test_margin_dropna(self): # GH 12577 # pivot_table counts null into margin ('All') # when margins=true and dropna=true df = pd.DataFrame({'a': [1, 2, 2, 2, 2, np.nan], 'b': [3, 3, 4, 4, 4, 4]}) actual = pd.crosstab(df.a, df.b, margins=True, dropna=True) expected = pd.DataFrame([[1, 0, 1], [1, 3, 4], [2, 3, 5]]) expected.index = Index([1.0, 2.0, 'All'], name='a') expected.columns = Index([3, 4, 'All'], name='b') tm.assert_frame_equal(actual, expected) df = DataFrame({'a': [1, np.nan, np.nan, np.nan, 2, np.nan], 'b': [3, np.nan, 4, 4, 4, 4]}) 
actual = pd.crosstab(df.a, df.b, margins=True, dropna=True) expected = pd.DataFrame([[1, 0, 1], [0, 1, 1], [1, 1, 2]]) expected.index = Index([1.0, 2.0, 'All'], name='a') expected.columns = Index([3.0, 4.0, 'All'], name='b') tm.assert_frame_equal(actual, expected) df = DataFrame({'a': [1, np.nan, np.nan, np.nan, np.nan, 2], 'b': [3, 3, 4, 4, 4, 4]}) actual = pd.crosstab(df.a, df.b, margins=True, dropna=True) expected = pd.DataFrame([[1, 0, 1], [0, 1, 1], [1, 1, 2]]) expected.index = Index([1.0, 2.0, 'All'], name='a') expected.columns = Index([3, 4, 'All'], name='b') tm.assert_frame_equal(actual, expected) # GH 12642 # _add_margins raises KeyError: Level None not found # when margins=True and dropna=False df = pd.DataFrame({'a': [1, 2, 2, 2, 2, np.nan], 'b': [3, 3, 4, 4, 4, 4]}) actual = pd.crosstab(df.a, df.b, margins=True, dropna=False) expected = pd.DataFrame([[1, 0, 1], [1, 3, 4], [2, 4, 6]]) expected.index = Index([1.0, 2.0, 'All'], name='a') expected.columns = Index([3, 4, 'All'], name='b') tm.assert_frame_equal(actual, expected) df = DataFrame({'a': [1, np.nan, np.nan, np.nan, 2, np.nan], 'b': [3, np.nan, 4, 4, 4, 4]}) actual = pd.crosstab(df.a, df.b, margins=True, dropna=False) expected = pd.DataFrame([[1, 0, 1], [0, 1, 1], [1, 4, 6]]) expected.index = Index([1.0, 2.0, 'All'], name='a') expected.columns = Index([3.0, 4.0, 'All'], name='b') tm.assert_frame_equal(actual, expected) a = np.array(['foo', 'foo', 'foo', 'bar', 'bar', 'foo', 'foo'], dtype=object) b = np.array(['one', 'one', 'two', 'one', 'two', np.nan, 'two'], dtype=object) c = np.array(['dull', 'dull', 'dull', 'dull', 'dull', 'shiny', 'shiny'], dtype=object) actual = pd.crosstab(a, [b, c], rownames=['a'], colnames=['b', 'c'], margins=True, dropna=False) m = MultiIndex.from_arrays([['one', 'one', 'two', 'two', 'All'], ['dull', 'shiny', 'dull', 'shiny', '']], names=['b', 'c']) expected = DataFrame([[1, 0, 1, 0, 2], [2, 0, 1, 1, 5], [3, 0, 2, 1, 7]], columns=m) expected.index = Index(['bar', 'foo', 'All'], name='a') tm.assert_frame_equal(actual, expected) actual = pd.crosstab([a, b], c, rownames=['a', 'b'], colnames=['c'], margins=True, dropna=False) m = MultiIndex.from_arrays([['bar', 'bar', 'foo', 'foo', 'All'], ['one', 'two', 'one', 'two', '']], names=['a', 'b']) expected = DataFrame([[1, 0, 1], [1, 0, 1], [2, 0, 2], [1, 1, 2], [5, 2, 7]], index=m) expected.columns = Index(['dull', 'shiny', 'All'], name='c') tm.assert_frame_equal(actual, expected) actual = pd.crosstab([a, b], c, rownames=['a', 'b'], colnames=['c'], margins=True, dropna=True) m = MultiIndex.from_arrays([['bar', 'bar', 'foo', 'foo', 'All'], ['one', 'two', 'one', 'two', '']], names=['a', 'b']) expected = DataFrame([[1, 0, 1], [1, 0, 1], [2, 0, 2], [1, 1, 2], [5, 1, 6]], index=m) expected.columns = Index(['dull', 'shiny', 'All'], name='c') tm.assert_frame_equal(actual, expected) def test_crosstab_normalize(self): # Issue 12578 df = pd.DataFrame({'a': [1, 2, 2, 2, 2], 'b': [3, 3, 4, 4, 4], 'c': [1, 1, np.nan, 1, 1]}) rindex = pd.Index([1, 2], name='a') cindex = pd.Index([3, 4], name='b') full_normal = pd.DataFrame([[0.2, 0], [0.2, 0.6]], index=rindex, columns=cindex) row_normal = pd.DataFrame([[1.0, 0], [0.25, 0.75]], index=rindex, columns=cindex) col_normal = pd.DataFrame([[0.5, 0], [0.5, 1.0]], index=rindex, columns=cindex) # Check all normalize args tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize='all'), full_normal) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize=True), full_normal) tm.assert_frame_equal(pd.crosstab(df.a, df.b, 
normalize='index'), row_normal) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize='columns'), col_normal) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize=1), pd.crosstab(df.a, df.b, normalize='columns')) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize=0), pd.crosstab(df.a, df.b, normalize='index')) row_normal_margins = pd.DataFrame([[1.0, 0], [0.25, 0.75], [0.4, 0.6]], index=pd.Index([1, 2, 'All'], name='a', dtype='object'), columns=pd.Index([3, 4], name='b', dtype='object')) col_normal_margins = pd.DataFrame([[0.5, 0, 0.2], [0.5, 1.0, 0.8]], index=pd.Index([1, 2], name='a', dtype='object'), columns=pd.Index([3, 4, 'All'], name='b', dtype='object')) all_normal_margins = pd.DataFrame([[0.2, 0, 0.2], [0.2, 0.6, 0.8], [0.4, 0.6, 1]], index=pd.Index([1, 2, 'All'], name='a', dtype='object'), columns=pd.Index([3, 4, 'All'], name='b', dtype='object')) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize='index', margins=True), row_normal_margins) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize='columns', margins=True), col_normal_margins) tm.assert_frame_equal(pd.crosstab(df.a, df.b, normalize=True, margins=True), all_normal_margins) # Test arrays pd.crosstab([np.array([1, 1, 2, 2]), np.array([1, 2, 1, 2])], np.array([1, 2, 1, 2])) # Test with aggfunc norm_counts = pd.DataFrame([[0.25, 0, 0.25], [0.25, 0.5, 0.75], [0.5, 0.5, 1]], index=pd.Index([1, 2, 'All'], name='a', dtype='object'), columns=pd.Index([3, 4, 'All'], name='b')) test_case = pd.crosstab(df.a, df.b, df.c, aggfunc='count', normalize='all', margins=True) tm.assert_frame_equal(test_case, norm_counts) df = pd.DataFrame({'a': [1, 2, 2, 2, 2], 'b': [3, 3, 4, 4, 4], 'c': [0, 4, np.nan, 3, 3]}) norm_sum = pd.DataFrame([[0, 0, 0.], [0.4, 0.6, 1], [0.4, 0.6, 1]], index=pd.Index([1, 2, 'All'], name='a', dtype='object'), columns=pd.Index([3, 4, 'All'], name='b', dtype='object')) test_case = pd.crosstab(df.a, df.b, df.c, aggfunc=np.sum, normalize='all', margins=True) tm.assert_frame_equal(test_case, norm_sum) def test_crosstab_with_empties(self): # Check handling of empties df = pd.DataFrame({'a': [1, 2, 2, 2, 2], 'b': [3, 3, 4, 4, 4], 'c': [np.nan, np.nan, np.nan, np.nan, np.nan]}) empty = pd.DataFrame([[0.0, 0.0], [0.0, 0.0]], index=pd.Index([1, 2], name='a', dtype='int64'), columns=pd.Index([3, 4], name='b')) for i in [True, 'index', 'columns']: calculated = pd.crosstab(df.a, df.b, values=df.c, aggfunc='count', normalize=i) tm.assert_frame_equal(empty, calculated) nans = pd.DataFrame([[0.0, np.nan], [0.0, 0.0]], index=pd.Index([1, 2], name='a', dtype='int64'), columns=pd.Index([3, 4], name='b')) calculated = pd.crosstab(df.a, df.b, values=df.c, aggfunc='count', normalize=False) tm.assert_frame_equal(nans, calculated) def test_crosstab_errors(self): # Issue 12578 df = pd.DataFrame({'a': [1, 2, 2, 2, 2], 'b': [3, 3, 4, 4, 4], 'c': [1, 1, np.nan, 1, 1]}) error = 'values cannot be used without an aggfunc.' 
with tm.assert_raises_regex(ValueError, error): pd.crosstab(df.a, df.b, values=df.c) error = 'aggfunc cannot be used without values' with tm.assert_raises_regex(ValueError, error): pd.crosstab(df.a, df.b, aggfunc=np.mean) error = 'Not a valid normalize argument' with tm.assert_raises_regex(ValueError, error): pd.crosstab(df.a, df.b, normalize='42') with tm.assert_raises_regex(ValueError, error): pd.crosstab(df.a, df.b, normalize=42) error = 'Not a valid margins argument' with tm.assert_raises_regex(ValueError, error): pd.crosstab(df.a, df.b, normalize='all', margins=42) def test_crosstab_with_categorial_columns(self): # GH 8860 df = pd.DataFrame({'MAKE': ['Honda', 'Acura', 'Tesla', 'Honda', 'Honda', 'Acura'], 'MODEL': ['Sedan', 'Sedan', 'Electric', 'Pickup', 'Sedan', 'Sedan']}) categories = ['Sedan', 'Electric', 'Pickup'] df['MODEL'] = (df['MODEL'].astype('category') .cat.set_categories(categories)) result = pd.crosstab(df['MAKE'], df['MODEL']) expected_index = pd.Index(['Acura', 'Honda', 'Tesla'], name='MAKE') expected_columns = pd.CategoricalIndex(categories, categories=categories, ordered=False, name='MODEL') expected_data = [[2, 0, 0], [2, 0, 1], [0, 1, 0]] expected = pd.DataFrame(expected_data, index=expected_index, columns=expected_columns) tm.assert_frame_equal(result, expected) def test_crosstab_with_numpy_size(self): # GH 4003 df = pd.DataFrame({'A': ['one', 'one', 'two', 'three'] * 6, 'B': ['A', 'B', 'C'] * 8, 'C': ['foo', 'foo', 'foo', 'bar', 'bar', 'bar'] * 4, 'D': np.random.randn(24), 'E': np.random.randn(24)}) result = pd.crosstab(index=[df['A'], df['B']], columns=[df['C']], margins=True, aggfunc=np.size, values=df['D']) expected_index = pd.MultiIndex(levels=[['All', 'one', 'three', 'two'], ['', 'A', 'B', 'C']], labels=[[1, 1, 1, 2, 2, 2, 3, 3, 3, 0], [1, 2, 3, 1, 2, 3, 1, 2, 3, 0]], names=['A', 'B']) expected_column = pd.Index(['bar', 'foo', 'All'], dtype='object', name='C') expected_data = np.array([[2., 2., 4.], [2., 2., 4.], [2., 2., 4.], [2., np.nan, 2.], [np.nan, 2., 2.], [2., np.nan, 2.], [np.nan, 2., 2.], [2., np.nan, 2.], [np.nan, 2., 2.], [12., 12., 24.]]) expected = pd.DataFrame(expected_data, index=expected_index, columns=expected_column) tm.assert_frame_equal(result, expected) def test_crosstab_dup_index_names(self): # GH 13279 s = pd.Series(range(3), name='foo') result = pd.crosstab(s, s) expected_index = pd.Index(range(3), name='foo') expected = pd.DataFrame(np.eye(3, dtype=np.int64), index=expected_index, columns=expected_index) tm.assert_frame_equal(result, expected) @pytest.mark.parametrize("names", [['a', ('b', 'c')], [('a', 'b'), 'c']]) def test_crosstab_tuple_name(self, names): s1 = pd.Series(range(3), name=names[0]) s2 = pd.Series(range(1, 4), name=names[1]) mi = pd.MultiIndex.from_arrays([range(3), range(1, 4)], names=names) expected = pd.Series(1, index=mi).unstack(1, fill_value=0) result = pd.crosstab(s1, s2) tm.assert_frame_equal(result, expected) def test_crosstab_unsorted_order(self): df = pd.DataFrame({"b": [3, 1, 2], 'a': [5, 4, 6]}, index=['C', 'A', 'B']) result = pd.crosstab(df.index, [df.b, df.a]) e_idx = pd.Index(['A', 'B', 'C'], name='row_0') e_columns = pd.MultiIndex.from_tuples([(1, 4), (2, 6), (3, 5)], names=['b', 'a']) expected = pd.DataFrame([[1, 0, 0], [0, 1, 0], [0, 0, 1]], index=e_idx, columns=e_columns) tm.assert_frame_equal(result, expected)
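A compact sketch of the margins behaviour that test_margins and test_margins_dtype verify above; it uses the public pandas pivot_table API and is an added illustration, not part of the original test file:

import numpy as np
import pandas as pd

df = pd.DataFrame({'A': ['foo', 'foo', 'bar', 'bar'],
                   'C': ['dull', 'shiny', 'dull', 'shiny'],
                   'D': [1.0, 2.0, 3.0, 4.0]})

# margins=True appends an 'All' row and column holding the aggfunc applied
# along each axis; the grand total sits at ('All', 'All').
table = pd.pivot_table(df, values='D', index='A', columns='C',
                       aggfunc=np.mean, margins=True)
assert table.loc['All', 'All'] == df['D'].mean()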
bsd-3-clause
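The crosstab normalize tests in that file reduce to the following behaviour; a minimal sketch against the public pandas API, added for illustration:

import pandas as pd

a = pd.Series([1, 2, 2, 2, 2], name='a')
b = pd.Series([3, 3, 4, 4, 4], name='b')

# normalize='index' divides each row by its row total, 'columns' by column
# totals, and 'all' (or True) by the grand total; margins combine with it.
ct = pd.crosstab(a, b, normalize='index')
assert (ct.sum(axis=1).round(6) == 1.0).all()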
joshloyal/scikit-learn
sklearn/linear_model/tests/test_logistic.py
18
41552
import numpy as np import scipy.sparse as sp from scipy import linalg, optimize, sparse from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_warns from sklearn.utils.testing import raises from sklearn.utils.testing import ignore_warnings from sklearn.utils.testing import assert_raise_message from sklearn.exceptions import ConvergenceWarning from sklearn.utils import compute_class_weight from sklearn.utils.fixes import sp_version from sklearn.linear_model.logistic import ( LogisticRegression, logistic_regression_path, LogisticRegressionCV, _logistic_loss_and_grad, _logistic_grad_hess, _multinomial_grad_hess, _logistic_loss, ) from sklearn.model_selection import StratifiedKFold from sklearn.datasets import load_iris, make_classification from sklearn.metrics import log_loss from sklearn.preprocessing import LabelEncoder X = [[-1, 0], [0, 1], [1, 1]] X_sp = sp.csr_matrix(X) Y1 = [0, 1, 1] Y2 = [2, 1, 0] iris = load_iris() def check_predictions(clf, X, y): """Check that the model is able to fit the classification data""" n_samples = len(y) classes = np.unique(y) n_classes = classes.shape[0] predicted = clf.fit(X, y).predict(X) assert_array_equal(clf.classes_, classes) assert_equal(predicted.shape, (n_samples,)) assert_array_equal(predicted, y) probabilities = clf.predict_proba(X) assert_equal(probabilities.shape, (n_samples, n_classes)) assert_array_almost_equal(probabilities.sum(axis=1), np.ones(n_samples)) assert_array_equal(probabilities.argmax(axis=1), y) def test_predict_2_classes(): # Simple sanity check on a 2 classes dataset # Make sure it predicts the correct result on simple datasets. check_predictions(LogisticRegression(random_state=0), X, Y1) check_predictions(LogisticRegression(random_state=0), X_sp, Y1) check_predictions(LogisticRegression(C=100, random_state=0), X, Y1) check_predictions(LogisticRegression(C=100, random_state=0), X_sp, Y1) check_predictions(LogisticRegression(fit_intercept=False, random_state=0), X, Y1) check_predictions(LogisticRegression(fit_intercept=False, random_state=0), X_sp, Y1) def test_error(): # Test for appropriate exception on errors msg = "Penalty term must be positive" assert_raise_message(ValueError, msg, LogisticRegression(C=-1).fit, X, Y1) assert_raise_message(ValueError, msg, LogisticRegression(C="test").fit, X, Y1) for LR in [LogisticRegression, LogisticRegressionCV]: msg = "Tolerance for stopping criteria must be positive" assert_raise_message(ValueError, msg, LR(tol=-1).fit, X, Y1) assert_raise_message(ValueError, msg, LR(tol="test").fit, X, Y1) msg = "Maximum number of iteration must be positive" assert_raise_message(ValueError, msg, LR(max_iter=-1).fit, X, Y1) assert_raise_message(ValueError, msg, LR(max_iter="test").fit, X, Y1) def test_predict_3_classes(): check_predictions(LogisticRegression(C=10), X, Y2) check_predictions(LogisticRegression(C=10), X_sp, Y2) def test_predict_iris(): # Test logistic regression with the iris dataset n_samples, n_features = iris.data.shape target = iris.target_names[iris.target] # Test that both multinomial and OvR solvers handle # multiclass data correctly and give good accuracy # score (>0.95) for the training data. 
for clf in [LogisticRegression(C=len(iris.data)), LogisticRegression(C=len(iris.data), solver='lbfgs', multi_class='multinomial'), LogisticRegression(C=len(iris.data), solver='newton-cg', multi_class='multinomial'), LogisticRegression(C=len(iris.data), solver='sag', tol=1e-2, multi_class='ovr', random_state=42)]: clf.fit(iris.data, target) assert_array_equal(np.unique(target), clf.classes_) pred = clf.predict(iris.data) assert_greater(np.mean(pred == target), .95) probabilities = clf.predict_proba(iris.data) assert_array_almost_equal(probabilities.sum(axis=1), np.ones(n_samples)) pred = iris.target_names[probabilities.argmax(axis=1)] assert_greater(np.mean(pred == target), .95) def test_multinomial_validation(): for solver in ['lbfgs', 'newton-cg', 'sag']: lr = LogisticRegression(C=-1, solver=solver, multi_class='multinomial') assert_raises(ValueError, lr.fit, [[0, 1], [1, 0]], [0, 1]) def test_check_solver_option(): X, y = iris.data, iris.target for LR in [LogisticRegression, LogisticRegressionCV]: msg = ("Logistic Regression supports only liblinear, newton-cg, lbfgs" " and sag solvers, got wrong_name") lr = LR(solver="wrong_name") assert_raise_message(ValueError, msg, lr.fit, X, y) msg = "multi_class should be either multinomial or ovr, got wrong_name" lr = LR(solver='newton-cg', multi_class="wrong_name") assert_raise_message(ValueError, msg, lr.fit, X, y) # only 'liblinear' solver msg = "Solver liblinear does not support a multinomial backend." lr = LR(solver='liblinear', multi_class='multinomial') assert_raise_message(ValueError, msg, lr.fit, X, y) # all solvers except 'liblinear' for solver in ['newton-cg', 'lbfgs', 'sag']: msg = ("Solver %s supports only l2 penalties, got l1 penalty." % solver) lr = LR(solver=solver, penalty='l1') assert_raise_message(ValueError, msg, lr.fit, X, y) msg = ("Solver %s supports only dual=False, got dual=True" % solver) lr = LR(solver=solver, dual=True) assert_raise_message(ValueError, msg, lr.fit, X, y) def test_multinomial_binary(): # Test multinomial LR on a binary problem. target = (iris.target > 0).astype(np.intp) target = np.array(["setosa", "not-setosa"])[target] for solver in ['lbfgs', 'newton-cg', 'sag']: clf = LogisticRegression(solver=solver, multi_class='multinomial', random_state=42, max_iter=2000) clf.fit(iris.data, target) assert_equal(clf.coef_.shape, (1, iris.data.shape[1])) assert_equal(clf.intercept_.shape, (1,)) assert_array_equal(clf.predict(iris.data), target) mlr = LogisticRegression(solver=solver, multi_class='multinomial', random_state=42, fit_intercept=False) mlr.fit(iris.data, target) pred = clf.classes_[np.argmax(clf.predict_log_proba(iris.data), axis=1)] assert_greater(np.mean(pred == target), .9) def test_sparsify(): # Test sparsify and densify members. 
n_samples, n_features = iris.data.shape target = iris.target_names[iris.target] clf = LogisticRegression(random_state=0).fit(iris.data, target) pred_d_d = clf.decision_function(iris.data) clf.sparsify() assert_true(sp.issparse(clf.coef_)) pred_s_d = clf.decision_function(iris.data) sp_data = sp.coo_matrix(iris.data) pred_s_s = clf.decision_function(sp_data) clf.densify() pred_d_s = clf.decision_function(sp_data) assert_array_almost_equal(pred_d_d, pred_s_d) assert_array_almost_equal(pred_d_d, pred_s_s) assert_array_almost_equal(pred_d_d, pred_d_s) def test_inconsistent_input(): # Test that an exception is raised on inconsistent input rng = np.random.RandomState(0) X_ = rng.random_sample((5, 10)) y_ = np.ones(X_.shape[0]) y_[0] = 0 clf = LogisticRegression(random_state=0) # Wrong dimensions for training data y_wrong = y_[:-1] assert_raises(ValueError, clf.fit, X, y_wrong) # Wrong dimensions for test data assert_raises(ValueError, clf.fit(X_, y_).predict, rng.random_sample((3, 12))) def test_write_parameters(): # Test that we can write to coef_ and intercept_ clf = LogisticRegression(random_state=0) clf.fit(X, Y1) clf.coef_[:] = 0 clf.intercept_[:] = 0 assert_array_almost_equal(clf.decision_function(X), 0) @raises(ValueError) def test_nan(): # Test proper NaN handling. # Regression test for Issue #252: fit used to go into an infinite loop. Xnan = np.array(X, dtype=np.float64) Xnan[0, 1] = np.nan LogisticRegression(random_state=0).fit(Xnan, Y1) def test_consistency_path(): # Test that the path algorithm is consistent rng = np.random.RandomState(0) X = np.concatenate((rng.randn(100, 2) + [1, 1], rng.randn(100, 2))) y = [1] * 100 + [-1] * 100 Cs = np.logspace(0, 4, 10) f = ignore_warnings # can't test with fit_intercept=True since LIBLINEAR # penalizes the intercept for solver in ('lbfgs', 'newton-cg', 'liblinear', 'sag'): coefs, Cs, _ = f(logistic_regression_path)( X, y, Cs=Cs, fit_intercept=False, tol=1e-5, solver=solver, random_state=0) for i, C in enumerate(Cs): lr = LogisticRegression(C=C, fit_intercept=False, tol=1e-5, random_state=0) lr.fit(X, y) lr_coef = lr.coef_.ravel() assert_array_almost_equal(lr_coef, coefs[i], decimal=4, err_msg="with solver = %s" % solver) # test for fit_intercept=True for solver in ('lbfgs', 'newton-cg', 'liblinear', 'sag'): Cs = [1e3] coefs, Cs, _ = f(logistic_regression_path)( X, y, Cs=Cs, fit_intercept=True, tol=1e-6, solver=solver, intercept_scaling=10000., random_state=0) lr = LogisticRegression(C=Cs[0], fit_intercept=True, tol=1e-4, intercept_scaling=10000., random_state=0) lr.fit(X, y) lr_coef = np.concatenate([lr.coef_.ravel(), lr.intercept_]) assert_array_almost_equal(lr_coef, coefs[0], decimal=4, err_msg="with solver = %s" % solver) def test_liblinear_dual_random_state(): # random_state is relevant for liblinear solver only if dual=True X, y = make_classification(n_samples=20, random_state=0) lr1 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15) lr1.fit(X, y) lr2 = LogisticRegression(random_state=0, dual=True, max_iter=1, tol=1e-15) lr2.fit(X, y) lr3 = LogisticRegression(random_state=8, dual=True, max_iter=1, tol=1e-15) lr3.fit(X, y) # same result for same random state assert_array_almost_equal(lr1.coef_, lr2.coef_) # different results for different random states msg = "Arrays are not almost equal to 6 decimals" assert_raise_message(AssertionError, msg, assert_array_almost_equal, lr1.coef_, lr3.coef_) def test_logistic_loss_and_grad(): X_ref, y = make_classification(n_samples=20, random_state=0) n_features = X_ref.shape[1] X_sp = 
X_ref.copy() X_sp[X_sp < .1] = 0 X_sp = sp.csr_matrix(X_sp) for X in (X_ref, X_sp): w = np.zeros(n_features) # First check that our derivation of the grad is correct loss, grad = _logistic_loss_and_grad(w, X, y, alpha=1.) approx_grad = optimize.approx_fprime( w, lambda w: _logistic_loss_and_grad(w, X, y, alpha=1.)[0], 1e-3 ) assert_array_almost_equal(grad, approx_grad, decimal=2) # Second check that our intercept implementation is good w = np.zeros(n_features + 1) loss_interp, grad_interp = _logistic_loss_and_grad( w, X, y, alpha=1. ) assert_array_almost_equal(loss, loss_interp) approx_grad = optimize.approx_fprime( w, lambda w: _logistic_loss_and_grad(w, X, y, alpha=1.)[0], 1e-3 ) assert_array_almost_equal(grad_interp, approx_grad, decimal=2) def test_logistic_grad_hess(): rng = np.random.RandomState(0) n_samples, n_features = 50, 5 X_ref = rng.randn(n_samples, n_features) y = np.sign(X_ref.dot(5 * rng.randn(n_features))) X_ref -= X_ref.mean() X_ref /= X_ref.std() X_sp = X_ref.copy() X_sp[X_sp < .1] = 0 X_sp = sp.csr_matrix(X_sp) for X in (X_ref, X_sp): w = .1 * np.ones(n_features) # First check that _logistic_grad_hess is consistent # with _logistic_loss_and_grad loss, grad = _logistic_loss_and_grad(w, X, y, alpha=1.) grad_2, hess = _logistic_grad_hess(w, X, y, alpha=1.) assert_array_almost_equal(grad, grad_2) # Now check our hessian along the second direction of the grad vector = np.zeros_like(grad) vector[1] = 1 hess_col = hess(vector) # Computation of the Hessian is particularly fragile to numerical # errors when doing simple finite differences. Here we compute the # grad along a path in the direction of the vector and then use a # least-square regression to estimate the slope e = 1e-3 d_x = np.linspace(-e, e, 30) d_grad = np.array([ _logistic_loss_and_grad(w + t * vector, X, y, alpha=1.)[1] for t in d_x ]) d_grad -= d_grad.mean(axis=0) approx_hess_col = linalg.lstsq(d_x[:, np.newaxis], d_grad)[0].ravel() assert_array_almost_equal(approx_hess_col, hess_col, decimal=3) # Second check that our intercept implementation is good w = np.zeros(n_features + 1) loss_interp, grad_interp = _logistic_loss_and_grad(w, X, y, alpha=1.) loss_interp_2 = _logistic_loss(w, X, y, alpha=1.) grad_interp_2, hess = _logistic_grad_hess(w, X, y, alpha=1.) 
assert_array_almost_equal(loss_interp, loss_interp_2) assert_array_almost_equal(grad_interp, grad_interp_2) def test_logistic_cv(): # test for LogisticRegressionCV object n_samples, n_features = 50, 5 rng = np.random.RandomState(0) X_ref = rng.randn(n_samples, n_features) y = np.sign(X_ref.dot(5 * rng.randn(n_features))) X_ref -= X_ref.mean() X_ref /= X_ref.std() lr_cv = LogisticRegressionCV(Cs=[1.], fit_intercept=False, solver='liblinear') lr_cv.fit(X_ref, y) lr = LogisticRegression(C=1., fit_intercept=False) lr.fit(X_ref, y) assert_array_almost_equal(lr.coef_, lr_cv.coef_) assert_array_equal(lr_cv.coef_.shape, (1, n_features)) assert_array_equal(lr_cv.classes_, [-1, 1]) assert_equal(len(lr_cv.classes_), 2) coefs_paths = np.asarray(list(lr_cv.coefs_paths_.values())) assert_array_equal(coefs_paths.shape, (1, 3, 1, n_features)) assert_array_equal(lr_cv.Cs_.shape, (1, )) scores = np.asarray(list(lr_cv.scores_.values())) assert_array_equal(scores.shape, (1, 3, 1)) def test_multinomial_logistic_regression_string_inputs(): # Test with string labels for LogisticRegression(CV) n_samples, n_features, n_classes = 50, 5, 3 X_ref, y = make_classification(n_samples=n_samples, n_features=n_features, n_classes=n_classes, n_informative=3, random_state=0) y_str = LabelEncoder().fit(['bar', 'baz', 'foo']).inverse_transform(y) # For numerical labels, let y values be taken from set (-1, 0, 1) y = np.array(y) - 1 # Test for string labels lr = LogisticRegression(solver='lbfgs', multi_class='multinomial') lr_cv = LogisticRegressionCV(solver='lbfgs', multi_class='multinomial') lr_str = LogisticRegression(solver='lbfgs', multi_class='multinomial') lr_cv_str = LogisticRegressionCV(solver='lbfgs', multi_class='multinomial') lr.fit(X_ref, y) lr_cv.fit(X_ref, y) lr_str.fit(X_ref, y_str) lr_cv_str.fit(X_ref, y_str) assert_array_almost_equal(lr.coef_, lr_str.coef_) assert_equal(sorted(lr_str.classes_), ['bar', 'baz', 'foo']) assert_array_almost_equal(lr_cv.coef_, lr_cv_str.coef_) assert_equal(sorted(lr_str.classes_), ['bar', 'baz', 'foo']) assert_equal(sorted(lr_cv_str.classes_), ['bar', 'baz', 'foo']) # The predictions should be in original labels assert_equal(sorted(np.unique(lr_str.predict(X_ref))), ['bar', 'baz', 'foo']) assert_equal(sorted(np.unique(lr_cv_str.predict(X_ref))), ['bar', 'baz', 'foo']) # Make sure class weights can be given with string labels lr_cv_str = LogisticRegression( solver='lbfgs', class_weight={'bar': 1, 'baz': 2, 'foo': 0}, multi_class='multinomial').fit(X_ref, y_str) assert_equal(sorted(np.unique(lr_cv_str.predict(X_ref))), ['bar', 'baz']) def test_logistic_cv_sparse(): X, y = make_classification(n_samples=50, n_features=5, random_state=0) X[X < 1.0] = 0.0 csr = sp.csr_matrix(X) clf = LogisticRegressionCV(fit_intercept=True) clf.fit(X, y) clfs = LogisticRegressionCV(fit_intercept=True) clfs.fit(csr, y) assert_array_almost_equal(clfs.coef_, clf.coef_) assert_array_almost_equal(clfs.intercept_, clf.intercept_) assert_equal(clfs.C_, clf.C_) def test_intercept_logistic_helper(): n_samples, n_features = 10, 5 X, y = make_classification(n_samples=n_samples, n_features=n_features, random_state=0) # Fit intercept case. alpha = 1. w = np.ones(n_features + 1) grad_interp, hess_interp = _logistic_grad_hess(w, X, y, alpha) loss_interp = _logistic_loss(w, X, y, alpha) # Do not fit intercept. This can be considered equivalent to adding # a feature vector of ones, i.e column of one vectors. 
X_ = np.hstack((X, np.ones(10)[:, np.newaxis])) grad, hess = _logistic_grad_hess(w, X_, y, alpha) loss = _logistic_loss(w, X_, y, alpha) # In the fit_intercept=False case, the feature vector of ones is # penalized. This should be taken care of. assert_almost_equal(loss_interp + 0.5 * (w[-1] ** 2), loss) # Check gradient. assert_array_almost_equal(grad_interp[:n_features], grad[:n_features]) assert_almost_equal(grad_interp[-1] + alpha * w[-1], grad[-1]) rng = np.random.RandomState(0) grad = rng.rand(n_features + 1) hess_interp = hess_interp(grad) hess = hess(grad) assert_array_almost_equal(hess_interp[:n_features], hess[:n_features]) assert_almost_equal(hess_interp[-1] + alpha * grad[-1], hess[-1]) def test_ovr_multinomial_iris(): # Test that OvR and multinomial are correct using the iris dataset. train, target = iris.data, iris.target n_samples, n_features = train.shape # The cv indices from stratified kfold (where stratification is done based # on the fine-grained iris classes, i.e, before the classes 0 and 1 are # conflated) is used for both clf and clf1 n_cv = 2 cv = StratifiedKFold(n_cv) precomputed_folds = list(cv.split(train, target)) # Train clf on the original dataset where classes 0 and 1 are separated clf = LogisticRegressionCV(cv=precomputed_folds) clf.fit(train, target) # Conflate classes 0 and 1 and train clf1 on this modified dataset clf1 = LogisticRegressionCV(cv=precomputed_folds) target_copy = target.copy() target_copy[target_copy == 0] = 1 clf1.fit(train, target_copy) # Ensure that what OvR learns for class2 is same regardless of whether # classes 0 and 1 are separated or not assert_array_almost_equal(clf.scores_[2], clf1.scores_[2]) assert_array_almost_equal(clf.intercept_[2:], clf1.intercept_) assert_array_almost_equal(clf.coef_[2][np.newaxis, :], clf1.coef_) # Test the shape of various attributes. 
assert_equal(clf.coef_.shape, (3, n_features)) assert_array_equal(clf.classes_, [0, 1, 2]) coefs_paths = np.asarray(list(clf.coefs_paths_.values())) assert_array_almost_equal(coefs_paths.shape, (3, n_cv, 10, n_features + 1)) assert_equal(clf.Cs_.shape, (10, )) scores = np.asarray(list(clf.scores_.values())) assert_equal(scores.shape, (3, n_cv, 10)) # Test that for the iris data multinomial gives a better accuracy than OvR for solver in ['lbfgs', 'newton-cg', 'sag']: max_iter = 100 if solver == 'sag' else 15 clf_multi = LogisticRegressionCV( solver=solver, multi_class='multinomial', max_iter=max_iter, random_state=42, tol=1e-2, cv=2) clf_multi.fit(train, target) multi_score = clf_multi.score(train, target) ovr_score = clf.score(train, target) assert_greater(multi_score, ovr_score) # Test attributes of LogisticRegressionCV assert_equal(clf.coef_.shape, clf_multi.coef_.shape) assert_array_equal(clf_multi.classes_, [0, 1, 2]) coefs_paths = np.asarray(list(clf_multi.coefs_paths_.values())) assert_array_almost_equal(coefs_paths.shape, (3, n_cv, 10, n_features + 1)) assert_equal(clf_multi.Cs_.shape, (10, )) scores = np.asarray(list(clf_multi.scores_.values())) assert_equal(scores.shape, (3, n_cv, 10)) def test_logistic_regression_solvers(): X, y = make_classification(n_features=10, n_informative=5, random_state=0) ncg = LogisticRegression(solver='newton-cg', fit_intercept=False) lbf = LogisticRegression(solver='lbfgs', fit_intercept=False) lib = LogisticRegression(fit_intercept=False) sag = LogisticRegression(solver='sag', fit_intercept=False, random_state=42) ncg.fit(X, y) lbf.fit(X, y) sag.fit(X, y) lib.fit(X, y) assert_array_almost_equal(ncg.coef_, lib.coef_, decimal=3) assert_array_almost_equal(lib.coef_, lbf.coef_, decimal=3) assert_array_almost_equal(ncg.coef_, lbf.coef_, decimal=3) assert_array_almost_equal(sag.coef_, lib.coef_, decimal=3) assert_array_almost_equal(sag.coef_, ncg.coef_, decimal=3) assert_array_almost_equal(sag.coef_, lbf.coef_, decimal=3) def test_logistic_regression_solvers_multiclass(): X, y = make_classification(n_samples=20, n_features=20, n_informative=10, n_classes=3, random_state=0) tol = 1e-6 ncg = LogisticRegression(solver='newton-cg', fit_intercept=False, tol=tol) lbf = LogisticRegression(solver='lbfgs', fit_intercept=False, tol=tol) lib = LogisticRegression(fit_intercept=False, tol=tol) sag = LogisticRegression(solver='sag', fit_intercept=False, tol=tol, max_iter=1000, random_state=42) ncg.fit(X, y) lbf.fit(X, y) sag.fit(X, y) lib.fit(X, y) assert_array_almost_equal(ncg.coef_, lib.coef_, decimal=4) assert_array_almost_equal(lib.coef_, lbf.coef_, decimal=4) assert_array_almost_equal(ncg.coef_, lbf.coef_, decimal=4) assert_array_almost_equal(sag.coef_, lib.coef_, decimal=4) assert_array_almost_equal(sag.coef_, ncg.coef_, decimal=4) assert_array_almost_equal(sag.coef_, lbf.coef_, decimal=4) def test_logistic_regressioncv_class_weights(): for weight in [{0: 0.1, 1: 0.2}, {0: 0.1, 1: 0.2, 2: 0.5}]: n_classes = len(weight) for class_weight in (weight, 'balanced'): X, y = make_classification(n_samples=30, n_features=3, n_repeated=0, n_informative=3, n_redundant=0, n_classes=n_classes, random_state=0) clf_lbf = LogisticRegressionCV(solver='lbfgs', Cs=1, fit_intercept=False, class_weight=class_weight) clf_ncg = LogisticRegressionCV(solver='newton-cg', Cs=1, fit_intercept=False, class_weight=class_weight) clf_lib = LogisticRegressionCV(solver='liblinear', Cs=1, fit_intercept=False, class_weight=class_weight) clf_sag = LogisticRegressionCV(solver='sag', Cs=1, 
fit_intercept=False, class_weight=class_weight, tol=1e-5, max_iter=10000, random_state=0) clf_lbf.fit(X, y) clf_ncg.fit(X, y) clf_lib.fit(X, y) clf_sag.fit(X, y) assert_array_almost_equal(clf_lib.coef_, clf_lbf.coef_, decimal=4) assert_array_almost_equal(clf_ncg.coef_, clf_lbf.coef_, decimal=4) assert_array_almost_equal(clf_sag.coef_, clf_lbf.coef_, decimal=4) def test_logistic_regression_sample_weights(): X, y = make_classification(n_samples=20, n_features=5, n_informative=3, n_classes=2, random_state=0) sample_weight = y + 1 for LR in [LogisticRegression, LogisticRegressionCV]: # Test that passing sample_weight as ones is the same as # not passing them at all (default None) for solver in ['lbfgs', 'liblinear']: clf_sw_none = LR(solver=solver, fit_intercept=False, random_state=42) clf_sw_none.fit(X, y) clf_sw_ones = LR(solver=solver, fit_intercept=False, random_state=42) clf_sw_ones.fit(X, y, sample_weight=np.ones(y.shape[0])) assert_array_almost_equal( clf_sw_none.coef_, clf_sw_ones.coef_, decimal=4) # Test that sample weights work the same with the lbfgs, # newton-cg, and 'sag' solvers clf_sw_lbfgs = LR(solver='lbfgs', fit_intercept=False, random_state=42) clf_sw_lbfgs.fit(X, y, sample_weight=sample_weight) clf_sw_n = LR(solver='newton-cg', fit_intercept=False, random_state=42) clf_sw_n.fit(X, y, sample_weight=sample_weight) clf_sw_sag = LR(solver='sag', fit_intercept=False, tol=1e-10, random_state=42) # ignore convergence warning due to small dataset with ignore_warnings(): clf_sw_sag.fit(X, y, sample_weight=sample_weight) clf_sw_liblinear = LR(solver='liblinear', fit_intercept=False, random_state=42) clf_sw_liblinear.fit(X, y, sample_weight=sample_weight) assert_array_almost_equal( clf_sw_lbfgs.coef_, clf_sw_n.coef_, decimal=4) assert_array_almost_equal( clf_sw_lbfgs.coef_, clf_sw_sag.coef_, decimal=4) assert_array_almost_equal( clf_sw_lbfgs.coef_, clf_sw_liblinear.coef_, decimal=4) # Test that passing class_weight as [1,2] is the same as # passing class weight = [1,1] but adjusting sample weights # to be 2 for all instances of class 2 for solver in ['lbfgs', 'liblinear']: clf_cw_12 = LR(solver=solver, fit_intercept=False, class_weight={0: 1, 1: 2}, random_state=42) clf_cw_12.fit(X, y) clf_sw_12 = LR(solver=solver, fit_intercept=False, random_state=42) clf_sw_12.fit(X, y, sample_weight=sample_weight) assert_array_almost_equal( clf_cw_12.coef_, clf_sw_12.coef_, decimal=4) # Test the above for l1 penalty and l2 penalty with dual=True. # since the patched liblinear code is different. 
clf_cw = LogisticRegression( solver="liblinear", fit_intercept=False, class_weight={0: 1, 1: 2}, penalty="l1", tol=1e-5, random_state=42) clf_cw.fit(X, y) clf_sw = LogisticRegression( solver="liblinear", fit_intercept=False, penalty="l1", tol=1e-5, random_state=42) clf_sw.fit(X, y, sample_weight) assert_array_almost_equal(clf_cw.coef_, clf_sw.coef_, decimal=4) clf_cw = LogisticRegression( solver="liblinear", fit_intercept=False, class_weight={0: 1, 1: 2}, penalty="l2", dual=True, random_state=42) clf_cw.fit(X, y) clf_sw = LogisticRegression( solver="liblinear", fit_intercept=False, penalty="l2", dual=True, random_state=42) clf_sw.fit(X, y, sample_weight) assert_array_almost_equal(clf_cw.coef_, clf_sw.coef_, decimal=4) def _compute_class_weight_dictionary(y): # helper for returning a dictionary instead of an array classes = np.unique(y) class_weight = compute_class_weight("balanced", classes, y) class_weight_dict = dict(zip(classes, class_weight)) return class_weight_dict def test_logistic_regression_class_weights(): # Multinomial case: remove 90% of class 0 X = iris.data[45:, :] y = iris.target[45:] solvers = ("lbfgs", "newton-cg") class_weight_dict = _compute_class_weight_dictionary(y) for solver in solvers: clf1 = LogisticRegression(solver=solver, multi_class="multinomial", class_weight="balanced") clf2 = LogisticRegression(solver=solver, multi_class="multinomial", class_weight=class_weight_dict) clf1.fit(X, y) clf2.fit(X, y) assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=4) # Binary case: remove 90% of class 0 and 100% of class 2 X = iris.data[45:100, :] y = iris.target[45:100] solvers = ("lbfgs", "newton-cg", "liblinear") class_weight_dict = _compute_class_weight_dictionary(y) for solver in solvers: clf1 = LogisticRegression(solver=solver, multi_class="ovr", class_weight="balanced") clf2 = LogisticRegression(solver=solver, multi_class="ovr", class_weight=class_weight_dict) clf1.fit(X, y) clf2.fit(X, y) assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=6) def test_logistic_regression_convergence_warnings(): # Test that warnings are raised if model does not converge X, y = make_classification(n_samples=20, n_features=20, random_state=0) clf_lib = LogisticRegression(solver='liblinear', max_iter=2, verbose=1) assert_warns(ConvergenceWarning, clf_lib.fit, X, y) assert_equal(clf_lib.n_iter_, 2) def test_logistic_regression_multinomial(): # Tests for the multinomial option in logistic regression # Some basic attributes of Logistic Regression n_samples, n_features, n_classes = 50, 20, 3 X, y = make_classification(n_samples=n_samples, n_features=n_features, n_informative=10, n_classes=n_classes, random_state=0) # 'lbfgs' is used as a referenced solver = 'lbfgs' ref_i = LogisticRegression(solver=solver, multi_class='multinomial') ref_w = LogisticRegression(solver=solver, multi_class='multinomial', fit_intercept=False) ref_i.fit(X, y) ref_w.fit(X, y) assert_array_equal(ref_i.coef_.shape, (n_classes, n_features)) assert_array_equal(ref_w.coef_.shape, (n_classes, n_features)) for solver in ['sag', 'newton-cg']: clf_i = LogisticRegression(solver=solver, multi_class='multinomial', random_state=42, max_iter=1000, tol=1e-6) clf_w = LogisticRegression(solver=solver, multi_class='multinomial', random_state=42, max_iter=1000, tol=1e-6, fit_intercept=False) clf_i.fit(X, y) clf_w.fit(X, y) assert_array_equal(clf_i.coef_.shape, (n_classes, n_features)) assert_array_equal(clf_w.coef_.shape, (n_classes, n_features)) # Compare solutions between lbfgs and the other solvers 
assert_almost_equal(ref_i.coef_, clf_i.coef_, decimal=3) assert_almost_equal(ref_w.coef_, clf_w.coef_, decimal=3) assert_almost_equal(ref_i.intercept_, clf_i.intercept_, decimal=3) # Test that the path give almost the same results. However since in this # case we take the average of the coefs after fitting across all the # folds, it need not be exactly the same. for solver in ['lbfgs', 'newton-cg', 'sag']: clf_path = LogisticRegressionCV(solver=solver, max_iter=2000, tol=1e-6, multi_class='multinomial', Cs=[1.]) clf_path.fit(X, y) assert_array_almost_equal(clf_path.coef_, ref_i.coef_, decimal=3) assert_almost_equal(clf_path.intercept_, ref_i.intercept_, decimal=3) def test_multinomial_grad_hess(): rng = np.random.RandomState(0) n_samples, n_features, n_classes = 100, 5, 3 X = rng.randn(n_samples, n_features) w = rng.rand(n_classes, n_features) Y = np.zeros((n_samples, n_classes)) ind = np.argmax(np.dot(X, w.T), axis=1) Y[range(0, n_samples), ind] = 1 w = w.ravel() sample_weights = np.ones(X.shape[0]) grad, hessp = _multinomial_grad_hess(w, X, Y, alpha=1., sample_weight=sample_weights) # extract first column of hessian matrix vec = np.zeros(n_features * n_classes) vec[0] = 1 hess_col = hessp(vec) # Estimate hessian using least squares as done in # test_logistic_grad_hess e = 1e-3 d_x = np.linspace(-e, e, 30) d_grad = np.array([ _multinomial_grad_hess(w + t * vec, X, Y, alpha=1., sample_weight=sample_weights)[0] for t in d_x ]) d_grad -= d_grad.mean(axis=0) approx_hess_col = linalg.lstsq(d_x[:, np.newaxis], d_grad)[0].ravel() assert_array_almost_equal(hess_col, approx_hess_col) def test_liblinear_decision_function_zero(): # Test negative prediction when decision_function values are zero. # Liblinear predicts the positive class when decision_function values # are zero. This is a test to verify that we do not do the same. # See Issue: https://github.com/scikit-learn/scikit-learn/issues/3600 # and the PR https://github.com/scikit-learn/scikit-learn/pull/3623 X, y = make_classification(n_samples=5, n_features=5, random_state=0) clf = LogisticRegression(fit_intercept=False) clf.fit(X, y) # Dummy data such that the decision function becomes zero. X = np.zeros((5, 5)) assert_array_equal(clf.predict(X), np.zeros(5)) def test_liblinear_logregcv_sparse(): # Test LogRegCV with solver='liblinear' works for sparse matrices X, y = make_classification(n_samples=10, n_features=5, random_state=0) clf = LogisticRegressionCV(solver='liblinear') clf.fit(sparse.csr_matrix(X), y) def test_logreg_intercept_scaling(): # Test that the right error message is thrown when intercept_scaling <= 0 for i in [-1, 0]: clf = LogisticRegression(intercept_scaling=i) msg = ('Intercept scaling is %r but needs to be greater than 0.' ' To disable fitting an intercept,' ' set fit_intercept=False.' % clf.intercept_scaling) assert_raise_message(ValueError, msg, clf.fit, X, Y1) def test_logreg_intercept_scaling_zero(): # Test that intercept_scaling is ignored when fit_intercept is False clf = LogisticRegression(fit_intercept=False) clf.fit(X, Y1) assert_equal(clf.intercept_, 0.) def test_logreg_cv_penalty(): # Test that the correct penalty is passed to the final fit. 
X, y = make_classification(n_samples=50, n_features=20, random_state=0) lr_cv = LogisticRegressionCV(penalty="l1", Cs=[1.0], solver='liblinear') lr_cv.fit(X, y) lr = LogisticRegression(penalty="l1", C=1.0, solver='liblinear') lr.fit(X, y) assert_equal(np.count_nonzero(lr_cv.coef_), np.count_nonzero(lr.coef_)) def test_logreg_predict_proba_multinomial(): X, y = make_classification(n_samples=10, n_features=20, random_state=0, n_classes=3, n_informative=10) # Predicted probabilites using the true-entropy loss should give a # smaller loss than those using the ovr method. clf_multi = LogisticRegression(multi_class="multinomial", solver="lbfgs") clf_multi.fit(X, y) clf_multi_loss = log_loss(y, clf_multi.predict_proba(X)) clf_ovr = LogisticRegression(multi_class="ovr", solver="lbfgs") clf_ovr.fit(X, y) clf_ovr_loss = log_loss(y, clf_ovr.predict_proba(X)) assert_greater(clf_ovr_loss, clf_multi_loss) # Predicted probabilites using the soft-max function should give a # smaller loss than those using the logistic function. clf_multi_loss = log_loss(y, clf_multi.predict_proba(X)) clf_wrong_loss = log_loss(y, clf_multi._predict_proba_lr(X)) assert_greater(clf_wrong_loss, clf_multi_loss) @ignore_warnings def test_max_iter(): # Test that the maximum number of iteration is reached X, y_bin = iris.data, iris.target.copy() y_bin[y_bin == 2] = 0 solvers = ['newton-cg', 'liblinear', 'sag'] # old scipy doesn't have maxiter if sp_version >= (0, 12): solvers.append('lbfgs') for max_iter in range(1, 5): for solver in solvers: for multi_class in ['ovr', 'multinomial']: if solver == 'liblinear' and multi_class == 'multinomial': continue lr = LogisticRegression(max_iter=max_iter, tol=1e-15, multi_class=multi_class, random_state=0, solver=solver) lr.fit(X, y_bin) assert_equal(lr.n_iter_[0], max_iter) def test_n_iter(): # Test that self.n_iter_ has the correct format. X, y = iris.data, iris.target y_bin = y.copy() y_bin[y_bin == 2] = 0 n_Cs = 4 n_cv_fold = 2 for solver in ['newton-cg', 'liblinear', 'sag', 'lbfgs']: # OvR case n_classes = 1 if solver == 'liblinear' else np.unique(y).shape[0] clf = LogisticRegression(tol=1e-2, multi_class='ovr', solver=solver, C=1., random_state=42, max_iter=100) clf.fit(X, y) assert_equal(clf.n_iter_.shape, (n_classes,)) n_classes = np.unique(y).shape[0] clf = LogisticRegressionCV(tol=1e-2, multi_class='ovr', solver=solver, Cs=n_Cs, cv=n_cv_fold, random_state=42, max_iter=100) clf.fit(X, y) assert_equal(clf.n_iter_.shape, (n_classes, n_cv_fold, n_Cs)) clf.fit(X, y_bin) assert_equal(clf.n_iter_.shape, (1, n_cv_fold, n_Cs)) # multinomial case n_classes = 1 if solver in ('liblinear', 'sag'): break clf = LogisticRegression(tol=1e-2, multi_class='multinomial', solver=solver, C=1., random_state=42, max_iter=100) clf.fit(X, y) assert_equal(clf.n_iter_.shape, (n_classes,)) clf = LogisticRegressionCV(tol=1e-2, multi_class='multinomial', solver=solver, Cs=n_Cs, cv=n_cv_fold, random_state=42, max_iter=100) clf.fit(X, y) assert_equal(clf.n_iter_.shape, (n_classes, n_cv_fold, n_Cs)) clf.fit(X, y_bin) assert_equal(clf.n_iter_.shape, (1, n_cv_fold, n_Cs)) def test_warm_start(): # A 1-iteration second fit on same data should give almost same result # with warm starting, and quite different result without warm starting. # Warm starting does not work with liblinear solver. 
X, y = iris.data, iris.target solvers = ['newton-cg', 'sag'] # old scipy doesn't have maxiter if sp_version >= (0, 12): solvers.append('lbfgs') for warm_start in [True, False]: for fit_intercept in [True, False]: for solver in solvers: for multi_class in ['ovr', 'multinomial']: clf = LogisticRegression(tol=1e-4, multi_class=multi_class, warm_start=warm_start, solver=solver, random_state=42, max_iter=100, fit_intercept=fit_intercept) with ignore_warnings(category=ConvergenceWarning): clf.fit(X, y) coef_1 = clf.coef_ clf.max_iter = 1 clf.fit(X, y) cum_diff = np.sum(np.abs(coef_1 - clf.coef_)) msg = ("Warm starting issue with %s solver in %s mode " "with fit_intercept=%s and warm_start=%s" % (solver, multi_class, str(fit_intercept), str(warm_start))) if warm_start: assert_greater(2.0, cum_diff, msg) else: assert_greater(cum_diff, 2.0, msg)
bsd-3-clause
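The test file above leans heavily on one pattern: validate an analytic gradient against finite differences via scipy.optimize.approx_fprime. A minimal, self-contained sketch of that pattern follows; the loss and gradient functions are hypothetical stand-ins written for this note, not sklearn's private _logistic_loss_and_grad helper.

import numpy as np
from scipy import optimize

def logistic_loss(w, X, y, alpha):
    # L2-regularized logistic loss with labels y in {-1, +1}
    z = y * X.dot(w)
    return np.sum(np.log(1 + np.exp(-z))) + 0.5 * alpha * w.dot(w)

def logistic_grad(w, X, y, alpha):
    z = y * X.dot(w)
    s = -y / (1 + np.exp(z))   # derivative of log(1 + exp(-z)) w.r.t. X.dot(w)
    return X.T.dot(s) + alpha * w

rng = np.random.RandomState(0)
X = rng.randn(20, 5)
y = np.sign(rng.randn(20))
w = rng.randn(5)

analytic = logistic_grad(w, X, y, 1.0)
numeric = optimize.approx_fprime(w, logistic_loss, 1e-6, X, y, 1.0)
assert np.allclose(analytic, numeric, atol=1e-3)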
f3r/scikit-learn
examples/gaussian_process/plot_gpr_prior_posterior.py
104
2878
""" ========================================================================== Illustration of prior and posterior Gaussian process for different kernels ========================================================================== This example illustrates the prior and posterior of a GPR with different kernels. Mean, standard deviation, and 10 samples are shown for both prior and posterior. """ print(__doc__) # Authors: Jan Hendrik Metzen <[email protected]> # # License: BSD 3 clause import numpy as np from matplotlib import pyplot as plt from sklearn.gaussian_process import GaussianProcessRegressor from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic, ExpSineSquared, DotProduct, ConstantKernel) kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)), 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1), 1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0, length_scale_bounds=(0.1, 10.0), periodicity_bounds=(1.0, 10.0)), ConstantKernel(0.1, (0.01, 10.0)) * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.0, 10.0)) ** 2), 1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0), nu=1.5)] for fig_index, kernel in enumerate(kernels): # Specify Gaussian Process gp = GaussianProcessRegressor(kernel=kernel) # Plot prior plt.figure(fig_index, figsize=(8, 8)) plt.subplot(2, 1, 1) X_ = np.linspace(0, 5, 100) y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True) plt.plot(X_, y_mean, 'k', lw=3, zorder=9) plt.fill_between(X_, y_mean - y_std, y_mean + y_std, alpha=0.5, color='k') y_samples = gp.sample_y(X_[:, np.newaxis], 10) plt.plot(X_, y_samples, lw=1) plt.xlim(0, 5) plt.ylim(-3, 3) plt.title("Prior (kernel: %s)" % kernel, fontsize=12) # Generate data and fit GP rng = np.random.RandomState(4) X = rng.uniform(0, 5, 10)[:, np.newaxis] y = np.sin((X[:, 0] - 2.5) ** 2) gp.fit(X, y) # Plot posterior plt.subplot(2, 1, 2) X_ = np.linspace(0, 5, 100) y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True) plt.plot(X_, y_mean, 'k', lw=3, zorder=9) plt.fill_between(X_, y_mean - y_std, y_mean + y_std, alpha=0.5, color='k') y_samples = gp.sample_y(X_[:, np.newaxis], 10) plt.plot(X_, y_samples, lw=1) plt.scatter(X[:, 0], y, c='r', s=50, zorder=10) plt.xlim(0, 5) plt.ylim(-3, 3) plt.title("Posterior (kernel: %s)\n Log-Likelihood: %.3f" % (gp.kernel_, gp.log_marginal_likelihood(gp.kernel_.theta)), fontsize=12) plt.tight_layout() plt.show()
bsd-3-clause
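For readers of the example above: gp.sample_y on an unfitted GaussianProcessRegressor draws from the prior, which is simply a zero-mean multivariate normal whose covariance is the kernel matrix. A short sketch of that equivalence; the 1e-10 jitter term is an assumption added here for numerical stability of the factorization.

import numpy as np
from sklearn.gaussian_process.kernels import RBF

X_ = np.linspace(0, 5, 100)[:, np.newaxis]
kernel = 1.0 * RBF(length_scale=1.0)
K = kernel(X_)                       # prior covariance matrix, shape (100, 100)
rng = np.random.RandomState(0)
# 10 draws from N(0, K), each one a function sampled from the GP prior
samples = rng.multivariate_normal(np.zeros(len(X_)),
                                  K + 1e-10 * np.eye(len(X_)), 10)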
anntzer/scikit-learn
examples/linear_model/plot_nnls.py
15
2019
""" ========================== Non-negative least squares ========================== In this example, we fit a linear model with positive constraints on the regression coefficients and compare the estimated coefficients to a classic linear regression. """ print(__doc__) import numpy as np import matplotlib.pyplot as plt from sklearn.metrics import r2_score # %% # Generate some random data np.random.seed(42) n_samples, n_features = 200, 50 X = np.random.randn(n_samples, n_features) true_coef = 3 * np.random.randn(n_features) # Threshold coefficients to render them non-negative true_coef[true_coef < 0] = 0 y = np.dot(X, true_coef) # Add some noise y += 5 * np.random.normal(size=(n_samples, )) # %% # Split the data in train set and test set from sklearn.model_selection import train_test_split X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5) # %% # Fit the Non-Negative least squares. from sklearn.linear_model import LinearRegression reg_nnls = LinearRegression(positive=True) y_pred_nnls = reg_nnls.fit(X_train, y_train).predict(X_test) r2_score_nnls = r2_score(y_test, y_pred_nnls) print("NNLS R2 score", r2_score_nnls) # %% # Fit an OLS. reg_ols = LinearRegression() y_pred_ols = reg_ols.fit(X_train, y_train).predict(X_test) r2_score_ols = r2_score(y_test, y_pred_ols) print("OLS R2 score", r2_score_ols) # %% # Comparing the regression coefficients between OLS and NNLS, we can observe # they are highly correlated (the dashed line is the identity relation), # but the non-negative constraint shrinks some to 0. # The Non-Negative Least squares inherently yield sparse results. fig, ax = plt.subplots() ax.plot(reg_ols.coef_, reg_nnls.coef_, linewidth=0, marker=".") low_x, high_x = ax.get_xlim() low_y, high_y = ax.get_ylim() low = max(low_x, low_y) high = min(high_x, high_y) ax.plot([low, high], [low, high], ls="--", c=".3", alpha=.5) ax.set_xlabel("OLS regression coefficients", fontweight="bold") ax.set_ylabel("NNLS regression coefficients", fontweight="bold")
bsd-3-clause
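LinearRegression(positive=True) in the example above solves a non-negative least-squares problem; scipy exposes the underlying solver directly. A minimal sketch using scipy.optimize.nnls, with illustrative variable names:

import numpy as np
from scipy.optimize import nnls

rng = np.random.RandomState(42)
A = rng.randn(200, 50)
coef = 3 * rng.randn(50)
coef[coef < 0] = 0          # ground truth is non-negative by construction
b = A.dot(coef)

x, rnorm = nnls(A, b)       # solves min ||Ax - b||_2 subject to x >= 0
assert (x >= 0).all()       # guaranteed by the constraint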
ambikeshwar1991/sandhi-2
module/gr36/gr-filter/examples/chirp_channelize.py
13
6948
#!/usr/bin/env python # # Copyright 2009,2012 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr, blks2 from gnuradio import filter import sys, time try: import scipy from scipy import fftpack except ImportError: print "Error: Program requires scipy (see: www.scipy.org)." sys.exit(1) try: import pylab from pylab import mlab except ImportError: print "Error: Program requires matplotlib (see: matplotlib.sourceforge.net)." sys.exit(1) class pfb_top_block(gr.top_block): def __init__(self): gr.top_block.__init__(self) self._N = 200000 # number of samples to use self._fs = 9000 # initial sampling rate self._M = 9 # Number of channels to channelize # Create a set of taps for the PFB channelizer self._taps = filter.firdes.low_pass_2(1, self._fs, 500, 20, attenuation_dB=10, window=filter.firdes.WIN_BLACKMAN_hARRIS) # Calculate the number of taps per channel for our own information tpc = scipy.ceil(float(len(self._taps)) / float(self._M)) print "Number of taps: ", len(self._taps) print "Number of channels: ", self._M print "Taps per channel: ", tpc repeated = True if(repeated): self.vco_input = gr.sig_source_f(self._fs, gr.GR_SIN_WAVE, 0.25, 110) else: amp = 100 data = scipy.arange(0, amp, amp/float(self._N)) self.vco_input = gr.vector_source_f(data, False) # Build a VCO controlled by either the sinusoid or single chirp tone # Then convert this to a complex signal self.vco = gr.vco_f(self._fs, 225, 1) self.f2c = gr.float_to_complex() self.head = gr.head(gr.sizeof_gr_complex, self._N) # Construct the channelizer filter self.pfb = filter.pfb.channelizer_ccf(self._M, self._taps) # Construct a vector sink for the input signal to the channelizer self.snk_i = gr.vector_sink_c() # Connect the blocks self.connect(self.vco_input, self.vco, self.f2c) self.connect(self.f2c, self.head, self.pfb) self.connect(self.f2c, self.snk_i) # Create a vector sink for each of M output channels of the filter and connect it self.snks = list() for i in xrange(self._M): self.snks.append(gr.vector_sink_c()) self.connect((self.pfb, i), self.snks[i]) def main(): tstart = time.time() tb = pfb_top_block() tb.run() tend = time.time() print "Run time: %f" % (tend - tstart) if 1: fig_in = pylab.figure(1, figsize=(16,9), facecolor="w") fig1 = pylab.figure(2, figsize=(16,9), facecolor="w") fig2 = pylab.figure(3, figsize=(16,9), facecolor="w") fig3 = pylab.figure(4, figsize=(16,9), facecolor="w") Ns = 650 Ne = 20000 fftlen = 8192 winfunc = scipy.blackman fs = tb._fs # Plot the input signal on its own figure d = tb.snk_i.data()[Ns:Ne] spin_f = fig_in.add_subplot(2, 1, 1) X,freq = mlab.psd(d, NFFT=fftlen, noverlap=fftlen/4, Fs=fs, window = lambda d: d*winfunc(fftlen), scale_by_freq=True) X_in = 10.0*scipy.log10(abs(fftpack.fftshift(X))) f_in = scipy.arange(-fs/2.0, fs/2.0, fs/float(X_in.size)) pin_f = spin_f.plot(f_in, 
X_in, "b") spin_f.set_xlim([min(f_in), max(f_in)+1]) spin_f.set_ylim([-200.0, 50.0]) spin_f.set_title("Input Signal", weight="bold") spin_f.set_xlabel("Frequency (Hz)") spin_f.set_ylabel("Power (dBW)") Ts = 1.0/fs Tmax = len(d)*Ts t_in = scipy.arange(0, Tmax, Ts) x_in = scipy.array(d) spin_t = fig_in.add_subplot(2, 1, 2) pin_t = spin_t.plot(t_in, x_in.real, "b") pin_t = spin_t.plot(t_in, x_in.imag, "r") spin_t.set_xlabel("Time (s)") spin_t.set_ylabel("Amplitude") Ncols = int(scipy.floor(scipy.sqrt(tb._M))) Nrows = int(scipy.floor(tb._M / Ncols)) if(tb._M % Ncols != 0): Nrows += 1 # Plot each of the channels outputs. Frequencies on Figure 2 and # time signals on Figure 3 fs_o = tb._fs / tb._M Ts_o = 1.0/fs_o Tmax_o = len(d)*Ts_o for i in xrange(len(tb.snks)): # remove issues with the transients at the beginning # also remove some corruption at the end of the stream # this is a bug, probably due to the corner cases d = tb.snks[i].data()[Ns:Ne] sp1_f = fig1.add_subplot(Nrows, Ncols, 1+i) X,freq = mlab.psd(d, NFFT=fftlen, noverlap=fftlen/4, Fs=fs_o, window = lambda d: d*winfunc(fftlen), scale_by_freq=True) X_o = 10.0*scipy.log10(abs(X)) f_o = freq p2_f = sp1_f.plot(f_o, X_o, "b") sp1_f.set_xlim([min(f_o), max(f_o)+1]) sp1_f.set_ylim([-200.0, 50.0]) sp1_f.set_title(("Channel %d" % i), weight="bold") sp1_f.set_xlabel("Frequency (Hz)") sp1_f.set_ylabel("Power (dBW)") x_o = scipy.array(d) t_o = scipy.arange(0, Tmax_o, Ts_o) sp2_o = fig2.add_subplot(Nrows, Ncols, 1+i) p2_o = sp2_o.plot(t_o, x_o.real, "b") p2_o = sp2_o.plot(t_o, x_o.imag, "r") sp2_o.set_xlim([min(t_o), max(t_o)+1]) sp2_o.set_ylim([-2, 2]) sp2_o.set_title(("Channel %d" % i), weight="bold") sp2_o.set_xlabel("Time (s)") sp2_o.set_ylabel("Amplitude") sp3 = fig3.add_subplot(1,1,1) p3 = sp3.plot(t_o, x_o.real) sp3.set_xlim([min(t_o), max(t_o)+1]) sp3.set_ylim([-2, 2]) sp3.set_title("All Channels") sp3.set_xlabel("Time (s)") sp3.set_ylabel("Amplitude") pylab.show() if __name__ == "__main__": try: main() except KeyboardInterrupt: pass
gpl-3.0
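The flow graph above builds a polyphase filter-bank channelizer. As rough intuition only, the sketch below is a degenerate NumPy version with a single-tap (rectangular) prototype filter instead of GNU Radio's designed low-pass taps; it is not the pfb.channelizer_ccf algorithm, just the DFT filter-bank idea behind it.

import numpy as np

def simple_channelizer(x, M):
    # Degenerate polyphase channelizer: each branch filter is a single tap.
    # A real design, as in the flow graph above, spreads a long low-pass
    # prototype filter across the M branches before the DFT.
    n = (len(x) // M) * M
    blocks = x[:n].reshape(-1, M)         # one row per output time step
    return np.fft.fft(blocks, axis=1).T   # shape: (channels, time)

fs, M = 9000.0, 9
t = np.arange(0, 1, 1 / fs)
x = np.exp(2j * np.pi * 1000 * t)         # tone centered in channel 1 (fs/M = 1000 Hz)
channels = simple_channelizer(x, M)
print(np.argmax(np.abs(channels).mean(axis=1)))   # -> 1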
russel1237/scikit-learn
sklearn/svm/tests/test_bounds.py
280
2541
import nose
from nose.tools import assert_equal, assert_true
from sklearn.utils.testing import clean_warning_registry
import warnings

import numpy as np
from scipy import sparse as sp

from sklearn.svm.bounds import l1_min_c
from sklearn.svm import LinearSVC
from sklearn.linear_model.logistic import LogisticRegression


dense_X = [[-1, 0], [0, 1], [1, 1], [1, 1]]
sparse_X = sp.csr_matrix(dense_X)

Y1 = [0, 1, 1, 1]
Y2 = [2, 1, 0, 0]


def test_l1_min_c():
    losses = ['squared_hinge', 'log']
    Xs = {'sparse': sparse_X, 'dense': dense_X}
    Ys = {'two-classes': Y1, 'multi-class': Y2}
    intercepts = {'no-intercept': {'fit_intercept': False},
                  'fit-intercept': {'fit_intercept': True,
                                    'intercept_scaling': 10}}

    for loss in losses:
        for X_label, X in Xs.items():
            for Y_label, Y in Ys.items():
                for intercept_label, intercept_params in intercepts.items():
                    check = lambda: check_l1_min_c(X, Y, loss,
                                                   **intercept_params)
                    check.description = ('Test l1_min_c loss=%r %s %s %s' %
                                         (loss, X_label, Y_label,
                                          intercept_label))
                    yield check


def test_l2_deprecation():
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        assert_equal(l1_min_c(dense_X, Y1, "l2"),
                     l1_min_c(dense_X, Y1, "squared_hinge"))
        assert_equal(w[0].category, DeprecationWarning)


def check_l1_min_c(X, y, loss, fit_intercept=True, intercept_scaling=None):
    min_c = l1_min_c(X, y, loss, fit_intercept, intercept_scaling)
    clf = {
        'log': LogisticRegression(penalty='l1'),
        'squared_hinge': LinearSVC(loss='squared_hinge',
                                   penalty='l1', dual=False),
    }[loss]

    clf.fit_intercept = fit_intercept
    clf.intercept_scaling = intercept_scaling

    clf.C = min_c
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) == 0).all())
    assert_true((np.asarray(clf.intercept_) == 0).all())

    clf.C = min_c * 1.01
    clf.fit(X, y)
    assert_true((np.asarray(clf.coef_) != 0).any() or
                (np.asarray(clf.intercept_) != 0).any())


@nose.tools.raises(ValueError)
def test_ill_posed_min_c():
    X = [[0, 0], [0, 0]]
    y = [0, 1]
    l1_min_c(X, y)


@nose.tools.raises(ValueError)
def test_unsupported_loss():
    l1_min_c(dense_X, Y1, 'l1')
bsd-3-clause
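What l1_min_c computes, in words: the largest penalty value C at which an L1-regularized model is still entirely zero, so any C above it yields at least one active coefficient. A sketch of that boundary behavior, mirroring check_l1_min_c above; exact zero counts can depend on solver and library version.

import numpy as np
from sklearn.svm import l1_min_c
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification

X, y = make_classification(n_samples=50, n_features=5, random_state=0)
c = l1_min_c(X, y, loss='log')

# At the threshold every coefficient is still clipped to zero ...
clf = LogisticRegression(penalty='l1', solver='liblinear', C=c).fit(X, y)
print(np.count_nonzero(clf.coef_))   # expected: 0

# ... and just above it at least one feature enters the model.
clf.set_params(C=c * 1.01).fit(X, y)
print(np.count_nonzero(clf.coef_))   # expected: >= 1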
nmayorov/scikit-learn
examples/classification/plot_digits_classification.py
289
2397
""" ================================ Recognizing hand-written digits ================================ An example showing how the scikit-learn can be used to recognize images of hand-written digits. This example is commented in the :ref:`tutorial section of the user manual <introduction>`. """ print(__doc__) # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org> # License: BSD 3 clause # Standard scientific Python imports import matplotlib.pyplot as plt # Import datasets, classifiers and performance metrics from sklearn import datasets, svm, metrics # The digits dataset digits = datasets.load_digits() # The data that we are interested in is made of 8x8 images of digits, let's # have a look at the first 3 images, stored in the `images` attribute of the # dataset. If we were working from image files, we could load them using # pylab.imread. Note that each image must have the same size. For these # images, we know which digit they represent: it is given in the 'target' of # the dataset. images_and_labels = list(zip(digits.images, digits.target)) for index, (image, label) in enumerate(images_and_labels[:4]): plt.subplot(2, 4, index + 1) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Training: %i' % label) # To apply a classifier on this data, we need to flatten the image, to # turn the data in a (samples, feature) matrix: n_samples = len(digits.images) data = digits.images.reshape((n_samples, -1)) # Create a classifier: a support vector classifier classifier = svm.SVC(gamma=0.001) # We learn the digits on the first half of the digits classifier.fit(data[:n_samples / 2], digits.target[:n_samples / 2]) # Now predict the value of the digit on the second half: expected = digits.target[n_samples / 2:] predicted = classifier.predict(data[n_samples / 2:]) print("Classification report for classifier %s:\n%s\n" % (classifier, metrics.classification_report(expected, predicted))) print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted)) images_and_predictions = list(zip(digits.images[n_samples / 2:], predicted)) for index, (image, prediction) in enumerate(images_and_predictions[:4]): plt.subplot(2, 4, index + 5) plt.axis('off') plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest') plt.title('Prediction: %i' % prediction) plt.show()
bsd-3-clause
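The manual first-half/second-half split in the example above can also be written with train_test_split. A sketch, assuming a scikit-learn version that supports the shuffle parameter (0.19+):

from sklearn import datasets, svm
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report

digits = datasets.load_digits()
data = digits.images.reshape((len(digits.images), -1))

# shuffle=False reproduces the contiguous first-half/second-half split
X_train, X_test, y_train, y_test = train_test_split(
    data, digits.target, test_size=0.5, shuffle=False)

clf = svm.SVC(gamma=0.001).fit(X_train, y_train)
print(classification_report(y_test, clf.predict(X_test)))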
Eric89GXL/scikit-learn
sklearn/utils/sparsetools/tests/test_spanning_tree.py
11
2295
"""Test the minimum spanning tree function""" from __future__ import division, print_function, absolute_import import numpy as np from numpy.testing import assert_ import numpy.testing as npt from scipy.sparse import csr_matrix from sklearn.utils import minimum_spanning_tree def test_minimum_spanning_tree(): # Create a graph with two connected components. graph = [[0, 1, 0, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 8, 5], [0, 0, 8, 0, 1], [0, 0, 5, 1, 0]] graph = np.asarray(graph) # Create the expected spanning tree. expected = [[0, 1, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 5], [0, 0, 0, 0, 1], [0, 0, 0, 0, 0]] expected = np.asarray(expected) # Ensure minimum spanning tree code gives this expected output. csgraph = csr_matrix(graph) mintree = minimum_spanning_tree(csgraph) npt.assert_array_equal(mintree.todense(), expected, 'Incorrect spanning tree found.') # Ensure that the original graph was not modified. npt.assert_array_equal(csgraph.todense(), graph, 'Original graph was modified.') # Now let the algorithm modify the csgraph in place. mintree = minimum_spanning_tree(csgraph, overwrite=True) npt.assert_array_equal(mintree.todense(), expected, 'Graph was not properly modified to contain MST.') np.random.seed(1234) for N in (5, 10, 15, 20): # Create a random graph. graph = 3 + np.random.random((N, N)) csgraph = csr_matrix(graph) # The spanning tree has at most N - 1 edges. mintree = minimum_spanning_tree(csgraph) assert_(mintree.nnz < N) # Set the sub diagonal to 1 to create a known spanning tree. idx = np.arange(N - 1) graph[idx, idx + 1] = 1 csgraph = csr_matrix(graph) mintree = minimum_spanning_tree(csgraph) # We expect to see this pattern in the spanning tree and otherwise # have this zero. expected = np.zeros((N, N)) expected[idx, idx + 1] = 1 npt.assert_array_equal(mintree.todense(), expected, 'Incorrect spanning tree found.')
bsd-3-clause
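The sklearn.utils copy of minimum_spanning_tree tested above mirrors the canonical implementation in scipy.sparse.csgraph. A small worked example against the scipy version; the comment lists the expected MST for this graph:

import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import minimum_spanning_tree

# Undirected weighted graph given as an upper-triangular sparse matrix
graph = csr_matrix([[0, 8, 0, 3],
                    [0, 0, 2, 5],
                    [0, 0, 0, 6],
                    [0, 0, 0, 0]])
mst = minimum_spanning_tree(graph)
print(mst.toarray())
# Expected edges: 0-3 (weight 3), 1-3 (weight 5), 1-2 (weight 2); total 10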
vascotenner/holoviews
holoviews/plotting/bokeh/util.py
1
11041
from distutils.version import LooseVersion from collections import defaultdict import numpy as np try: from matplotlib import colors import matplotlib.cm as cm except ImportError: cm, colors = None, None import bokeh bokeh_version = LooseVersion(bokeh.__version__) from bokeh.core.enums import Palette from bokeh.document import Document from bokeh.models.plots import Plot from bokeh.models import GlyphRenderer from bokeh.models.widgets import DataTable, Tabs from bokeh.plotting import Figure if bokeh_version >= '0.12': from bokeh.layouts import WidgetBox from ...core.options import abbreviated_exception # Conversion between matplotlib and bokeh markers markers = {'s': {'marker': 'square'}, 'd': {'marker': 'diamond'}, '^': {'marker': 'triangle', 'orientation': 0}, '>': {'marker': 'triangle', 'orientation': np.pi/2}, 'v': {'marker': 'triangle', 'orientation': np.pi}, '<': {'marker': 'triangle', 'orientation': -np.pi/2}, '1': {'marker': 'triangle', 'orientation': 0}, '2': {'marker': 'triangle', 'orientation': np.pi/2}, '3': {'marker': 'triangle', 'orientation': np.pi}, '4': {'marker': 'triangle', 'orientation': -np.pi/2}} # List of models that do not update correctly and must be ignored # Should only include models that have no direct effect on the display # and can therefore be safely ignored. Axes currently fail saying # LinearAxis.computed_bounds cannot be updated IGNORED_MODELS = ['LinearAxis', 'LogAxis'] # Where to look for the ignored models LOCATIONS = ['new', 'below'] # Model priority order to ensure some types are updated before others MODEL_PRIORITY = ['Range1d', 'Title', 'Image', 'LinearColorMapper', 'Plot', 'Range1d', 'LinearAxis', 'ColumnDataSource'] def rgb2hex(rgb): """ Convert RGB(A) tuple to hex. """ if len(rgb) > 3: rgb = rgb[:-1] return "#{0:02x}{1:02x}{2:02x}".format(*(int(v*255) for v in rgb)) def mplcmap_to_palette(cmap): """ Converts a matplotlib colormap to palette of RGB hex strings." """ if colors is None: raise ValueError("Using cmaps on objects requires matplotlib.") with abbreviated_exception(): colormap = cm.get_cmap(cmap) #choose any matplotlib colormap here return [rgb2hex(m) for m in colormap(np.arange(colormap.N))] def get_cmap(cmap): """ Returns matplotlib cmap generated from bokeh palette or directly accessed from matplotlib. """ with abbreviated_exception(): rgb_vals = getattr(Palette, cmap, None) if rgb_vals: return colors.ListedColormap(rgb_vals, name=cmap) return cm.get_cmap(cmap) def mpl_to_bokeh(properties): """ Utility to process style properties converting any matplotlib specific options to their nearest bokeh equivalent. """ new_properties = {} for k, v in properties.items(): if k == 's': new_properties['size'] = v elif k == 'marker': new_properties.update(markers.get(v, {'marker': v})) elif k == 'color' or k.endswith('_color'): with abbreviated_exception(): v = colors.ColorConverter.colors.get(v, v) if isinstance(v, tuple): with abbreviated_exception(): v = rgb2hex(v) new_properties[k] = v else: new_properties[k] = v new_properties.pop('cmap', None) return new_properties def layout_padding(plots): """ Temporary workaround to allow empty plots in a row of a bokeh GridPlot type. Should be removed when https://github.com/bokeh/bokeh/issues/2891 is resolved. 
""" widths, heights = defaultdict(int), defaultdict(int) for r, row in enumerate(plots): for c, p in enumerate(row): if p is not None: width = p.plot_width if isinstance(p, Plot) else p.width height = p.plot_height if isinstance(p, Plot) else p.height widths[c] = max(widths[c], width) heights[r] = max(heights[r], height) expanded_plots = [] for r, row in enumerate(plots): expanded_plots.append([]) for c, p in enumerate(row): if p is None: p = Figure(plot_width=widths[c], plot_height=heights[r]) p.text(x=0, y=0, text=[' ']) p.xaxis.visible = False p.yaxis.visible = False p.outline_line_color = None p.xgrid.grid_line_color = None p.ygrid.grid_line_color = None expanded_plots[r].append(p) return expanded_plots def convert_datetime(time): return time.astype('datetime64[s]').astype(float)*1000 def models_to_json(models): """ Convert list of bokeh models into json to update plot(s). """ json_data, ids = [], [] for plotobj in models: if plotobj.ref['id'] in ids: continue else: ids.append(plotobj.ref['id']) json = plotobj.to_json(False) json.pop('tool_events', None) json.pop('renderers', None) json_data.append({'id': plotobj.ref['id'], 'type': plotobj.ref['type'], 'data': json}) return json_data def refs(json): """ Finds all the references to other objects in the json representation of a bokeh Document. """ result = {} for obj in json['roots']['references']: result[obj['id']] = obj return result def compute_static_patch(document, models, json=None): """ Computes a patch to update an existing document without diffing the json first, making it suitable for static updates between arbitrary frames. Note that this only supports changed attributes and will break if new models have been added since the plot was first created. """ references = refs(json if json else document.to_json()) requested_updates = [m.ref['id'] for m in models] value_refs = {} events = [] update_types = defaultdict(list) for ref_id, obj in references.items(): if ref_id not in requested_updates: continue if obj['type'] in MODEL_PRIORITY: priority = MODEL_PRIORITY.index(obj['type']) else: priority = float('inf') for key, val in obj['attributes'].items(): event = Document._event_for_attribute_change(references, obj, key, val, value_refs) events.append((priority, event)) update_types[obj['type']].append(key) events = [delete_refs(e, LOCATIONS, IGNORED_MODELS) for _, e in sorted(events, key=lambda x: x[0])] value_refs = {ref_id: val for ref_id, val in value_refs.items()} value_refs = delete_refs(value_refs, LOCATIONS, IGNORED_MODELS) return dict(events=events, references=list(value_refs.values())) def delete_refs(obj, locs, delete): """ Delete all references to specific model types by recursively traversing the object and looking for the models to be deleted in the supplied locations. Note: Can be deleted once bokeh stops raising errors when updating LinearAxis.computed_bounds """ if isinstance(obj, dict): if 'type' in obj and obj['type'] in delete: return None for k, v in obj.items(): if k in locs: ref = delete_refs(v, locs, delete) if ref: obj[k] = ref else: del obj[k] else: obj[k] = v return obj elif isinstance(obj, list): objs = [delete_refs(v, locs, delete) for v in obj] return [o for o in objs if o is not None] else: return obj def hsv_to_rgb(hsv): """ Vectorized HSV to RGB conversion, adapted from: http://stackoverflow.com/questions/24852345/hsv-to-rgb-color-conversion """ h, s, v = (hsv[..., i] for i in range(3)) shape = h.shape i = np.int_(h*6.) 
f = h*6.-i q = f t = 1.-f i = np.ravel(i) f = np.ravel(f) i%=6 t = np.ravel(t) q = np.ravel(q) s = np.ravel(s) v = np.ravel(v) clist = (1-s*np.vstack([np.zeros_like(f),np.ones_like(f),q,t]))*v #0:v 1:p 2:q 3:t order = np.array([[0,3,1],[2,0,1],[1,0,3],[1,2,0],[3,1,0],[0,1,2]]) rgb = clist[order[i], np.arange(np.prod(shape))[:,None]] return rgb.reshape(shape+(3,)) def update_plot(old, new): """ Updates an existing plot or figure with a new plot, useful for bokeh charts and mpl conversions, which do not allow updating an existing plot easily. ALERT: Should be replaced once bokeh supports it directly """ old_renderers = old.select(type=GlyphRenderer) new_renderers = new.select(type=GlyphRenderer) old.x_range.update(**new.x_range.properties_with_values()) old.y_range.update(**new.y_range.properties_with_values()) updated = [] for new_r in new_renderers: for old_r in old_renderers: if type(old_r.glyph) == type(new_r.glyph): old_renderers.pop(old_renderers.index(old_r)) new_props = new_r.properties_with_values() source = new_props.pop('data_source') old_r.glyph.update(**new_r.glyph.properties_with_values()) old_r.update(**new_props) old_r.data_source.data.update(source.data) updated.append(old_r) break for old_r in old_renderers: if old_r not in updated: emptied = {k: [] for k in old_r.data_source.data} old_r.data_source.data.update(emptied) def pad_plots(plots, padding=0.85): """ Accepts a grid of bokeh plots in form of a list of lists and wraps any DataTable or Tabs in a WidgetBox with appropriate padding. Required to avoid overlap in gridplot. """ widths = [] for row in plots: row_widths = [] for p in row: if isinstance(p, Tabs): width = np.max([p.width if isinstance(p, DataTable) else t.child.plot_width for t in p.tabs]) for p in p.tabs: p.width = int(padding*width) elif isinstance(p, DataTable): width = p.width p.width = int(padding*width) elif p: width = p.plot_width else: width = 0 row_widths.append(width) widths.append(row_widths) plots = [[WidgetBox(p, width=w) if isinstance(p, (DataTable, Tabs)) else p for p, w in zip(row, ws)] for row, ws in zip(plots, widths)] total_width = np.max([np.sum(row) for row in widths]) return plots, total_width
bsd-3-clause
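The hsv_to_rgb helper above vectorizes the standard HSV-to-RGB conversion. matplotlib ships an equivalent vectorized routine that can serve as a reference implementation when testing such code; a short sketch:

import numpy as np
import matplotlib.colors as mcolors

# Any array of shape (..., 3) with values in [0, 1] is accepted
hsv = np.random.RandomState(0).rand(4, 4, 3)
rgb = mcolors.hsv_to_rgb(hsv)
assert rgb.shape == hsv.shape
assert ((rgb >= 0) & (rgb <= 1)).all()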
Djabbz/scikit-learn
examples/mixture/plot_gmm.py
248
2817
""" ================================= Gaussian Mixture Model Ellipsoids ================================= Plot the confidence ellipsoids of a mixture of two Gaussians with EM and variational Dirichlet process. Both models have access to five components with which to fit the data. Note that the EM model will necessarily use all five components while the DP model will effectively only use as many as are needed for a good fit. This is a property of the Dirichlet Process prior. Here we can see that the EM model splits some components arbitrarily, because it is trying to fit too many components, while the Dirichlet Process model adapts it number of state automatically. This example doesn't show it, as we're in a low-dimensional space, but another advantage of the Dirichlet process model is that it can fit full covariance matrices effectively even when there are less examples per cluster than there are dimensions in the data, due to regularization properties of the inference algorithm. """ import itertools import numpy as np from scipy import linalg import matplotlib.pyplot as plt import matplotlib as mpl from sklearn import mixture # Number of samples per component n_samples = 500 # Generate random sample, two components np.random.seed(0) C = np.array([[0., -0.1], [1.7, .4]]) X = np.r_[np.dot(np.random.randn(n_samples, 2), C), .7 * np.random.randn(n_samples, 2) + np.array([-6, 3])] # Fit a mixture of Gaussians with EM using five components gmm = mixture.GMM(n_components=5, covariance_type='full') gmm.fit(X) # Fit a Dirichlet process mixture of Gaussians using five components dpgmm = mixture.DPGMM(n_components=5, covariance_type='full') dpgmm.fit(X) color_iter = itertools.cycle(['r', 'g', 'b', 'c', 'm']) for i, (clf, title) in enumerate([(gmm, 'GMM'), (dpgmm, 'Dirichlet Process GMM')]): splot = plt.subplot(2, 1, 1 + i) Y_ = clf.predict(X) for i, (mean, covar, color) in enumerate(zip( clf.means_, clf._get_covars(), color_iter)): v, w = linalg.eigh(covar) u = w[0] / linalg.norm(w[0]) # as the DP will not use every component it has access to # unless it needs it, we shouldn't plot the redundant # components. if not np.any(Y_ == i): continue plt.scatter(X[Y_ == i, 0], X[Y_ == i, 1], .8, color=color) # Plot an ellipse to show the Gaussian component angle = np.arctan(u[1] / u[0]) angle = 180 * angle / np.pi # convert to degrees ell = mpl.patches.Ellipse(mean, v[0], v[1], 180 + angle, color=color) ell.set_clip_box(splot.bbox) ell.set_alpha(0.5) splot.add_artist(ell) plt.xlim(-10, 10) plt.ylim(-3, 6) plt.xticks(()) plt.yticks(()) plt.title(title) plt.show()
bsd-3-clause
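Since the GMM example above derives each ellipse from the eigendecomposition of a component covariance, here is a self-contained sketch of just that step (the covariance values are illustrative):

import numpy as np
from scipy import linalg

covar = np.array([[2.0, 0.3],
                  [0.3, 0.5]])
v, w = linalg.eigh(covar)                        # eigenvalues, eigenvectors
u = w[0] / linalg.norm(w[0])                     # normalised axis direction
angle = 180.0 * np.arctan(u[1] / u[0]) / np.pi   # orientation in degrees
print(v, angle)                                  # ellipse axis scales and rotation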
MartinDelzant/scikit-learn
sklearn/metrics/cluster/tests/test_bicluster.py
394
1770
"""Testing for bicluster metrics module""" import numpy as np from sklearn.utils.testing import assert_equal, assert_almost_equal from sklearn.metrics.cluster.bicluster import _jaccard from sklearn.metrics import consensus_score def test_jaccard(): a1 = np.array([True, True, False, False]) a2 = np.array([True, True, True, True]) a3 = np.array([False, True, True, False]) a4 = np.array([False, False, True, True]) assert_equal(_jaccard(a1, a1, a1, a1), 1) assert_equal(_jaccard(a1, a1, a2, a2), 0.25) assert_equal(_jaccard(a1, a1, a3, a3), 1.0 / 7) assert_equal(_jaccard(a1, a1, a4, a4), 0) def test_consensus_score(): a = [[True, True, False, False], [False, False, True, True]] b = a[::-1] assert_equal(consensus_score((a, a), (a, a)), 1) assert_equal(consensus_score((a, a), (b, b)), 1) assert_equal(consensus_score((a, b), (a, b)), 1) assert_equal(consensus_score((a, b), (b, a)), 1) assert_equal(consensus_score((a, a), (b, a)), 0) assert_equal(consensus_score((a, a), (a, b)), 0) assert_equal(consensus_score((b, b), (a, b)), 0) assert_equal(consensus_score((b, b), (b, a)), 0) def test_consensus_score_issue2445(): ''' Different number of biclusters in A and B''' a_rows = np.array([[True, True, False, False], [False, False, True, True], [False, False, False, True]]) a_cols = np.array([[True, True, False, False], [False, False, True, True], [False, False, False, True]]) idx = [0, 2] s = consensus_score((a_rows, a_cols), (a_rows[idx], a_cols[idx])) # B contains 2 of the 3 biclusters in A, so score should be 2/3 assert_almost_equal(s, 2.0/3.0)
bsd-3-clause
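As a reference for the expected values in the tests above, a minimal NumPy sketch (not the sklearn implementation) of the bicluster Jaccard index: each bicluster is a pair of boolean row and column masks, and the score is the ratio of intersecting to united matrix cells.

import numpy as np

def jaccard_cells(a_rows, a_cols, b_rows, b_cols):
    # Cells covered by a bicluster are the outer product of its masks.
    a_cells = np.outer(a_rows, a_cols)
    b_cells = np.outer(b_rows, b_cols)
    intersection = np.logical_and(a_cells, b_cells).sum()
    union = a_cells.sum() + b_cells.sum() - intersection
    return intersection / float(union)

a1 = np.array([True, True, False, False])
a2 = np.array([True, True, True, True])
print(jaccard_cells(a1, a1, a2, a2))  # 0.25, matching the test above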
timbalam/GroopM
groopm/plot.py
1
51299
#!/usr/bin/env python ############################################################################### # # # plot.py # # # # Data visualisation # # # # Copyright (C) Michael Imelfort, Tim Lamberton # # # ############################################################################### # # # .d8888b. 888b d888 # # d88P Y88b 8888b d8888 # # 888 888 88888b.d88888 # # 888 888d888 .d88b. .d88b. 88888b. 888Y88888P888 # # 888 88888 888P" d88""88b d88""88b 888 "88b 888 Y888P 888 # # 888 888 888 888 888 888 888 888 888 888 Y8P 888 # # Y88b d88P 888 Y88..88P Y88..88P 888 d88P 888 " 888 # # "Y8888P88 888 "Y88P" "Y88P" 88888P" 888 888 # # 888 # # 888 # # 888 # # # ############################################################################### # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # ############################################################################### __author__ = "Michael Imelfort, Tim Lamberton" __copyright__ = "Copyright 2012-2015" __credits__ = ["Michael Imelfort", "Tim Lamberton"] __license__ = "GPL3" __maintainer__ = "Tim Lamberton" __email__ = "[email protected]" __status__ = "Development" ############################################################################### import os import sys import colorsys import operator import numpy as np import numpy.linalg as np_linalg import scipy.spatial.distance as sp_distance import scipy.cluster.hierarchy as sp_hierarchy import scipy.stats as sp_stats import matplotlib.pyplot as plt import matplotlib.colors as plt_colors import matplotlib.cm as plt_cm import matplotlib.colorbar as plt_colorbar import matplotlib.lines as plt_lines import matplotlib.markers as plt_markers import matplotlib.patches as plt_patches from mpl_toolkits.mplot3d import axes3d, Axes3D # GroopM imports from utils import makeSurePathExists, split_contiguous, group_iterator from groopmExceptions import BinNotFoundException, invalidParameter from profileManager import ProfileManager from binManager import BinManager import distance from cluster import (ProfileDistanceEngine, StreamingProfileDistanceEngine, FileCacher, MarkerCheckCQE, MarkerCheckFCE ) from classification import BinClassifier import hierarchy from data3 import ClassificationEngine from extract import BinReader np.seterr(all='raise') ############################################################################### ############################################################################### ############################################################################### ############################################################################### class ExplorePlotManager: """Plot and highlight contigs from bins near a contig""" def __init__(self, dbFileName, folder=None): self._pm = ProfileManager(dbFileName) self._dbFileName = dbFileName self._outDir = os.getcwd() if folder == "" else folder # make the dir if need be if self._outDir is not None: makeSurePathExists(self._outDir) def loadProfile(self, 
timer): return self._pm.loadData(timer, loadMarkers=True, loadBins=True, loadReachability=True) def plot(self, timer, centres=None, centre_type="bin", origin="mediod", colorMap="HSV", prefix="BIN", surface=False, rawDistances=False, groupfile="", separator=",", savedDistsPrefix="", keepDists=False ): profile = self.loadProfile(timer) if centre_type not in ["bin", "group"]: raise invalidParameter("centre_type", centre_type) bm = BinManager(profile) if centre_type=="bin": if centres is None or len(centres)==0: centres = bm.getBids() else: bm.checkBids(centres) group_list = None if groupfile!="": print " Parsing group assignments" group_list = GroupAssignmentParser().parse(groupfile, separator, profile.contigNames) print " %s" % timer.getTimeStamp() if centre_type=="group": if group_list is None: raise ValueError("`group_list` cannot be None when `centre_type` is `group`.") if centres is None or len(centres)==0: centres = np.setdiff1d(np.unique(group_list), [""]) else: missing_groups = np.in1d(centres, group_list, invert=True) if np.any(missing_groups): print ("WARNING: No contig(s) assigned to group(s) {0}.".format(",".join(np.asarray(centres)[missing_groups]))) if savedDistsPrefix=="": savedDistsPrefix = self._dbFileName+".dists" cacher = FileCacher(savedDistsPrefix) print " Initialising plotter" fplot = ContigExplorerPlotter(profile, colourmap=colorMap, cacher=cacher, surface=surface, rawDistances=rawDistances, origin=origin ) print " %s" % timer.getTimeStamp() first_plot = True queue = [] for i in range(len(centres)-1,-1,-1): queue.append(centres[i]) categories = profile.binIds if centre_type=="bin" else group_list validate = lambda x: x in categories while len(queue) > 0: if self._outDir is not None: centre = queue.pop() fileName = os.path.join(self._outDir, "{0}_{1}.png".format(prefix, centre)) else: if not first_plot: current = self.promptOnPlot(queue[-1], centre_type=centre_type, validate=validate) if current is None: break if current!=queue[-1]: queue.append(current) centre = queue.pop() fileName = "" first_plot = False is_central = categories==centre highlight_markers = np.unique(profile.mapping.markerNames[is_central[profile.mapping.rowIndices]]) highlight_groups = [] if group_list is None else np.setdiff1d(np.unique(group_list[is_central]), [""]) highlight_bins = np.setdiff1d(np.unique(profile.binIds[is_central]), [0]) fplot.plot(fileName=fileName, centre=centre, centre_type=centre_type, highlight_bins=highlight_bins, highlight_markers=highlight_markers, highlight_groups=highlight_groups, group_list=group_list) if self._outDir is not None: print " %s" % timer.getTimeStamp() if not keepDists: try: cacher.cleanup() except: raise def promptOnPlot(self, centre, centre_type="bin", validate=lambda _: True, minimal=False): """Check that the user wants to continue interactive plotting""" input_not_ok = True while(input_not_ok): if(minimal): option = raw_input(" Enter {0} id, or q to quit, or enter to continue:".format(centre_type)) else: option = raw_input(""" The next plot is {0} Enter {1} id, or q to quit, or enter to continue:""".format(centre, centre_type)) try: centre = int(option) if validate(centre): print "****************************************************************" return centre else: print("Error, no {1} with id '{0}'".format(centre, centre_type)) minimal=True except ValueError: if option.upper() in ["Q", ""]: print "****************************************************************" if(option.upper() == "Q"): print("Operation cancelled") return None else:
return centre else: print("Error, unrecognised choice '{0}'".format(option)) minimal=True class ReachabilityPlotManager: """Plot and highlight contigs from a bin""" def __init__(self, dbFileName, folder=None): self._pm = ProfileManager(dbFileName) self._outDir = os.getcwd() if folder == "" else folder # make the dir if need be if self._outDir is not None: makeSurePathExists(self._outDir) def loadProfile(self, timer): return self._pm.loadData(timer, loadBins=True, loadMarkers=True, loadReachability=True) def plot(self, timer, bids=None, label="tag", filename="REACH.png", groupfile="", separator="" ): profile = self.loadProfile(timer) bm = BinManager(profile) if bids is None or len(bids) == 0: bids = bm.getBids() show="all" else: bm.checkBids(bids) show="bids" group_list = None if groupfile!="": print " Parsing group assignments" group_list = GroupAssignmentParser().parse(groupfile, separator, profile.contigNames) print " %s" % timer.getTimeStamp() print " Initialising plotter" fplot = ProfileReachabilityPlotter(profile) print " %s" % timer.getTimeStamp() if os.path.splitext(filename)[1] != '.png': filename+='.png' fileName = "" if self._outDir is None else os.path.join(self._outDir, filename) is_in_bin = np.in1d(profile.binIds, bids) highlight_markers = np.unique(profile.mapping.markerNames[is_in_bin[profile.mapping.rowIndices]]) highlight_groups = [] if group_list is None else np.unique(group_list[is_in_bin]) fplot.plot(fileName=fileName, bids=bids, highlight_markers=highlight_markers, highlight_groups=highlight_groups, group_list=group_list, show=show, label=label) if self._outDir is not None: print " %s" % timer.getTimeStamp() # Basic plotting tools class GenericPlotter: def plot(self, fileName=""): # plot contigs in coverage space fig = plt.figure() self.plotOnFig(fig) if(fileName != ""): try: fig.set_size_inches(15,15) plt.savefig(fileName,dpi=300) except: print "Error saving image:", fileName, sys.exc_info()[0] raise else: print "Plotting" try: plt.show() except: print "Error showing image", sys.exc_info()[0] raise plt.close(fig) del fig def plotOnFig(self, fig): pass class Plotter2D(GenericPlotter): def plotOnFig(self, fig): ax = fig.add_subplot(111) self.plotOnAx(ax, fig) def plotOnAx(self, ax, fig): pass class Plotter3D(GenericPlotter): def plotOnFig(self, fig): ax = fig.add_subplot(111, projection='3d') self.plotOnAx(ax, fig) def plotOnAx(self, ax, fig): pass # Plot types class FeatureAxisPlotter: def __init__(self, x, y, colours, sizes, edgecolours, markers, legend_data=None, z=None, xticks=None, yticks=None, xticklabels=None, yticklabels=None, xlim=None, ylim=None, zlim=None, xscale=None, yscale=None, xlabel="", ylabel="", zlabel=""): """ Parameters ------ x: array_like, shape (n,) y: array_like, shape (n,) colours: color or sequence of color sizes: scalar or array_like, shape (n,) edgecolours: color or sequence of color z: array_like, shape (n,), optional xlabel: string, optional ylabel: string, optional zlabel: string, optional """ self.x = x self.y = y self.z = z self.sizes = sizes self.colours = colours self.edgecolours = edgecolours self.markers = markers self.legend_data = legend_data self.xticks = xticks self.xticklabels = xticklabels self.xlim = xlim self.xscale = xscale self.yticks = yticks self.ylim = ylim self.yticklabels = yticklabels self.yscale = yscale self.zlim = zlim self.xlabel = xlabel self.ylabel = ylabel self.zlabel = zlabel def __call__(self, ax, fig): coords = (self.x, self.y) if self.z is not None: coords += (self.z,)
marker_sets = [(slice(None), '.')] if self.markers is None else self.markers for (ix, marker) in marker_sets: sc = ax.scatter(*[x[ix] for x in coords], c=self.colours[ix], s=self.sizes[ix], marker=marker) sc.set_edgecolors(self.edgecolours[ix]) sc.set_edgecolors = sc.set_facecolors = lambda *args:None if self.legend_data is not None and len(self.legend_data)>0: (labels, data) = zip(*self.legend_data) proxies = [plt_lines.Line2D([0], [0], linestyle="none", markersize=15, **dat) for dat in data] ax.legend(proxies, labels, numpoints=1, loc='lower right', bbox_to_anchor=(1., 0.96), ncol=min(len(labels) // 4, 4)+1) #ax.set_xmargin(0.05) #ax.set_ymargin(0.05) ax.autoscale(True, tight=True) if self.xticks is not None: ax.set_xticks(self.xticks) if self.xticklabels is not None: ax.set_xticklabels(self.xticklabels) if self.xscale is not None: ax.set_xscale(self.xscale) if self.yticks is not None: ax.set_yticks(self.yticks) if self.yticklabels is not None: ax.set_yticklabels(self.yticklabels) if self.yscale is not None: ax.set_yscale(self.yscale) ax.set_xlabel(self.xlabel) if self.xlim is not None: ax.set_xlim(self.xlim) ax.set_ylabel(self.ylabel) if self.ylim is not None: ax.set_ylim(self.ylim) ax.tick_params(labelsize='11') if len(coords) == 3: ax.set_zlabel(self.zlabel) if self.zlim is not None: ax.set_zlim(self.zlim) class FeaturePlotter(Plotter2D): def __init__(self, *args, **kwargs): self.plotOnAx = FeatureAxisPlotter(*args, **kwargs) class SurfacePlotter(Plotter3D): def __init__(self, *args, **kwargs): self.plotOnAx = FeatureAxisPlotter(*args, **kwargs) class DendrogramAxisPlotter: def __init__(self, Z, link_colour_func, leaf_label_func, colourbar=None, xlabel="", ylabel=""): """ Fields ------ Z: linkage matrix, shape(n, 4) link_colour_func: callable leaf_label_func: callable xlabel: string ylabel: string """ self.Z = Z self.link_colour_func = link_colour_func self.leaf_label_func = leaf_label_func self.xlabel = xlabel self.ylabel = ylabel self.colourbar = colourbar def __call__(self, ax, fig): sp_hierarchy.dendrogram(self.Z, ax=ax, p=4000, truncate_mode='lastp', #distance_sort='ascending', color_threshold=0, leaf_label_func=self.leaf_label_func, link_color_func=self.link_colour_func) ax.set_xlabel(self.xlabel) ax.set_ylabel(self.ylabel) if self.colourbar is not None: fig.colorbar(self.colourbar, ax=ax) class DendrogramPlotter(Plotter2D): def __init__(self, *args, **kwargs): self.plotOnAx = DendrogramAxisPlotter(*args, **kwargs) class BarAxisPlotter: def __init__(self, height, colours, xticks=[], xticklabels=[], xlabel="", ylabel="", xlim=None, ylim=None, text=[], text_alignment="center", text_rotation="horizontal", vlines=[], legend_data=None, colourbar=None): self.y = height self.colours = colours self.xlabel = xlabel self.ylabel = ylabel self.xticks = xticks self.xticklabels = xticklabels self.xlim = xlim self.ylim = ylim self.text = text self.text_alignment = text_alignment self.text_rotation = text_rotation self.vlines = vlines self.legend_data = legend_data self.colourbar = colourbar def __call__(self, ax, fig): y = self.y x = np.arange(len(y)) bc = ax.bar(x, y, width=1, color=self.colours, linewidth=0) ax.set_xlabel(self.xlabel) ax.set_ylabel(self.ylabel) if self.xlim is not None: ax.set_xlim(self.xlim) if self.ylim is not None: ax.set_ylim(self.ylim) ax.autoscale(True, axis='x', tight=True) ax.set_xticks(self.xticks) ax.set_xticklabels(self.xticklabels, rotation="horizontal") ax.tick_params(axis="x", length=2.5, direction="out", width=1.2, top='off') ax.tick_params(axis="y", 
labelsize='11') for (x, y, text, dat) in self.text: ax.text(x, y, text, va="bottom", ha=self.text_alignment, rotation=self.text_rotation, rotation_mode="anchor", **dat) for (x, dat) in self.vlines: ax.axvline(x, **dat) if self.legend_data is not None and len(self.legend_data)>0: (labels, color) = zip(*self.legend_data) proxies = [plt_patches.Rectangle((0,0), 0, 0, fill=True, color=clr) for clr in color] ax.legend(proxies, labels, numpoints=1, loc='lower right', bbox_to_anchor=(1., 0.96), ncol=min(len(labels) // 4, 4)+1 ) if self.colourbar is not None: fig.colorbar(self.colourbar, ax=ax) class BarPlotter(Plotter2D): def __init__(self, *args, **kwargs): self.plotOnAx = BarAxisPlotter(*args, **kwargs) # GroopM plotting tools class ProfileReachabilityPlotter: """ Bar plot of reachability distances between points in traversal order. """ def __init__(self, profile): self._profile = profile self._bc = BinClassifier(self._profile.mapping) def plot(self, bids, highlight_groups=[], highlight_markers=[], highlight_taxstring=[], group_list=None, label="tag", show="bids", limit=20, fileName=""): h = self._profile.reachDists o = self._profile.reachOrder n = len(o) # find bin contigs obids = self._profile.binIds[o] (first_binned_pos, last_binned_pos) = split_contiguous(obids, filter_groups=[0]) selected_of_bins = np.flatnonzero(np.in1d(obids[first_binned_pos], bids)) # find region containing selected and neighbouring bins first_of_bins = 0 last_of_bins = len(first_binned_pos)-1 if show=="bids": min_selected_of_bins = selected_of_bins.min() max_selected_of_bins = selected_of_bins.max() first_of_bins = max(first_of_bins, min_selected_of_bins-1) last_of_bins = min(last_of_bins, max_selected_of_bins+1) oZ = hierarchy.linkage_from_reachability(np.arange(n), h) num_obs = sp_hierarchy.num_obs_linkage(oZ) parent = hierarchy.embed_nodes(oZ, first_binned_pos[[first_of_bins, last_of_bins]])[-1] (_r, nodes) = sp_hierarchy.to_tree(oZ, rd=True) region_pos = nodes[parent].pre_order(lambda x: x.id) retry = True while retry: retry = False #assert np.all(np.diff(np.sort(region_pos))==1) region_start_pos = np.min(region_pos) region_end_pos = np.max(region_pos)+1 is_region_bin = np.logical_and(first_binned_pos >= region_start_pos, last_binned_pos <= region_end_pos) num_bins_left = np.count_nonzero(is_region_bin[:min_selected_of_bins]) num_bins_right = np.count_nonzero(is_region_bin[max_selected_of_bins+1:]) too_many_bins = num_bins_left+num_bins_right > limit if too_many_bins and parent > num_obs: left_embed_child = int(oZ[parent - num_obs, 0]) left_region_pos = nodes[left_embed_child].pre_order(lambda x: x.id) if np.all(np.in1d(first_binned_pos[selected_of_bins], left_region_pos)): parent = left_embed_child region_pos = left_region_pos retry = True continue right_embed_child = int(oZ[parent - num_obs, 1]) right_region_pos = nodes[right_embed_child].pre_order(lambda x: x.id) if np.all(np.in1d(first_binned_pos[selected_of_bins], right_region_pos)): parent = right_embed_child region_pos = right_region_pos retry = True continue if region_start_pos > 0: region_start_pos -= 1 if region_end_pos < n: region_end_pos += 1 region_height = h[region_start_pos:region_end_pos] region_indices = o[region_start_pos:region_end_pos] mask = np.zeros(self._profile.numContigs, dtype=bool) mask[region_indices] = True first_binned_region = first_binned_pos[is_region_bin] - region_start_pos last_binned_region = last_binned_pos[is_region_bin] - region_start_pos region_bids = obids[region_start_pos:region_end_pos] selected_of_region_bins = 
np.flatnonzero(np.in1d(region_bids[first_binned_region], bids)) elif show=="all": mask=None region_start_pos = 0 region_end_pos = n region_height = h region_indices = o first_binned_region = first_binned_pos last_binned_region = last_binned_pos region_bids = obids selected_of_region_bins = selected_of_bins unselected_of_region_bins = np.setdiff1d(np.arange(len(first_binned_region)), selected_of_region_bins) # ticks with empty labels for contigs with marker hits he = ProfileHighlightEngine(self._profile) (tick_groups, tick_labels) = he.getHighlighted(markers=highlight_markers, mask=mask, highlight_per_marker=False) xticks = [i for i in np.flatnonzero(tick_groups[region_indices,0])] xticklabels = ["" for _ in xticks] xlabel = "contigs in traversal order" # colouring based on group membership sm = plt_cm.ScalarMappable(norm=plt_colors.Normalize(1, 10), cmap=getColorMap('Highlight1')) #(group_ids, group_labels) = he.getHighlighted(bids=bids, mask=mask) (group_ids, group_labels) = he.getHighlighted(groups=highlight_groups, mask=mask, group_list=group_list) colours = sm.to_rgba(group_ids[region_indices,0]) colours[group_ids[region_indices,0]==0] = plt_colors.colorConverter.to_rgba('cyan') legend_data = [(format_label(l), sm.to_rgba(i)) for (l, i) in zip(group_labels, range(1, len(group_labels)+1))] # alternate colouring of lines for different bins # red and black for selected bins, greys for unselected num_bins = len(first_binned_region) #linecolour_ids = np.tile([0,1], num_bins) #linecolour_ids[2*selected_of_region_bins] += 2 #linecolour_ids[2*selected_of_region_bins+1] += 2 #linecolour_ids = np.zeros(num_bins, dtype=int) #linecolour_ids[unselected_of_region_bins] = np.arange(len(unselected_of_region_bins)) % 2 #linecolour_ids[selected_of_region_bins] = (np.arange(len(selected_of_region_bins)) % 2) + 2 #linecolourmap = plt_colors.ListedColormap(['0.6', '0.7', 'g', 'orange']) linesm = plt_cm.ScalarMappable(norm=plt_colors.Normalize(0, 12), cmap=plt_cm.get_cmap('Paired')) selected_lines = np.zeros(num_bins, dtype=bool) selected_lines[selected_of_region_bins] = True vlines = [tup for tups in (((s, dict(c=linesm.to_rgba(2*i), linewidth=i+1, linestyle=':')), (e, dict(c=linesm.to_rgba(2*i+1), linewidth=i+1, linestyle='-'))) for (i, s, e) in zip(selected_lines, first_binned_region, last_binned_region-1)) for tup in tups] #linecolours = linesm.to_rgba(linecolour_ids) #linewidths = np.full(num_bins, 1, dtype=int) #linewidths[selected_of_region_bins] = 2 #linestyles = np.array([':', '-']) #vlines = zip( # first_binned_region, # [dict(c=clr, linewidth=w, linestyle=linestyles[0]) for (clr, w) in zip(linecolours, linewidths)] # )+zip( # last_binned_region-1, # [dict(c=clr, linewidth=w, linestyle=linestyles[1]) for (clr, w) in zip(linecolours, linewidths)] # ) # label stretches with bin ids group_centers = (first_binned_region+last_binned_region-1)*0.5 group_heights = np.array([region_height[a:b].max() for (a, b) in ((s+1,e) if s+1<e else (s,e+1) for (s, e) in zip(first_binned_region, last_binned_region))]) # group_heights = [region_height.max()]*len(first_binned_region) if label=="bid": group_labels = region_bids[first_binned_region].astype(str) text_alignment = "center" text_rotation = "horizontal" elif label=="tag": mapping_bids = self._profile.binIds[self._profile.mapping.rowIndices] group_labels = np.array(["?" 
if tag=="" else tag for tag in (self._bc.consensusTag(np.flatnonzero(mapping_bids==bid)) for bid in region_bids[first_binned_region])]) text_alignment = "right" text_rotation = -60 else: raise ValueError("Parameter value for 'label' argument must be one of 'bid', 'tag'. Got '%s'." % label) fontsize = np.full(num_bins, 11, dtype=int) fontsize[selected_of_region_bins] = 14 textcolour_id = np.ones(num_bins, dtype=int) textcolour_id[selected_of_region_bins] += 2 textcolours = linesm.to_rgba(textcolour_id) text = zip(group_centers, group_heights, group_labels, [dict(color=clr, fontsize=size) for (clr, size) in zip(textcolours, fontsize)]) hplot = BarPlotter( height=h[region_start_pos+1:region_end_pos], colours=colours[1:], xlabel=xlabel, ylabel="reachability of closest untraversed contig", xticks=xticks, xticklabels=xticklabels, text=text, text_alignment=text_alignment, text_rotation=text_rotation, vlines=vlines, legend_data=legend_data, ) hplot.plot(fileName) class ContigExplorerPlotter: def __init__(self, profile, colourmap='HSV', cacher=None, rawDistances=False, surface=False, origin="mediod", fun=lambda a: a): self._profile = profile self._colourmap = getColorMap(colourmap) self._surface = surface self._rawDistances = rawDistances self._fun = fun self._origin = origin covProfiles = self._profile.covProfiles kmerSigs = self._profile.kmerSigs * (self._profile.contigLengths[:, None] - 3) + 1 kmerSigs = distance.logratio(kmerSigs, axis=1, mode="centered") if not self._rawDistances or self._origin=="mediod": if cacher is None: de = ProfileDistanceEngine() else: de = StreamingProfileDistanceEngine(cacher=cacher, size=int(2**31-1)) (x, y) = de.makeRanks(covProfiles, kmerSigs, self._profile.contigLengths ) scale_factor = 200. / (self._profile.contigLengths.sum()**2-(self._profile.contigLengths**2).sum()) x *= scale_factor y *= scale_factor n = self._profile.numContigs def getRankCoords(i, j): condensed_indices = distance.condensed_index(n, i, j) return (x[condensed_indices], y[condensed_indices]) self._getRankCoords = getRankCoords if self._rawDistances: def getCoords(i,j): x = sp_distance.cdist(covProfiles[[i]], covProfiles[[j]], metric="euclidean") y = sp_distance.cdist(kmerSigs[[i]], kmerSigs[[j]], metric="euclidean") return (x, y) self._getCoords = getCoords self._xlabel = "TMC distance" self._ylabel = "T-Freq distance" else: self._getCoords = self._getRankCoords self._xlabel = "TMC pairwise distance percentile" self._ylabel = "T-Freq pairwise distance percentile" def _get_origin(self, indices, mode="max_length"): if mode=="mediod": #if self._rawDistances: # raise ValueError("`mode` argument parameter value `mediod` is not appropriate for ContigExplorerPlotter with `rawDistances` flag set.") (i, j) = distance.pairs(len(indices)) (x, y) = self._getRankCoords(indices[i], indices[j]) choice = distance.mediod(self._fun(x) + self._fun(y)) label = "mediod" elif mode=="max_density": h = self._profile.reachDists o = self._profile.reachOrder indices = np.flatnonzero(np.in1d(o, indices)) choice = np.argmax(h[indices]) label = "core contig" return (indices[choice], label) elif mode=="max_coverage": choice = np.argmax(self._profile.normCoverages[indices]) label = "highest coverage" elif mode=="max_length": choice = np.argmax(self._profile.contigLengths[indices]) label = "longest" else: raise invalidParameter('mode', mode) return (indices[choice], label) def plot(self, centre, centre_type="bin", highlight_bins=[], highlight_groups=[], highlight_markers=[], highlight_taxstrings=[], group_list=None,
fileName=""): if centre_type=="bin": indices = np.flatnonzero(self._profile.binIds == centre) elif centre_type=="group": indices = np.flatnonzero(group_list == centre) else: raise invalidParameter('centre_type', centre_type) (origin, origin_label) = self._get_origin(indices, mode=self._origin) if centre_type=="bin": origin_label = "bin {0} {1}".format(centre, origin_label) else: origin_label = "{0} {1}".format(format_label(centre), origin_label) n = self._profile.numContigs # sizes s = 20*(2**np.log10(self._profile.contigLengths / np.min(self._profile.contigLengths))) # colorize he = ProfileHighlightEngine(self._profile) (marker_groups, marker_labels) = he.getHighlighted(bids=highlight_bins) (edge_groups, edge_labels) = he.getHighlighted(groups=highlight_groups, group_list=group_list) (colour_groups, colour_labels) = he.getHighlighted(markers=highlight_markers, highlight_per_marker=False) legend_data = [] edgesm = plt_cm.ScalarMappable(norm=plt_colors.Normalize(1, 10), cmap=getColorMap('Highlight1')) edgecolours = edgesm.to_rgba(edge_groups[:,0]) edgecolours[edge_groups[:,0]==0] = plt_colors.colorConverter.to_rgba('k') legend_data.extend([(format_label(label), dict(markeredgecolor=edgesm.to_rgba(i), c="w", marker=".")) for (i, label) in enumerate(edge_labels, 1)]) sm = plt_cm.ScalarMappable(plt_colors.Normalize(vmin=0., vmax=1.), self._colourmap) c = sm.to_rgba(self._profile.contigGCs) coloursm = plt_cm.ScalarMappable(norm=plt_colors.Normalize(1,9), cmap=getColorMap('Highlight2')) is_coloured = colour_groups[:,0]>0 c[is_coloured] = coloursm.to_rgba(colour_groups[is_coloured,0]) is_coloured_plain_edge = np.logical_and(is_coloured, edge_groups[:,0]==0) edgecolours[is_coloured_plain_edge] = c[is_coloured_plain_edge] legend_data.extend([(format_label("{0} present in {1}".format(l, centre_type)), dict(c=coloursm.to_rgba(i), marker=".")) for (i, l) in enumerate(colour_labels, 1)]) marker_list = ['o', '^', 's', 'v', 'D'] markers = [(marker_groups[:,0]==i, marker_list[i % len(marker_list)]) for i in range(len(marker_labels)+1)] legend_data.extend([(format_label(l), dict(marker=marker_list[i % len(marker_list)], c="w")) for (i, l) in enumerate(marker_labels, 1)]) # load distances others = np.array([i for i in range(n) if i!=origin]) x = np.zeros(n, dtype=float) y = np.zeros(n, dtype=float) (x[others], y[others]) = self._getCoords(origin, others) # apply visual transformation xlabel = "{0} from {1}".format(self._xlabel, origin_label) ylabel = "{0} from {1}".format(self._ylabel, origin_label) if self._rawDistances: x += 50./self._profile.contigLengths xticks = None xticklabels = None xscale = "log" xlim = [10**(np.fix(np.log10(x.min()))-1), 10**(np.fix(np.log10(x.max()))+1)] yticks = None yticklabels = None ylim = None else: x = np.sqrt(self._fun(x)) y = np.sqrt(self._fun(y)) xticks = np.sqrt(np.linspace(0, 100, 5)) xticklabels = np.linspace(0, 100, 5).astype(str) xscale = None #xlim = [-0.5, 10.5] xlim = None yticks = np.sqrt(np.linspace(0, 100, 5)) yticklabels = np.linspace(0, 100, 5).astype(str) ylim = None if self._surface: z = self._profile.normCoverages.flatten() fplot = SurfacePlotter(x, y, z=z, colours=c, sizes=s, edgecolours=edgecolours, legend_data=legend_data, xticks=xticks, yticks=yticks, xticklabels=xticklabels, yticklabels=yticklabels, xscale=xscale, xlim=xlim, xlabel=xlabel, ylabel=ylabel, zlabel="absolute TMC") else: fplot = FeaturePlotter(x, y, colours=c, sizes=s, edgecolours=edgecolours, markers=markers, legend_data=legend_data, xticks=xticks, yticks=yticks, xticklabels=xticklabels,
yticklabels=yticklabels, xscale=xscale, xlim=xlim, ylim=ylim, xlabel=xlabel, ylabel=ylabel) fplot.plot(fileName) class GroupManager: def __init__(self, n, mask=None): self._n = n self._mask = mask self._labels = [] self._group_members = [] def addGroup(self, indices, label): is_member = np.zeros(self._n, dtype=bool) is_member[indices] = True if self._mask is None else self._mask[indices] if np.any(is_member): self._group_members.append(is_member) self._labels.append(label) def getGroups(self): if len(self._group_members)==0: return (np.zeros((self._n, 1), dtype=int), np.array([])) ngroups = len(self._group_members) group_ids = np.transpose(self._group_members).astype(int) * np.arange(1,ngroups+1)[None,:] group_ids[group_ids==0] = ngroups+1 sorted_group_ids = np.sort(group_ids, axis=1) sorted_group_ids[sorted_group_ids==ngroups+1] = 0 return (sorted_group_ids, self._labels) def getGroupIntersections(self): if len(self._group_members)==0: return (np.zeros(self._n, dtype=int), np.array([])) flipped_group_members = [m for m in self._group_members] flipped_group_members.reverse() order = np.lexsort(flipped_group_members) sorted_group_intersections = np.fliplr(np.array(flipped_group_members).T[order]) # flag first unique group intersections flag_first = np.concatenate(([np.any(sorted_group_intersections[0])], np.any(sorted_group_intersections[1:]!=sorted_group_intersections[:-1], axis=1))) group_intersection_ids = np.empty(self._n, dtype=int) group_intersection_ids[order] = np.cumsum(flag_first) labels = np.array(self._labels) group_intersection_labels = np.array(["/".join(labels[row]) for row in sorted_group_intersections[flag_first]]) # reverse priority of group intersections nzids = group_intersection_ids!=0 group_intersection_ids *= -1 group_intersection_ids[nzids] += np.count_nonzero(flag_first)+1 group_intersection_labels = np.flipud(group_intersection_labels) return (group_intersection_ids[:, None], group_intersection_labels) class ProfileHighlightEngine: def __init__(self, profile): self._profile = profile def getHighlighted(self, bids=[], markers=[], taxstrings=[], groups=[], group_list=None, mask=None, highlight_per_bid=True, highlight_per_marker=True, highlight_per_group=True, highlight_per_taxstring=True, highlight_intersections=False): # verify highlight inputs BinManager(self._profile).checkBids(bids) if group_list is not None: groups = np.asarray(groups) missing_groups = np.in1d(groups, group_list, invert=True) if np.any(missing_groups): print ("WARNING: No contig(s) assigned to group(s) {0}.".format(",".join(groups[missing_groups]))) markers = np.asarray(markers) missing_markers = np.in1d(markers, self._profile.mapping.markerNames, invert=True) if np.any(missing_markers): print ("WARNING: No hits in database to marker(s) {0}.".format(",".join(markers[missing_markers]))) # highlight groups and labels n = self._profile.numContigs gm = GroupManager(n, mask=mask) if groups is not None and len(groups) > 0: if group_list is None or len(group_list) != n: raise ValueError("ERROR: Expected parameter `group_list` to be an array of length {0}.".format(n)) if highlight_per_group: for group in groups: if group=="": continue gm.addGroup(np.flatnonzero(group_list==group), group) else: negroups = groups[groups!=""] gm.addGroup(np.flatnonzero(np.in1d(group_list, negroups)), "groups") if highlight_per_bid: for bid in bids: if bid == 0: continue gm.addGroup(np.flatnonzero(self._profile.binIds==bid), "bin {0}".format(bid)) else: nzbids = bids[bids!=0] 
gm.addGroup(np.flatnonzero(np.in1d(self._profile.binIds, nzbids)), "bins") if highlight_per_marker: for marker in markers: select_mappings = self._profile.mapping.markerNames==marker gm.addGroup(self._profile.mapping.rowIndices[select_mappings], "scg {0}".format(marker)) else: select_mappings = np.in1d(self._profile.mapping.markerNames, markers) gm.addGroup(self._profile.mapping.rowIndices[select_mappings], "scgs") if highlight_per_taxstring: for taxstring in taxstrings: select_mappings = self._profile.mapping.classification.getPrefixed(taxstring) gm.addGroup(self._profile.mapping.rowIndices[select_mappings], "\"{0}\"".format(taxstring)) else: select_mappings = np.zeros(n, dtype=bool) for taxstring in taxstrings: select_mappings = np.logical_or(select_mappings, self._profile.mapping.classification.getPrefixed(taxstring)) gm.addGroup(self._profile.mapping.rowIndices[select_mappings], "taxons") return gm.getGroupIntersections() if highlight_intersections else gm.getGroups() #------------------------------------------------------------------------------ # Helpers def get_bin_tree(bids, d): bids = np.asarray(bids) d = np.asarray(d) # find bin contigs (first_binned_indices, last_binned_indices) = split_contiguous(bids, filter_groups=[0]) split_obs = np.concatenate(([0], [np.arange(s, e)[np.argmax(d[s:e])] for (s, e) in zip(last_binned_indices[:-1], first_binned_indices[1:])])) Z = hierarchy.linkage_from_reachability(np.arange(len(split_obs)), d[split_obs]) return (Z, split_obs) class GroupAssignmentParser: def parse(self, filename, separator, cids): br = BinReader() try: with open(filename, "r") as f: try: (con_names, con_groups) = br.parse(f, separator) contig_groups = dict(zip(con_names, con_groups)) except: print "Error parsing group assignments" raise except: print "Could not parse group assignment file:", filename, sys.exc_info()[0] raise return np.array([contig_groups.get(cid, "") for cid in cids]) def format_label(label): return "{0}...{1}".format(label[:8], label[-18:]) if len(label)>30 else label def getColorMap(colorMapStr): if colorMapStr == 'HSV': S = 1.0 V = 1.0 return plt_colors.LinearSegmentedColormap.from_list('GC', [colorsys.hsv_to_rgb((1.0 + np.sin(np.pi * (val/1000.0) - np.pi/2))/2., S, V) for val in xrange(0, 1000)], N=1000) elif colorMapStr == 'Highlight1': return plt_cm.get_cmap('Set1') #return plt_colors.ListedColormap(["k", "r", "b", "g", "orange", "darkturquoise", "m"]) elif colorMapStr == 'Highlight2': return plt_cm.get_cmap('Dark2') #return plt_colors.ListedColormap(['cyan', 'dimgrey', 'orangered', 'indigo', 'goldenrod']) elif colorMapStr == 'Accent': return plt_cm.get_cmap('Accent') elif colorMapStr == 'Blues': return plt_cm.get_cmap('Blues') elif colorMapStr == 'Spectral': return plt_cm.get_cmap('spectral') elif colorMapStr == 'Sequential': return plt_cm.get_cmap('copper') elif colorMapStr == 'Grayscale': return plt_cm.get_cmap('gist_yarg') elif colorMapStr == 'Discrete': discrete_map = [(0,0,0)] discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((141/255.0,211/255.0,199/255.0)) discrete_map.append((255/255.0,255/255.0,179/255.0)) discrete_map.append((190/255.0,186/255.0,218/255.0)) discrete_map.append((251/255.0,128/255.0,114/255.0)) discrete_map.append((128/255.0,177/255.0,211/255.0)) discrete_map.append((253/255.0,180/255.0,98/255.0)) discrete_map.append((179/255.0,222/255.0,105/255.0)) discrete_map.append((252/255.0,205/255.0,229/255.0)) discrete_map.append((217/255.0,217/255.0,217/255.0))
discrete_map.append((188/255.0,128/255.0,189/255.0)) discrete_map.append((204/255.0,235/255.0,197/255.0)) discrete_map.append((255/255.0,237/255.0,111/255.0)) discrete_map.append((1,1,1)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) return plt_colors.LinearSegmentedColormap.from_list('GC_DISCRETE', discrete_map, N=20) elif colorMapStr == 'DiscretePaired': discrete_map = [(0,0,0)] discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((166/255.0,206/255.0,227/255.0)) discrete_map.append((31/255.0,120/255.0,180/255.0)) discrete_map.append((178/255.0,223/255.0,138/255.0)) discrete_map.append((51/255.0,160/255.0,44/255.0)) discrete_map.append((251/255.0,154/255.0,153/255.0)) discrete_map.append((227/255.0,26/255.0,28/255.0)) discrete_map.append((253/255.0,191/255.0,111/255.0)) discrete_map.append((255/255.0,127/255.0,0/255.0)) discrete_map.append((202/255.0,178/255.0,214/255.0)) discrete_map.append((106/255.0,61/255.0,154/255.0)) discrete_map.append((255/255.0,255/255.0,179/255.0)) discrete_map.append((217/255.0,95/255.0,2/255.0)) discrete_map.append((1,1,1)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) discrete_map.append((0,0,0)) return plt_colors.LinearSegmentedColormap.from_list('GC_DISCRETE', discrete_map, N=20) ############################################################################### ############################################################################### ############################################################################### ###############################################################################
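A small sketch of the sinusoidal hue sweep behind getColorMap('HSV') above, sampled at a few points instead of 1000 (the sample count is illustrative):

import colorsys
import numpy as np

S, V = 1.0, 1.0
for val in np.linspace(0.0, 1.0, 5):
    hue = (1.0 + np.sin(np.pi * val - np.pi / 2)) / 2.0  # smooth 0..1 hue ramp
    print(val, colorsys.hsv_to_rgb(hue, S, V))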
gpl-3.0
dr-leo/pandaSDMX
pandasdmx/writer/xml.py
1
13596
"""SDMXML v2.1 writer.""" # Contents of this file are organized in the order: # # - Utility methods and global variables. # - writer functions for pandasdmx.message classes, in the same order as message.py # - writer functions for pandasdmx.model classes, in the same order as model.py from itertools import chain from typing import Iterable, cast from lxml import etree from lxml.builder import ElementMaker import pandasdmx.urn from pandasdmx import message, model from pandasdmx.format.xml import NS, qname, tag_for_class from pandasdmx.writer.base import BaseWriter _element_maker = ElementMaker(nsmap={k: v for k, v in NS.items() if v is not None}) writer = BaseWriter("XML") def Element(name, *args, **kwargs): # Remove None kwargs = dict(filter(lambda kv: kv[1] is not None, kwargs.items())) return _element_maker(qname(name), *args, **kwargs) def to_xml(obj, **kwargs): """Convert an SDMX *obj* to SDMX-ML. Parameters ---------- kwargs Passed to :meth:`lxml.etree.to_string`, e.g. `pretty_print` = :obj:`True`. Raises ------ NotImplementedError If writing specific objects to SDMX-ML has not been implemented in :mod:`sdmx`. """ return etree.tostring(writer.recurse(obj), **kwargs) def reference(obj, parent=None, tag=None, style="URN"): """Write a reference to `obj`.""" tag = tag or tag_for_class(obj.__class__) elem = Element(tag) if style == "URN": ref = Element(":URN", obj.urn) elif style == "Ref": if isinstance(obj, model.MaintainableArtefact): ma = obj else: # TODO handle references to non-maintainable children of parent # objects if not parent: for is_ in chain( writer._message.concept_scheme.values(), writer._message.category_scheme.values(), ): if obj in is_: parent = is_ break if not parent: raise NotImplementedError( f"Cannot write reference to {repr(obj)} without parent" ) ma = parent args = { "id": obj.id, "maintainableParentID": ma.id if parent else None, "maintainableParentVersion": ma.version if parent else None, "agencyID": getattr(ma.maintainer, "id", None), "version": ma.version, "package": model.PACKAGE[obj.__class__], "class": etree.QName(tag_for_class(obj.__class__)).localname, } ref = Element(":Ref", **args) else: # pragma: no cover raise ValueError(style) elem.append(ref) return elem # Writers for pandasdmx.message classes @writer def _dm(obj: message.DataMessage): elem = Element("mes:GenericData") header = writer.recurse(obj.header) elem.append(header) # Add DSD references to header for ds in obj.data: attrib = dict() dsd_ref = None if ds.structured_by: attrib["structureID"] = ds.structured_by.id # Reference by URN if possible, otherwise with a <Ref> tag style = "URN" if ds.structured_by.urn else "Ref" dsd_ref = reference(ds.structured_by, tag="com:Structure", style=style) if isinstance(obj.observation_dimension, model.DimensionComponent): attrib["dimensionAtObservation"] = obj.observation_dimension.id header.append(Element("mes:Structure", **attrib)) header[-1].append(dsd_ref) elem.append(writer.recurse(ds)) return elem @writer def _sm(obj: message.StructureMessage): # Store a reference to the overal Message for writing references setattr(writer, "_message", obj) elem = Element("mes:Structure") # Empty header element elem.append(writer.recurse(obj.header)) structures = Element("mes:Structures") elem.append(structures) for attr, tag in [ # Order is important here to avoid forward references ("organisation_scheme", "OrganisationSchemes"), ("dataflow", "Dataflows"), ("category_scheme", "CategorySchemes"), ("categorisation", "Categorisations"), ("codelist", "Codelists"), 
("concept_scheme", "Concepts"), ("structure", "DataStructures"), ("constraint", "Constraints"), ("provisionagreement", "ProvisionAgreements"), ]: if not len(getattr(obj, attr)): continue container = Element(f"str:{tag}") container.extend(writer.recurse(s) for s in getattr(obj, attr).values()) structures.append(container) return elem @writer def _header(obj: message.Header): elem = Element("mes:Header") elem.append(Element("mes:Test", str(obj.test).lower())) if obj.id: elem.append(Element("mes:ID", obj.id)) if obj.prepared: elem.append(Element("mes:Prepared", obj.prepared.isoformat())) if obj.sender: elem.append(writer.recurse(obj.sender, _tag="mes:Sender")) if obj.source: elem.extend(i11lstring(obj.source, "mes:Source")) if obj.receiver: elem.append(writer.recurse(obj.receiver, _tag="mes:Receiver")) return elem # Writers for pandasdmx.model classes # §3.2: Base structures def i11lstring(obj, name): """InternationalString. Returns a list of elements with name `name`. """ elems = [] for locale, label in obj.localizations.items(): child = Element(name, label) child.set(qname("xml", "lang"), locale) elems.append(child) return elems @writer def _a(obj: model.Annotation): elem = Element("com:Annotation") if obj.id: elem.attrib["id"] = obj.id if obj.type: elem.append(Element("com:AnnotationType", obj.type)) elem.extend(i11lstring(obj.text, "com:AnnotationText")) return elem def annotable(obj, **kwargs): cls = kwargs.pop("_tag", tag_for_class(obj.__class__)) try: elem = Element(cls, **kwargs) except AttributeError: # pragma: no cover print(repr(obj), cls, kwargs) raise if len(obj.annotations): e_anno = Element("com:Annotations") e_anno.extend(writer.recurse(a) for a in obj.annotations) elem.append(e_anno) return elem def identifiable(obj, **kwargs): kwargs.setdefault("id", obj.id) try: kwargs.setdefault( "urn", obj.urn or pandasdmx.urn.make(obj, kwargs.pop("parent", None)) ) except (AttributeError, ValueError): pass return annotable(obj, **kwargs) def nameable(obj, **kwargs): elem = identifiable(obj, **kwargs) elem.extend(i11lstring(obj.name, "com:Name")) elem.extend(i11lstring(obj.description, "com:Description")) return elem def maintainable(obj, **kwargs): kwargs.setdefault("version", obj.version) kwargs.setdefault("isExternalReference", str(obj.is_external_reference).lower()) kwargs.setdefault("isFinal", str(obj.is_final).lower()) kwargs.setdefault("agencyID", getattr(obj.maintainer, "id", None)) return nameable(obj, **kwargs) # §3.5: Item Scheme @writer def _item(obj: model.Item, **kwargs): elem = nameable(obj, **kwargs) if obj.parent: # Reference to parent Item e_parent = Element("str:Parent") e_parent.append(Element(":Ref", id=obj.parent.id)) elem.append(e_parent) return elem @writer def _is(obj: model.ItemScheme): elem = maintainable(obj) elem.extend(writer.recurse(i) for i in obj.items.values()) return elem # §3.6: Structure @writer def _facet(obj: model.Facet): # TODO textType should be CamelCase return Element("str:TextFormat", textType=getattr(obj.value_type, "name", None)) @writer def _rep(obj: model.Representation, tag, style="URN"): elem = Element(f"str:{tag}") if obj.enumerated: elem.append(reference(obj.enumerated, tag="str:Enumeration", style=style)) if obj.non_enumerated: elem.extend(writer.recurse(facet) for facet in obj.non_enumerated) return elem # §4.4: Concept Scheme @writer def _concept(obj: model.Concept, **kwargs): elem = _item(obj, **kwargs) if obj.core_representation: elem.append(writer.recurse(obj.core_representation, "CoreRepresentation")) return elem # §3.3: Basic 
Inheritance @writer def _component(obj: model.Component): elem = identifiable(obj) if obj.concept_identity: elem.append( reference(obj.concept_identity, tag="str:ConceptIdentity", style="Ref",) ) if obj.local_representation: elem.append( writer.recurse(obj.local_representation, "LocalRepresentation", style="Ref") ) # DataAttribute only try: elem.append(writer.recurse(cast(model.DataAttribute, obj).related_to)) except AttributeError: pass return elem @writer def _cl(obj: model.ComponentList): elem = identifiable(obj) elem.extend(writer.recurse(c) for c in obj.components) return elem # §4.5: CategoryScheme @writer def _cat(obj: model.Categorisation): elem = maintainable(obj) elem.extend( [ reference(obj.artefact, tag="str:Source", style="Ref"), reference(obj.category, tag="str:Target", style="Ref"), ] ) return elem # §10.3: Constraints @writer def _ms(obj: model.MemberSelection): elem = Element("com:KeyValue", id=obj.values_for.id) elem.extend(Element("com:Value", mv.value) for mv in obj.values) return elem @writer def _cr(obj: model.CubeRegion): elem = Element("str:CubeRegion", include=str(obj.included).lower()) elem.extend(writer.recurse(ms) for ms in obj.member.values()) return elem @writer def _cc(obj: model.ContentConstraint): elem = maintainable( obj, type=obj.role.role.name.replace("allowable", "allowed").title() ) # Constraint attachment for ca in obj.content: elem.append(Element("str:ConstraintAttachment")) elem[-1].append(reference(ca, style="Ref")) elem.extend(writer.recurse(dcr) for dcr in obj.data_content_region) return elem # §5.2: Data Structure Definition @writer def _nsr(obj: model.NoSpecifiedRelationship): elem = Element("str:AttributeRelationship") elem.append(Element("str:None")) return elem @writer def _pmr(obj: model.PrimaryMeasureRelationship): elem = Element("str:AttributeRelationship") elem.append(Element("str:PrimaryMeasure")) elem[-1].append(Element(":Ref", id="(not implemented)")) return elem @writer def _dr(obj: model.DimensionRelationship): elem = Element("str:AttributeRelationship") for dim in obj.dimensions: elem.append(Element("str:Dimension")) elem[-1].append(Element(":Ref", id=dim.id)) return elem @writer def _gr(obj: model.GroupRelationship): elem = Element("str:AttributeRelationship") elem.append(Element("str:Group")) elem[-1].append(Element(":Ref", id=getattr(obj.group_key, "id", None))) return elem @writer def _gdd(obj: model.GroupDimensionDescriptor): elem = identifiable(obj) for dim in obj.components: elem.append(Element("str:GroupDimension")) elem[-1].append(Element("str:DimensionReference")) elem[-1][0].append(Element(":Ref", id=dim.id)) return elem @writer def _dsd(obj: model.DataStructureDefinition): elem = maintainable(obj) elem.append(Element("str:DataStructureComponents")) # Write in a specific order elem[-1].append(writer.recurse(obj.dimensions)) for group in obj.group_dimensions.values(): elem[-1].append(writer.recurse(group)) elem[-1].append(writer.recurse(obj.attributes)) elem[-1].append(writer.recurse(obj.measures)) return elem @writer def _dfd(obj: model.DataflowDefinition): elem = maintainable(obj) elem.append(reference(obj.structure, tag="str:Structure", style="Ref")) return elem # §5.4: Data Set def _av(obj: Iterable[model.AttributeValue]): for av in obj: assert av.value_for yield Element("gen:Value", id=av.value_for.id, value=av.value) @writer def _sk(obj: model.SeriesKey): elem = [] elem.append(Element("gen:SeriesKey")) elem[-1].extend( Element("gen:Value", id=kv.value_for.id, value=kv.value) for kv in obj ) if len(obj.attrib): 
elem.append(Element("gen:Attributes")) elem[-1].extend(_av(obj.attrib.values())) return tuple(elem) @writer def _obs(obj: model.Observation): elem = Element("gen:Obs") assert obj.dimension and len(obj.dimension) == 1 elem.append(Element("gen:ObsDimension", value=obj.dimension.values[0].value)) elem.append(Element("gen:ObsValue", value=obj.value)) if len(obj.attached_attribute): elem.append(Element("gen:Attributes")) elem[-1].extend(_av(obj.attached_attribute.values())) return elem @writer def _ds(obj: model.DataSet): if len(obj.group): raise NotImplementedError("to_xml() for DataSet with groups") attrib = dict() if obj.action: attrib["action"] = str(obj.action) if obj.structured_by: attrib["structureRef"] = obj.structured_by.id elem = Element("mes:DataSet", **attrib) for sk, observations in obj.series.items(): elem.append(Element("gen:Series")) elem[-1].extend(writer.recurse(sk)) elem[-1].extend(writer.recurse(obs) for obs in observations) return elem
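The Element helper at the top of this writer wraps lxml's ElementMaker; a self-contained sketch of that pattern (the namespace URI below is a placeholder, not the real SDMX-ML namespace map):

from lxml import etree
from lxml.builder import ElementMaker

NS = {"mes": "http://example.com/message"}  # placeholder namespace
E = ElementMaker(nsmap=NS)

header = E("{%s}Header" % NS["mes"])
header.append(E("{%s}Test" % NS["mes"], "false"))
print(etree.tostring(header, pretty_print=True).decode())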
apache-2.0
FrauBluher/ShR2
Web Stack/webapp/management/commands/email_interval.py
2
8924
from django.core.management.base import BaseCommand, CommandError from django.contrib.auth.models import User from webapp.models import Notification from django.core.exceptions import ObjectDoesNotExist import boto3 from botocore.exceptions import ClientError from django.conf import settings from matplotlib import pyplot as plt import numpy as np from django.template import Context, Template import os from time import gmtime, strftime from influxdb.influxdb08 import client as influxdb from microdata.models import Device from sets import Set import re import datetime from calendar import monthrange import random from math import factorial from webapp.models import IntervalNotification class Object: def __init__(self, device, value, hungriest): self.device = device self.value = value self.hungriest = hungriest def get_average_usage(user, notification): start = 'now() - 1w' unit = 'h' time_interval = notification.recurrences.occurrences()[1] - notification.recurrences.occurrences()[0] if time_interval == datetime.timedelta(days=30): start = 'now() - 1M' unit = 'd' elif time_interval == datetime.timedelta(days=1): start = 'now() - 1d' unit = 'm' elif time_interval == datetime.timedelta(days=365): start = 'now() - 1y' unit = 'd' stop = 'now()' db = influxdb.InfluxDBClient(settings.INFLUXDB_URI,8086,'root','root','seads') result = db.query('list series')[0] averages = {} for device in Device.objects.filter(owner=user): appliances = Set() for series in result['points']: rg = re.compile('device.'+str(device.serial)) if re.match(rg, series[1]): appliance = series[1].split('device.'+str(device.serial)+'.') if (len(appliance) < 2): continue else: appliances.add(appliance[-1]) average_wattage = 0 hungriest_appliance = [None, 0] for appliance in appliances: try: wattage = db.query('select * from 1'+unit+'.device.'+str(device.serial)+'.'+appliance +\ ' where time > '+start+' and time < '+stop)[0]['points'][0][2] average_wattage += wattage if wattage > hungriest_appliance[1]: hungriest_appliance = [appliance, int(wattage)] except: pass averages[str(device.serial)] = [int(average_wattage), hungriest_appliance] return averages def render_chart(user, notification): date_today = datetime.datetime.today() date_gmtime = gmtime() randbits = str(random.getrandbits(128)) start = 'now() - 1w' unit = 'h' time_interval = notification.recurrences.occurrences()[1] - notification.recurrences.occurrences()[0] interval_keyword = 'weekly' if time_interval == datetime.timedelta(days=30): start = 'now() - 1M' unit = 'd' interval_keyword = 'monthly' elif time_interval == datetime.timedelta(days=1): start = 'now() - 1d' unit = 'm' interval_keyword = 'daily' elif time_interval == datetime.timedelta(days=365): start = 'now() - 1y' unit = 'd' interval_keyword = 'annually' stop = 'now()' db = influxdb.InfluxDBClient(settings.INFLUXDB_URI,8086,'root','root','seads') fig = plt.figure(figsize=(10, 5), dpi=100) # 1000px * 500px figure plt.ylabel('Watts') for device in Device.objects.filter(owner=user): points = {} result = db.query('list series')[0] appliances = Set() for series in result['points']: rg = re.compile('device.'+str(device.serial)) if re.match(rg, series[1]): appliance = series[1].split('device.'+str(device.serial)+'.') if (len(appliance) < 2): continue else: appliances.add(appliance[-1]) for appliance in appliances: query = 'select * from 1'+unit+'.device.'+str(device.serial)+'.'+appliance+' where time > '+start+' and time < '+stop try: group = db.query(query) except: continue if (len(group)): group = group[0]['points'] 
for s in group: if s[0] in points: points[s[0]] += s[2] else: points[s[0]] = s[2] y = [] for key, value in points.iteritems(): y.append(value) x = 0 if interval_keyword == 'monthly' or interval_keyword == 'annually': x = np.array([date_today - datetime.timedelta(days=i) for i in range(len(y))]) elif interval_keyword == 'weekly': x = np.array([date_today - datetime.timedelta(hours=i) for i in range(len(y))]) elif interval_keyword == 'daily': x = np.array([date_today - datetime.timedelta(minutes=i) for i in range(len(y))]) if (len(y) > 0): plt.plot(x, y, label=device) plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.) filepath = settings.STATIC_PATH+'/webapp/img/' filename = interval_keyword + '_' + str(user.pk)+'_'+randbits+'_plot.png' plt.savefig(filepath + filename, bbox_inches="tight") s3 = boto3.resource('s3') data = open(filepath + filename, 'rb') bucket = s3.Bucket(settings.S3_BUCKET) expires = datetime.datetime.today() + datetime.timedelta(days=90) bucket.put_object(Key='email/'+filename, Body=data, ACL='public-read', Expires=str(expires)) resource_url = 'https://'+settings.S3_BUCKET+'.s3.amazonaws.com/email/'+filename os.remove(filepath + filename) return [resource_url, strftime("%a, %d %b %Y %H:%M:%S +0000", date_gmtime)] class Command(BaseCommand): help = \ """ Launches the mail service to send usage information based on the provided interval. This should never be run more than once per day. Intervals: """ for n in IntervalNotification.objects.all(): help += n.recurrences.rrules[0].to_text() + " | " def handle(self, *args, **options): ses = boto3.client('ses') # Save the current date and time today = datetime.datetime.today() # Loop over all users for user in User.objects.all(): # Loop over all notifications for notification in user.usersettings.interval_notification.all(): if notification.recurrences.occurrences()[0].day == today.day: # specified notification is scheduled to run today try: destination = {'ToAddresses': [user.email]} text = "" f = notification.email_body f.open(mode='r') text = f.read() f.close() plot_url, str_time = render_chart(user, notification) average_objects = [] averages = get_average_usage(user, notification) for key, value in averages.iteritems(): average_objects.append(Object(Device.objects.get(serial=key), value[0], value[1])) template = Template(text) rule = notification.recurrences.rrules[0].to_text() context = Context({ 'time': str_time, 'organization': settings.ORG_NAME, 'base_url': settings.BASE_URL, 'interval': str(rule).title(), 'interval_lower': rule, 'user_firstname': user.first_name, 'plot_location': plot_url, 'average_objects': average_objects, 'devices': Device.objects.filter(owner=user), }) message = { 'Subject': { 'Data': settings.ORG_NAME + ' ' + str(rule).title() + ' Consumption Details' }, 'Body': { 'Html': { 'Data': template.render(context) } } } print "" print "Sending email to "+user.username print "Time:" + str_time print settings.ORG_NAME + ' ' + str(rule).title() + ' Consumption Details' print "===============================" ses.send_email(Source=settings.SES_EMAIL, Destination=destination, Message=message, ReturnPath=settings.SES_EMAIL ) except ObjectDoesNotExist: # user has no usersettings. Skip user. pass except ClientError: # user has no email or is not verified. Skip for now. pass
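Both get_average_usage and render_chart above repeat the same mapping from a recurrence interval to an InfluxDB query window; a sketch of that logic factored into one helper (the helper name is illustrative, not part of the app):

import datetime

def query_window(time_interval):
    if time_interval == datetime.timedelta(days=30):
        return 'now() - 1M', 'd', 'monthly'
    if time_interval == datetime.timedelta(days=1):
        return 'now() - 1d', 'm', 'daily'
    if time_interval == datetime.timedelta(days=365):
        return 'now() - 1y', 'd', 'annually'
    return 'now() - 1w', 'h', 'weekly'   # default, as in the code above

print(query_window(datetime.timedelta(days=30)))  # ('now() - 1M', 'd', 'monthly')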
mit
YzPaul3/h2o-3
h2o-py/tests/testdir_algos/glm/pyunit_link_functions_gamma_glm.py
8
2455
from __future__ import division
from __future__ import print_function
from past.utils import old_div
import sys
sys.path.insert(1, "../../../")
import h2o
from tests import pyunit_utils
import pandas as pd
import zipfile
import statsmodels.api as sm
from h2o.estimators.glm import H2OGeneralizedLinearEstimator


def link_functions_gamma():
    print("Read in prostate data.")
    h2o_data = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip"))
    h2o_data.head()

    sm_data = pd.read_csv(zipfile.ZipFile(pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip")).
                          open("prostate_complete.csv")).as_matrix()
    sm_data_response = sm_data[:, 5]
    sm_data_features = sm_data[:, [1, 2, 3, 4, 6, 7, 8, 9]]

    print("Testing for family: GAMMA")
    print("Set variables for h2o.")
    myY = "DPROS"
    myX = ["ID", "AGE", "RACE", "GLEASON", "DCAPS", "PSA", "VOL", "CAPSULE"]

    print("Create models with canonical link: INVERSE")
    h2o_model_in = H2OGeneralizedLinearEstimator(family="gamma", link="inverse", alpha=0.5, Lambda=0)
    h2o_model_in.train(x=myX, y=myY, training_frame=h2o_data)
    sm_model_in = sm.GLM(endog=sm_data_response, exog=sm_data_features,
                         family=sm.families.Gamma(sm.families.links.inverse_power)).fit()

    print("Compare model deviances for link function inverse")
    h2o_deviance_in = old_div(h2o_model_in.residual_deviance(), h2o_model_in.null_deviance())
    sm_deviance_in = old_div(sm_model_in.deviance, sm_model_in.null_deviance)
    assert h2o_deviance_in - sm_deviance_in < 0.01, "expected h2o to have an equivalent or better deviance measure"

    print("Create models with canonical link: LOG")
    h2o_model_log = H2OGeneralizedLinearEstimator(family="gamma", link="log", alpha=0.5, Lambda=0)
    h2o_model_log.train(x=myX, y=myY, training_frame=h2o_data)
    sm_model_log = sm.GLM(endog=sm_data_response, exog=sm_data_features,
                          family=sm.families.Gamma(sm.families.links.log)).fit()

    print("Compare model deviances for link function log")
    h2o_deviance_log = old_div(h2o_model_log.residual_deviance(), h2o_model_log.null_deviance())
    sm_deviance_log = old_div(sm_model_log.deviance, sm_model_log.null_deviance)
    assert h2o_deviance_log - sm_deviance_log < 0.01, "expected h2o to have an equivalent or better deviance measure"


if __name__ == "__main__":
    pyunit_utils.standalone_test(link_functions_gamma)
else:
    link_functions_gamma()
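# --- Editorial sketch, not part of the original test. The pass criterion used
# twice above is the normalized deviance ratio, residual_deviance/null_deviance,
# with h2o expected to match or beat statsmodels within 0.01. A framework-free
# restatement of that check (`_deviance_close_or_better` is a hypothetical name):
def _deviance_close_or_better(h2o_resid, h2o_null, sm_ratio, tol=0.01):
    """True if h2o's residual/null deviance ratio is within tol of (or below) sm's."""
    return (h2o_resid / h2o_null) - sm_ratio < tol
# Example: _deviance_close_or_better(80.0, 100.0, 0.805) -> True (0.800 beats 0.805)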
apache-2.0
rubikloud/scikit-learn
sklearn/utils/tests/test_extmath.py
2
20990
# Authors: Olivier Grisel <[email protected]> # Mathieu Blondel <[email protected]> # Denis Engemann <[email protected]> # # License: BSD 3 clause import numpy as np from scipy import sparse from scipy import linalg from scipy import stats from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_false from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_raises from sklearn.utils.testing import skip_if_32bit from sklearn.utils.extmath import density from sklearn.utils.extmath import logsumexp from sklearn.utils.extmath import norm, squared_norm from sklearn.utils.extmath import randomized_svd from sklearn.utils.extmath import row_norms from sklearn.utils.extmath import weighted_mode from sklearn.utils.extmath import cartesian from sklearn.utils.extmath import log_logistic from sklearn.utils.extmath import fast_dot, _fast_dot from sklearn.utils.extmath import svd_flip from sklearn.utils.extmath import _incremental_mean_and_var from sklearn.utils.extmath import _deterministic_vector_sign_flip from sklearn.utils.extmath import softmax from sklearn.datasets.samples_generator import make_low_rank_matrix def test_density(): rng = np.random.RandomState(0) X = rng.randint(10, size=(10, 5)) X[1, 2] = 0 X[5, 3] = 0 X_csr = sparse.csr_matrix(X) X_csc = sparse.csc_matrix(X) X_coo = sparse.coo_matrix(X) X_lil = sparse.lil_matrix(X) for X_ in (X_csr, X_csc, X_coo, X_lil): assert_equal(density(X_), density(X)) def test_uniform_weights(): # with uniform weights, results should be identical to stats.mode rng = np.random.RandomState(0) x = rng.randint(10, size=(10, 5)) weights = np.ones(x.shape) for axis in (None, 0, 1): mode, score = stats.mode(x, axis) mode2, score2 = weighted_mode(x, weights, axis) assert_true(np.all(mode == mode2)) assert_true(np.all(score == score2)) def test_random_weights(): # set this up so that each row should have a weighted mode of 6, # with a score that is easily reproduced mode_result = 6 rng = np.random.RandomState(0) x = rng.randint(mode_result, size=(100, 10)) w = rng.random_sample(x.shape) x[:, :5] = mode_result w[:, :5] += 1 mode, score = weighted_mode(x, w, axis=1) assert_array_equal(mode, mode_result) assert_array_almost_equal(score.ravel(), w[:, :5].sum(1)) def test_logsumexp(): # Try to add some smallish numbers in logspace x = np.array([1e-40] * 1000000) logx = np.log(x) assert_almost_equal(np.exp(logsumexp(logx)), x.sum()) X = np.vstack([x, x]) logX = np.vstack([logx, logx]) assert_array_almost_equal(np.exp(logsumexp(logX, axis=0)), X.sum(axis=0)) assert_array_almost_equal(np.exp(logsumexp(logX, axis=1)), X.sum(axis=1)) def test_randomized_svd_low_rank(): # Check that extmath.randomized_svd is consistent with linalg.svd n_samples = 100 n_features = 500 rank = 5 k = 10 # generate a matrix X of approximate effective rank `rank` and no noise # component (very structured signal): X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features, effective_rank=rank, tail_strength=0.0, random_state=0) assert_equal(X.shape, (n_samples, n_features)) # compute the singular values of X using the slow exact method U, s, V = linalg.svd(X, full_matrices=False) # compute the singular values of X using the fast approximate method Ua, sa, Va = randomized_svd(X, k) assert_equal(Ua.shape, (n_samples, k)) 
assert_equal(sa.shape, (k,)) assert_equal(Va.shape, (k, n_features)) # ensure that the singular values of both methods are equal up to the real # rank of the matrix assert_almost_equal(s[:k], sa) # check the singular vectors too (while not checking the sign) assert_almost_equal(np.dot(U[:, :k], V[:k, :]), np.dot(Ua, Va)) # check the sparse matrix representation X = sparse.csr_matrix(X) # compute the singular values of X using the fast approximate method Ua, sa, Va = randomized_svd(X, k) assert_almost_equal(s[:rank], sa[:rank]) def test_norm_squared_norm(): X = np.random.RandomState(42).randn(50, 63) X *= 100 # check stability X += 200 assert_almost_equal(np.linalg.norm(X.ravel()), norm(X)) assert_almost_equal(norm(X) ** 2, squared_norm(X), decimal=6) assert_almost_equal(np.linalg.norm(X), np.sqrt(squared_norm(X)), decimal=6) def test_row_norms(): X = np.random.RandomState(42).randn(100, 100) sq_norm = (X ** 2).sum(axis=1) assert_array_almost_equal(sq_norm, row_norms(X, squared=True), 5) assert_array_almost_equal(np.sqrt(sq_norm), row_norms(X)) Xcsr = sparse.csr_matrix(X, dtype=np.float32) assert_array_almost_equal(sq_norm, row_norms(Xcsr, squared=True), 5) assert_array_almost_equal(np.sqrt(sq_norm), row_norms(Xcsr)) def test_randomized_svd_low_rank_with_noise(): # Check that extmath.randomized_svd can handle noisy matrices n_samples = 100 n_features = 500 rank = 5 k = 10 # generate a matrix X wity structure approximate rank `rank` and an # important noisy component X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features, effective_rank=rank, tail_strength=0.5, random_state=0) assert_equal(X.shape, (n_samples, n_features)) # compute the singular values of X using the slow exact method _, s, _ = linalg.svd(X, full_matrices=False) # compute the singular values of X using the fast approximate method # without the iterated power method _, sa, _ = randomized_svd(X, k, n_iter=0) # the approximation does not tolerate the noise: assert_greater(np.abs(s[:k] - sa).max(), 0.05) # compute the singular values of X using the fast approximate method with # iterated power method _, sap, _ = randomized_svd(X, k, n_iter=5) # the iterated power method is helping getting rid of the noise: assert_almost_equal(s[:k], sap, decimal=3) def test_randomized_svd_infinite_rank(): # Check that extmath.randomized_svd can handle noisy matrices n_samples = 100 n_features = 500 rank = 5 k = 10 # let us try again without 'low_rank component': just regularly but slowly # decreasing singular values: the rank of the data matrix is infinite X = make_low_rank_matrix(n_samples=n_samples, n_features=n_features, effective_rank=rank, tail_strength=1.0, random_state=0) assert_equal(X.shape, (n_samples, n_features)) # compute the singular values of X using the slow exact method _, s, _ = linalg.svd(X, full_matrices=False) # compute the singular values of X using the fast approximate method # without the iterated power method _, sa, _ = randomized_svd(X, k, n_iter=0) # the approximation does not tolerate the noise: assert_greater(np.abs(s[:k] - sa).max(), 0.1) # compute the singular values of X using the fast approximate method with # iterated power method _, sap, _ = randomized_svd(X, k, n_iter=5) # the iterated power method is still managing to get most of the structure # at the requested rank assert_almost_equal(s[:k], sap, decimal=3) def test_randomized_svd_transpose_consistency(): # Check that transposing the design matrix has limit impact n_samples = 100 n_features = 500 rank = 4 k = 10 X = 
make_low_rank_matrix(n_samples=n_samples, n_features=n_features, effective_rank=rank, tail_strength=0.5, random_state=0) assert_equal(X.shape, (n_samples, n_features)) U1, s1, V1 = randomized_svd(X, k, n_iter=3, transpose=False, random_state=0) U2, s2, V2 = randomized_svd(X, k, n_iter=3, transpose=True, random_state=0) U3, s3, V3 = randomized_svd(X, k, n_iter=3, transpose='auto', random_state=0) U4, s4, V4 = linalg.svd(X, full_matrices=False) assert_almost_equal(s1, s4[:k], decimal=3) assert_almost_equal(s2, s4[:k], decimal=3) assert_almost_equal(s3, s4[:k], decimal=3) assert_almost_equal(np.dot(U1, V1), np.dot(U4[:, :k], V4[:k, :]), decimal=2) assert_almost_equal(np.dot(U2, V2), np.dot(U4[:, :k], V4[:k, :]), decimal=2) # in this case 'auto' is equivalent to transpose assert_almost_equal(s2, s3) def test_svd_flip(): # Check that svd_flip works in both situations, and reconstructs input. rs = np.random.RandomState(1999) n_samples = 20 n_features = 10 X = rs.randn(n_samples, n_features) # Check matrix reconstruction U, S, V = linalg.svd(X, full_matrices=False) U1, V1 = svd_flip(U, V, u_based_decision=False) assert_almost_equal(np.dot(U1 * S, V1), X, decimal=6) # Check transposed matrix reconstruction XT = X.T U, S, V = linalg.svd(XT, full_matrices=False) U2, V2 = svd_flip(U, V, u_based_decision=True) assert_almost_equal(np.dot(U2 * S, V2), XT, decimal=6) # Check that different flip methods are equivalent under reconstruction U_flip1, V_flip1 = svd_flip(U, V, u_based_decision=True) assert_almost_equal(np.dot(U_flip1 * S, V_flip1), XT, decimal=6) U_flip2, V_flip2 = svd_flip(U, V, u_based_decision=False) assert_almost_equal(np.dot(U_flip2 * S, V_flip2), XT, decimal=6) def test_randomized_svd_sign_flip(): a = np.array([[2.0, 0.0], [0.0, 1.0]]) u1, s1, v1 = randomized_svd(a, 2, flip_sign=True, random_state=41) for seed in range(10): u2, s2, v2 = randomized_svd(a, 2, flip_sign=True, random_state=seed) assert_almost_equal(u1, u2) assert_almost_equal(v1, v2) assert_almost_equal(np.dot(u2 * s2, v2), a) assert_almost_equal(np.dot(u2.T, u2), np.eye(2)) assert_almost_equal(np.dot(v2.T, v2), np.eye(2)) def test_randomized_svd_sign_flip_with_transpose(): # Check if the randomized_svd sign flipping is always done based on u # irrespective of transpose. # See https://github.com/scikit-learn/scikit-learn/issues/5608 # for more details. def max_loading_is_positive(u, v): """ returns bool tuple indicating if the values maximising np.abs are positive across all rows for u and across all columns for v. 
""" u_based = (np.abs(u).max(axis=0) == u.max(axis=0)).all() v_based = (np.abs(v).max(axis=1) == v.max(axis=1)).all() return u_based, v_based mat = np.arange(10 * 8).reshape(10, -1) # Without transpose u_flipped, _, v_flipped = randomized_svd(mat, 3, flip_sign=True) u_based, v_based = max_loading_is_positive(u_flipped, v_flipped) assert_true(u_based) assert_false(v_based) # With transpose u_flipped_with_transpose, _, v_flipped_with_transpose = randomized_svd( mat, 3, flip_sign=True, transpose=True) u_based, v_based = max_loading_is_positive( u_flipped_with_transpose, v_flipped_with_transpose) assert_true(u_based) assert_false(v_based) def test_cartesian(): # Check if cartesian product delivers the right results axes = (np.array([1, 2, 3]), np.array([4, 5]), np.array([6, 7])) true_out = np.array([[1, 4, 6], [1, 4, 7], [1, 5, 6], [1, 5, 7], [2, 4, 6], [2, 4, 7], [2, 5, 6], [2, 5, 7], [3, 4, 6], [3, 4, 7], [3, 5, 6], [3, 5, 7]]) out = cartesian(axes) assert_array_equal(true_out, out) # check single axis x = np.arange(3) assert_array_equal(x[:, np.newaxis], cartesian((x,))) def test_logistic_sigmoid(): # Check correctness and robustness of logistic sigmoid implementation naive_logistic = lambda x: 1 / (1 + np.exp(-x)) naive_log_logistic = lambda x: np.log(naive_logistic(x)) x = np.linspace(-2, 2, 50) assert_array_almost_equal(log_logistic(x), naive_log_logistic(x)) extreme_x = np.array([-100., 100.]) assert_array_almost_equal(log_logistic(extreme_x), [-100, 0]) def test_fast_dot(): # Check fast dot blas wrapper function if fast_dot is np.dot: return rng = np.random.RandomState(42) A = rng.random_sample([2, 10]) B = rng.random_sample([2, 10]) try: linalg.get_blas_funcs(['gemm'])[0] has_blas = True except (AttributeError, ValueError): has_blas = False if has_blas: # Test _fast_dot for invalid input. # Maltyped data. for dt1, dt2 in [['f8', 'f4'], ['i4', 'i4']]: assert_raises(ValueError, _fast_dot, A.astype(dt1), B.astype(dt2).T) # Malformed data. # ndim == 0 E = np.empty(0) assert_raises(ValueError, _fast_dot, E, E) # ndim == 1 assert_raises(ValueError, _fast_dot, A, A[0]) # ndim > 2 assert_raises(ValueError, _fast_dot, A.T, np.array([A, A])) # min(shape) == 1 assert_raises(ValueError, _fast_dot, A, A[0, :][None, :]) # test for matrix mismatch error assert_raises(ValueError, _fast_dot, A, A) # Test cov-like use case + dtypes. for dtype in ['f8', 'f4']: A = A.astype(dtype) B = B.astype(dtype) # col < row C = np.dot(A.T, A) C_ = fast_dot(A.T, A) assert_almost_equal(C, C_, decimal=5) C = np.dot(A.T, B) C_ = fast_dot(A.T, B) assert_almost_equal(C, C_, decimal=5) C = np.dot(A, B.T) C_ = fast_dot(A, B.T) assert_almost_equal(C, C_, decimal=5) # Test square matrix * rectangular use case. A = rng.random_sample([2, 2]) for dtype in ['f8', 'f4']: A = A.astype(dtype) B = B.astype(dtype) C = np.dot(A, B) C_ = fast_dot(A, B) assert_almost_equal(C, C_, decimal=5) C = np.dot(A.T, B) C_ = fast_dot(A.T, B) assert_almost_equal(C, C_, decimal=5) if has_blas: for x in [np.array([[d] * 10] * 2) for d in [np.inf, np.nan]]: assert_raises(ValueError, _fast_dot, x, x.T) def test_incremental_variance_update_formulas(): # Test Youngs and Cramer incremental variance formulas. 
# Doggie data from http://www.mathsisfun.com/data/standard-deviation.html A = np.array([[600, 470, 170, 430, 300], [600, 470, 170, 430, 300], [600, 470, 170, 430, 300], [600, 470, 170, 430, 300]]).T idx = 2 X1 = A[:idx, :] X2 = A[idx:, :] old_means = X1.mean(axis=0) old_variances = X1.var(axis=0) old_sample_count = X1.shape[0] final_means, final_variances, final_count = \ _incremental_mean_and_var(X2, old_means, old_variances, old_sample_count) assert_almost_equal(final_means, A.mean(axis=0), 6) assert_almost_equal(final_variances, A.var(axis=0), 6) assert_almost_equal(final_count, A.shape[0]) @skip_if_32bit def test_incremental_variance_numerical_stability(): # Test Youngs and Cramer incremental variance formulas. def np_var(A): return A.var(axis=0) # Naive one pass variance computation - not numerically stable # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance def one_pass_var(X): n = X.shape[0] exp_x2 = (X ** 2).sum(axis=0) / n expx_2 = (X.sum(axis=0) / n) ** 2 return exp_x2 - expx_2 # Two-pass algorithm, stable. # We use it as a benchmark. It is not an online algorithm # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Two-pass_algorithm def two_pass_var(X): mean = X.mean(axis=0) Y = X.copy() return np.mean((Y - mean)**2, axis=0) # Naive online implementation # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm # This works only for chunks for size 1 def naive_mean_variance_update(x, last_mean, last_variance, last_sample_count): updated_sample_count = (last_sample_count + 1) samples_ratio = last_sample_count / float(updated_sample_count) updated_mean = x / updated_sample_count + last_mean * samples_ratio updated_variance = last_variance * samples_ratio + \ (x - last_mean) * (x - updated_mean) / updated_sample_count return updated_mean, updated_variance, updated_sample_count # We want to show a case when one_pass_var has error > 1e-3 while # _batch_mean_variance_update has less. tol = 200 n_features = 2 n_samples = 10000 x1 = np.array(1e8, dtype=np.float64) x2 = np.log(1e-5, dtype=np.float64) A0 = x1 * np.ones((n_samples // 2, n_features), dtype=np.float64) A1 = x2 * np.ones((n_samples // 2, n_features), dtype=np.float64) A = np.vstack((A0, A1)) # Older versions of numpy have different precision # In some old version, np.var is not stable if np.abs(np_var(A) - two_pass_var(A)).max() < 1e-6: stable_var = np_var else: stable_var = two_pass_var # Naive one pass var: >tol (=1063) assert_greater(np.abs(stable_var(A) - one_pass_var(A)).max(), tol) # Starting point for online algorithms: after A0 # Naive implementation: >tol (436) mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2 for i in range(A1.shape[0]): mean, var, n = \ naive_mean_variance_update(A1[i, :], mean, var, n) assert_equal(n, A.shape[0]) # the mean is also slightly unstable assert_greater(np.abs(A.mean(axis=0) - mean).max(), 1e-6) assert_greater(np.abs(stable_var(A) - var).max(), tol) # Robust implementation: <tol (177) mean, var, n = A0[0, :], np.zeros(n_features), n_samples // 2 for i in range(A1.shape[0]): mean, var, n = \ _incremental_mean_and_var(A1[i, :].reshape((1, A1.shape[1])), mean, var, n) assert_equal(n, A.shape[0]) assert_array_almost_equal(A.mean(axis=0), mean) assert_greater(tol, np.abs(stable_var(A) - var).max()) def test_incremental_variance_ddof(): # Test that degrees of freedom parameter for calculations are correct. 
rng = np.random.RandomState(1999) X = rng.randn(50, 10) n_samples, n_features = X.shape for batch_size in [11, 20, 37]: steps = np.arange(0, X.shape[0], batch_size) if steps[-1] != X.shape[0]: steps = np.hstack([steps, n_samples]) for i, j in zip(steps[:-1], steps[1:]): batch = X[i:j, :] if i == 0: incremental_means = batch.mean(axis=0) incremental_variances = batch.var(axis=0) # Assign this twice so that the test logic is consistent incremental_count = batch.shape[0] sample_count = batch.shape[0] else: result = _incremental_mean_and_var( batch, incremental_means, incremental_variances, sample_count) (incremental_means, incremental_variances, incremental_count) = result sample_count += batch.shape[0] calculated_means = np.mean(X[:j], axis=0) calculated_variances = np.var(X[:j], axis=0) assert_almost_equal(incremental_means, calculated_means, 6) assert_almost_equal(incremental_variances, calculated_variances, 6) assert_equal(incremental_count, sample_count) def test_vector_sign_flip(): # Testing that sign flip is working & largest value has positive sign data = np.random.RandomState(36).randn(5, 5) max_abs_rows = np.argmax(np.abs(data), axis=1) data_flipped = _deterministic_vector_sign_flip(data) max_rows = np.argmax(data_flipped, axis=1) assert_array_equal(max_abs_rows, max_rows) signs = np.sign(data[range(data.shape[0]), max_abs_rows]) assert_array_equal(data, data_flipped * signs[:, np.newaxis]) def test_softmax(): rng = np.random.RandomState(0) X = rng.randn(3, 5) exp_X = np.exp(X) sum_exp_X = np.sum(exp_X, axis=1).reshape((-1, 1)) assert_array_almost_equal(softmax(X), exp_X / sum_exp_X)
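# --- Editorial sketch, not part of the original tests. Every randomized-SVD
# check above follows the same pattern: compare the top-k singular values from
# sklearn.utils.extmath.randomized_svd against an exact LAPACK SVD on a
# structured low-rank matrix. A self-contained version of that comparison:
def _sketch_randomized_svd_check():
    import numpy as np
    from scipy import linalg
    from sklearn.utils.extmath import randomized_svd
    from sklearn.datasets.samples_generator import make_low_rank_matrix
    X = make_low_rank_matrix(n_samples=100, n_features=50, effective_rank=5,
                             tail_strength=0.0, random_state=0)
    _, s_exact, _ = linalg.svd(X, full_matrices=False)
    # n_iter > 0 adds power iterations, which tighten the approximation
    _, s_approx, _ = randomized_svd(X, 10, n_iter=5, random_state=0)
    return np.abs(s_exact[:10] - s_approx).max()  # near zero for this input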
bsd-3-clause
great-expectations/great_expectations
tests/integration/fixtures/yellow_trip_data_pandas_fixture/two_batch_requests_two_validators.py
1
1358
from great_expectations.core.batch import BatchRequest
from great_expectations.data_context.data_context import DataContext
from great_expectations.validator.validation_graph import MetricConfiguration

context = DataContext()
suite = context.get_expectation_suite("yellow_trip_data_validations")

# Get February BatchRequest and Validator
batch_request_february = BatchRequest(
    datasource_name="taxi_pandas",
    data_connector_name="monthly",
    data_asset_name="my_reports",
    data_connector_query={"index": -2},
)
validator_february = context.get_validator(
    batch_request=batch_request_february, expectation_suite=suite
)

# Get the table row count for February
february_table_row_count = validator_february.get_metric(
    MetricConfiguration("table.row_count", metric_domain_kwargs={})
)

# Get March BatchRequest and Validator
batch_request_march = BatchRequest(
    datasource_name="taxi_pandas",
    data_connector_name="monthly",
    data_asset_name="my_reports",
    data_connector_query={"index": -1},
)
validator_march = context.get_validator(
    batch_request=batch_request_march, expectation_suite=suite
)

# Create a row count expectation based on the February row count, and validate
# it against the March row count
result = validator_march.expect_table_row_count_to_equal(value=february_table_row_count)
assert result["success"]
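# --- Editorial sketch, not in the original fixture. The pattern above (read a
# metric from one batch, then validate it against the next batch) generalizes
# directly; `_expect_same_row_count` is a hypothetical helper built only from
# calls already used in this file.
def _expect_same_row_count(context, suite, earlier_index, later_index):
    def validator_for(index):
        request = BatchRequest(
            datasource_name="taxi_pandas",
            data_connector_name="monthly",
            data_asset_name="my_reports",
            data_connector_query={"index": index},
        )
        return context.get_validator(batch_request=request, expectation_suite=suite)

    count = validator_for(earlier_index).get_metric(
        MetricConfiguration("table.row_count", metric_domain_kwargs={})
    )
    return validator_for(later_index).expect_table_row_count_to_equal(value=count)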
apache-2.0
cheeseywhiz/cheeseywhiz
AvgImg/imglib.py
1
6217
import os as _os import sys as _sys from matplotlib import pyplot as _plt def load_global(inst): global g g = inst class ImageIter: def __init__(self, fname=None, px_map=None): if fname is not None: self.px_map = _plt.imread(fname) elif px_map is not None: self.px_map = px_map self.width, self.height, self.color_len = self.px_map.shape def __getitem__(self, row): return self.px_map[row] def __iter__(self): return ( (row, col, self.__getitem__(row)[col]) for row in range(self.width) for col in range(self.height) ) def colormaps(): try: _plt.get_cmap('') except ValueError as error: cmaps_str = sorted(str(error).split(':')[1][1:].split(', ')) row_length = 0 row = [] rows = [] for cmap in cmaps_str: row_length += len(cmap + ', ') if row_length < 80: row.append(cmap) else: rows.append( ', '.join(row) + ',' ) row.clear() row.append(cmap) row_length = len(cmap + ', ') return '\n'.join(rows)[:-1] def parse_argv(argv, good_flags): """\ argv is the list of arguments passed into the command line. good_flags is a list of expected flags to parse and is used to build the initial structure. Return a dictionary with the following structure: { '-flag1': value, '-flag2': value, '+flag3': value, ..., 'positional' : ('python3', ...) # e.g. } Each flag's value starts off as False. If the flag was not specified, the value in the dictionary stays as False. If the flag was specified with an option, (e.g. -f=123) the option (e.g. 123) is stored under the flag in the dictionary. If the flag was specified without an option (e.g. -h), None is stored in the dictionary. 'positional' holds user-specified arguments that did not start with + or - while retaining the order they were specified in the command.\ """ options = { key: False for key in good_flags } positional = [] for arg in argv: if arg[0] in ['-', '+']: if '=' in arg: arg, value = arg.split('=') options[arg] = value else: options[arg] = None if arg not in good_flags: _sys.exit(f'Unknown flag {arg}') else: positional.append(arg) options['positional'] = tuple(positional) return options def trim(argv): msgs = [] verbose = False good_flags = [ '-h', '-hc', '-avg', '-c', '-i', '-o', '+o', '-s', '-v', ] flags = parse_argv(argv, good_flags) if flags['-h'] is None or len(argv) == 1: print(g.__DOC__) _sys.exit(0) if flags['-hc'] is None: print(colormaps()) _sys.exit(0) if flags['-avg']: avg_func_name = flags['-avg'] msgs.append(f'With {avg_func_name} as average') elif flags['-avg'] is None: _sys.exit('Missing required -avg option') elif not flags['-avg']: avg_func_name = 'mean' if flags['-c']: cmap_name = flags['-c'] g.CMAP = cmap_name g.GRAYSCALE = True msgs.append(f'With color map {cmap_name}') elif flags['-c'] is None: _sys.exit('Missing required -c option') elif not flags['-c']: # Defaults set in global pass if flags['-i'] is None: g.INVERT = not g.INVERT msgs.append(f'{"With" if g.INVERT else "Without"} inverted colors') elif not flags['-i']: # Default set in global pass if flags['-o']: fname = _os.path.abspath(_os.path.expanduser(flags['-o'])) msgs.append(f'Output image at {fname}') elif flags['-o'] is None: _sys.exit('Missing required output filename') elif not flags['-o']: fname = _os.path.abspath('output.png') if flags['+o'] is None: fname = '/tmp/output.png' elif not flags['+o']: pass if flags['-s']: exit_func_name = flags['-s'] msgs.append(f'Opening with {exit_func_name}') elif flags['-s'] is None: _sys.exit('Missing required -s option') elif not flags['-s']: exit_func_name = 'done' if flags['-v'] is None: verbose = True msgs.insert(0, 'Parsed options:') 
avg_func = getattr(_Avg, avg_func_name) exit_func = getattr(_Exit, exit_func_name) try: given_dir = flags['positional'][1] except IndexError: _sys.exit('Missing required source directory') imgs_dir = _os.path.abspath(_os.path.expanduser(given_dir)) msgs.append(f'Averaging {imgs_dir}/*') if verbose: print(msgs[0]) for msg in msgs[1:]: print(4 * ' ' + msg) return imgs_dir, avg_func, fname, exit_func def _average_f(f): def if_invert(num): if g.INVERT: return int(256 - num) else: return int(num) def if_grayscale(nums): if g.GRAYSCALE: return sum(nums) / len(nums) else: return nums def decorated_f(tuples, *args, **kwargs): def avg(tuples, i): return f([tup[i] for tup in tuples], *args, **kwargs) return if_grayscale([ if_invert(avg(tuples, i)) for i in range(len(tuples[0])) ]) return decorated_f class _Avg: @staticmethod @_average_f def mean(nums): return sum(nums) / len(nums) @staticmethod @_average_f def median(nums): return sorted(nums)[len(nums) // 2] @staticmethod @_average_f def max(nums): return max(nums) @staticmethod @_average_f def min(nums): return min(nums) class _Exit: @staticmethod def feh(fname=None, **kwargs): _Exit.done() from subprocess import run run(f'feh {fname}', shell=True) @staticmethod def mpl(array=None, **kwargs): _Exit.done() _plt.imshow(array, cmap=g.CMAP) _plt.show() @staticmethod def done(**kwargs): pass # print('Done')
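# --- Editorial sketch, not in the original module: what parse_argv returns for
# a representative command line, per its docstring above. The flag names used
# here are an illustrative subset of good_flags.
def _sketch_parse_argv_usage():
    flags = parse_argv(['avgimg', 'imgs/', '-c=viridis', '-v', '-o=out.png'],
                       ['-c', '-v', '-o', '-h'])
    assert flags['-c'] == 'viridis'             # flag given with an option
    assert flags['-v'] is None                  # flag given without an option
    assert flags['-h'] is False                 # flag not given at all
    assert flags['positional'] == ('avgimg', 'imgs/')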
mit
rishikksh20/scikit-learn
sklearn/linear_model/setup.py
83
1719
import os
from os.path import join

import numpy

from sklearn._build_utils import get_blas_info


def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('linear_model', parent_package, top_path)

    cblas_libs, blas_info = get_blas_info()

    if os.name == 'posix':
        cblas_libs.append('m')

    config.add_extension('cd_fast',
                         sources=['cd_fast.pyx'],
                         libraries=cblas_libs,
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info)

    config.add_extension('sgd_fast',
                         sources=['sgd_fast.pyx'],
                         include_dirs=[join('..', 'src', 'cblas'),
                                       numpy.get_include(),
                                       blas_info.pop('include_dirs', [])],
                         libraries=cblas_libs,
                         extra_compile_args=blas_info.pop('extra_compile_args', []),
                         **blas_info)

    config.add_extension('sag_fast',
                         sources=['sag_fast.pyx'],
                         include_dirs=numpy.get_include())

    # add other directories
    config.add_subpackage('tests')

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
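# --- Editorial sketch, not in the original file. numpy.distutils lets you
# inspect the configuration this module builds without compiling anything; the
# attribute names below (todict()['ext_modules'], Extension.name) are
# assumptions about the numpy.distutils internals of this era.
def _sketch_inspect_config():
    cfg = configuration(top_path='').todict()
    return sorted(ext.name for ext in cfg['ext_modules'])  # cd_fast, sag_fast, sgd_fast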
bsd-3-clause
rmcgibbo/scipy
scipy/signal/fir_filter_design.py
25
20184
"""Functions for FIR filter design.""" from __future__ import division, print_function, absolute_import from math import ceil, log import numpy as np from numpy.fft import irfft from scipy.special import sinc from . import sigtools __all__ = ['kaiser_beta', 'kaiser_atten', 'kaiserord', 'firwin', 'firwin2', 'remez'] # Some notes on function parameters: # # `cutoff` and `width` are given as a numbers between 0 and 1. These # are relative frequencies, expressed as a fraction of the Nyquist rate. # For example, if the Nyquist rate is 2KHz, then width=0.15 is a width # of 300 Hz. # # The `order` of a FIR filter is one less than the number of taps. # This is a potential source of confusion, so in the following code, # we will always use the number of taps as the parameterization of # the 'size' of the filter. The "number of taps" means the number # of coefficients, which is the same as the length of the impulse # response of the filter. def kaiser_beta(a): """Compute the Kaiser parameter `beta`, given the attenuation `a`. Parameters ---------- a : float The desired attenuation in the stopband and maximum ripple in the passband, in dB. This should be a *positive* number. Returns ------- beta : float The `beta` parameter to be used in the formula for a Kaiser window. References ---------- Oppenheim, Schafer, "Discrete-Time Signal Processing", p.475-476. """ if a > 50: beta = 0.1102 * (a - 8.7) elif a > 21: beta = 0.5842 * (a - 21) ** 0.4 + 0.07886 * (a - 21) else: beta = 0.0 return beta def kaiser_atten(numtaps, width): """Compute the attenuation of a Kaiser FIR filter. Given the number of taps `N` and the transition width `width`, compute the attenuation `a` in dB, given by Kaiser's formula: a = 2.285 * (N - 1) * pi * width + 7.95 Parameters ---------- numtaps : int The number of taps in the FIR filter. width : float The desired width of the transition region between passband and stopband (or, in general, at any discontinuity) for the filter. Returns ------- a : float The attenuation of the ripple, in dB. See Also -------- kaiserord, kaiser_beta """ a = 2.285 * (numtaps - 1) * np.pi * width + 7.95 return a def kaiserord(ripple, width): """ Design a Kaiser window to limit ripple and width of transition region. Parameters ---------- ripple : float Positive number specifying maximum ripple in passband (dB) and minimum ripple in stopband. width : float Width of transition region (normalized so that 1 corresponds to pi radians / sample). Returns ------- numtaps : int The length of the kaiser window. beta : float The beta parameter for the kaiser window. See Also -------- kaiser_beta, kaiser_atten Notes ----- There are several ways to obtain the Kaiser window: - ``signal.kaiser(numtaps, beta, sym=0)`` - ``signal.get_window(beta, numtaps)`` - ``signal.get_window(('kaiser', beta), numtaps)`` The empirical equations discovered by Kaiser are used. References ---------- Oppenheim, Schafer, "Discrete-Time Signal Processing", p.475-476. """ A = abs(ripple) # in case somebody is confused as to what's meant if A < 8: # Formula for N is not valid in this range. raise ValueError("Requested maximum ripple attentuation %f is too " "small for the Kaiser formula." % A) beta = kaiser_beta(A) # Kaiser's formula (as given in Oppenheim and Schafer) is for the filter # order, so we have to add 1 to get the number of taps. 
numtaps = (A - 7.95) / 2.285 / (np.pi * width) + 1 return int(ceil(numtaps)), beta def firwin(numtaps, cutoff, width=None, window='hamming', pass_zero=True, scale=True, nyq=1.0): """ FIR filter design using the window method. This function computes the coefficients of a finite impulse response filter. The filter will have linear phase; it will be Type I if `numtaps` is odd and Type II if `numtaps` is even. Type II filters always have zero response at the Nyquist rate, so a ValueError exception is raised if firwin is called with `numtaps` even and having a passband whose right end is at the Nyquist rate. Parameters ---------- numtaps : int Length of the filter (number of coefficients, i.e. the filter order + 1). `numtaps` must be even if a passband includes the Nyquist frequency. cutoff : float or 1D array_like Cutoff frequency of filter (expressed in the same units as `nyq`) OR an array of cutoff frequencies (that is, band edges). In the latter case, the frequencies in `cutoff` should be positive and monotonically increasing between 0 and `nyq`. The values 0 and `nyq` must not be included in `cutoff`. width : float or None, optional If `width` is not None, then assume it is the approximate width of the transition region (expressed in the same units as `nyq`) for use in Kaiser FIR filter design. In this case, the `window` argument is ignored. window : string or tuple of string and parameter values, optional Desired window to use. See `scipy.signal.get_window` for a list of windows and required parameters. pass_zero : bool, optional If True, the gain at the frequency 0 (i.e. the "DC gain") is 1. Otherwise the DC gain is 0. scale : bool, optional Set to True to scale the coefficients so that the frequency response is exactly unity at a certain frequency. That frequency is either: - 0 (DC) if the first passband starts at 0 (i.e. pass_zero is True) - `nyq` (the Nyquist rate) if the first passband ends at `nyq` (i.e the filter is a single band highpass filter); center of first passband otherwise nyq : float, optional Nyquist frequency. Each frequency in `cutoff` must be between 0 and `nyq`. Returns ------- h : (numtaps,) ndarray Coefficients of length `numtaps` FIR filter. Raises ------ ValueError If any value in `cutoff` is less than or equal to 0 or greater than or equal to `nyq`, if the values in `cutoff` are not strictly monotonically increasing, or if `numtaps` is even but a passband includes the Nyquist frequency. See also -------- scipy.signal.firwin2 Examples -------- Low-pass from 0 to f:: >>> from scipy import signal >>> signal.firwin(numtaps, f) Use a specific window function:: >>> signal.firwin(numtaps, f, window='nuttall') High-pass ('stop' from 0 to f):: >>> signal.firwin(numtaps, f, pass_zero=False) Band-pass:: >>> signal.firwin(numtaps, [f1, f2], pass_zero=False) Band-stop:: >>> signal.firwin(numtaps, [f1, f2]) Multi-band (passbands are [0, f1], [f2, f3] and [f4, 1]):: >>> signal.firwin(numtaps, [f1, f2, f3, f4]) Multi-band (passbands are [f1, f2] and [f3,f4]):: >>> signal.firwin(numtaps, [f1, f2, f3, f4], pass_zero=False) """ # The major enhancements to this function added in November 2010 were # developed by Tom Krauss (see ticket #902). cutoff = np.atleast_1d(cutoff) / float(nyq) # Check for invalid input. 
if cutoff.ndim > 1: raise ValueError("The cutoff argument must be at most " "one-dimensional.") if cutoff.size == 0: raise ValueError("At least one cutoff frequency must be given.") if cutoff.min() <= 0 or cutoff.max() >= 1: raise ValueError("Invalid cutoff frequency: frequencies must be " "greater than 0 and less than nyq.") if np.any(np.diff(cutoff) <= 0): raise ValueError("Invalid cutoff frequencies: the frequencies " "must be strictly increasing.") if width is not None: # A width was given. Find the beta parameter of the Kaiser window # and set `window`. This overrides the value of `window` passed in. atten = kaiser_atten(numtaps, float(width) / nyq) beta = kaiser_beta(atten) window = ('kaiser', beta) pass_nyquist = bool(cutoff.size & 1) ^ pass_zero if pass_nyquist and numtaps % 2 == 0: raise ValueError("A filter with an even number of coefficients must " "have zero response at the Nyquist rate.") # Insert 0 and/or 1 at the ends of cutoff so that the length of cutoff # is even, and each pair in cutoff corresponds to passband. cutoff = np.hstack(([0.0] * pass_zero, cutoff, [1.0] * pass_nyquist)) # `bands` is a 2D array; each row gives the left and right edges of # a passband. bands = cutoff.reshape(-1, 2) # Build up the coefficients. alpha = 0.5 * (numtaps - 1) m = np.arange(0, numtaps) - alpha h = 0 for left, right in bands: h += right * sinc(right * m) h -= left * sinc(left * m) # Get and apply the window function. from .signaltools import get_window win = get_window(window, numtaps, fftbins=False) h *= win # Now handle scaling if desired. if scale: # Get the first passband. left, right = bands[0] if left == 0: scale_frequency = 0.0 elif right == 1: scale_frequency = 1.0 else: scale_frequency = 0.5 * (left + right) c = np.cos(np.pi * m * scale_frequency) s = np.sum(h * c) h /= s return h # Original version of firwin2 from scipy ticket #457, submitted by "tash". # # Rewritten by Warren Weckesser, 2010. def firwin2(numtaps, freq, gain, nfreqs=None, window='hamming', nyq=1.0, antisymmetric=False): """ FIR filter design using the window method. From the given frequencies `freq` and corresponding gains `gain`, this function constructs an FIR filter with linear phase and (approximately) the given frequency response. Parameters ---------- numtaps : int The number of taps in the FIR filter. `numtaps` must be less than `nfreqs`. freq : array_like, 1D The frequency sampling points. Typically 0.0 to 1.0 with 1.0 being Nyquist. The Nyquist frequency can be redefined with the argument `nyq`. The values in `freq` must be nondecreasing. A value can be repeated once to implement a discontinuity. The first value in `freq` must be 0, and the last value must be `nyq`. gain : array_like The filter gains at the frequency sampling points. Certain constraints to gain values, depending on the filter type, are applied, see Notes for details. nfreqs : int, optional The size of the interpolation mesh used to construct the filter. For most efficient behavior, this should be a power of 2 plus 1 (e.g, 129, 257, etc). The default is one more than the smallest power of 2 that is not less than `numtaps`. `nfreqs` must be greater than `numtaps`. window : string or (string, float) or float, or None, optional Window function to use. Default is "hamming". See `scipy.signal.get_window` for the complete list of possible values. If None, no window function is applied. nyq : float, optional Nyquist frequency. Each frequency in `freq` must be between 0 and `nyq` (inclusive). 
antisymmetric : bool, optional Whether resulting impulse response is symmetric/antisymmetric. See Notes for more details. Returns ------- taps : ndarray The filter coefficients of the FIR filter, as a 1-D array of length `numtaps`. See also -------- scipy.signal.firwin Notes ----- From the given set of frequencies and gains, the desired response is constructed in the frequency domain. The inverse FFT is applied to the desired response to create the associated convolution kernel, and the first `numtaps` coefficients of this kernel, scaled by `window`, are returned. The FIR filter will have linear phase. The type of filter is determined by the value of 'numtaps` and `antisymmetric` flag. There are four possible combinations: - odd `numtaps`, `antisymmetric` is False, type I filter is produced - even `numtaps`, `antisymmetric` is False, type II filter is produced - odd `numtaps`, `antisymmetric` is True, type III filter is produced - even `numtaps`, `antisymmetric` is True, type IV filter is produced Magnitude response of all but type I filters are subjects to following constraints: - type II -- zero at the Nyquist frequency - type III -- zero at zero and Nyquist frequencies - type IV -- zero at zero frequency .. versionadded:: 0.9.0 References ---------- .. [1] Oppenheim, A. V. and Schafer, R. W., "Discrete-Time Signal Processing", Prentice-Hall, Englewood Cliffs, New Jersey (1989). (See, for example, Section 7.4.) .. [2] Smith, Steven W., "The Scientist and Engineer's Guide to Digital Signal Processing", Ch. 17. http://www.dspguide.com/ch17/1.htm Examples -------- A lowpass FIR filter with a response that is 1 on [0.0, 0.5], and that decreases linearly on [0.5, 1.0] from 1 to 0: >>> from scipy import signal >>> taps = signal.firwin2(150, [0.0, 0.5, 1.0], [1.0, 1.0, 0.0]) >>> print(taps[72:78]) [-0.02286961 -0.06362756 0.57310236 0.57310236 -0.06362756 -0.02286961] """ if len(freq) != len(gain): raise ValueError('freq and gain must be of same length.') if nfreqs is not None and numtaps >= nfreqs: raise ValueError(('ntaps must be less than nfreqs, but firwin2 was ' 'called with ntaps=%d and nfreqs=%s') % (numtaps, nfreqs)) if freq[0] != 0 or freq[-1] != nyq: raise ValueError('freq must start with 0 and end with `nyq`.') d = np.diff(freq) if (d < 0).any(): raise ValueError('The values in freq must be nondecreasing.') d2 = d[:-1] + d[1:] if (d2 == 0).any(): raise ValueError('A value in freq must not occur more than twice.') if antisymmetric: if numtaps % 2 == 0: ftype = 4 else: ftype = 3 else: if numtaps % 2 == 0: ftype = 2 else: ftype = 1 if ftype == 2 and gain[-1] != 0.0: raise ValueError("A Type II filter must have zero gain at the " "Nyquist rate.") elif ftype == 3 and (gain[0] != 0.0 or gain[-1] != 0.0): raise ValueError("A Type III filter must have zero gain at zero " "and Nyquist rates.") elif ftype == 4 and gain[0] != 0.0: raise ValueError("A Type IV filter must have zero gain at zero rate.") if nfreqs is None: nfreqs = 1 + 2 ** int(ceil(log(numtaps, 2))) # Tweak any repeated values in freq so that interp works. eps = np.finfo(float).eps for k in range(len(freq)): if k < len(freq) - 1 and freq[k] == freq[k + 1]: freq[k] = freq[k] - eps freq[k + 1] = freq[k + 1] + eps # Linearly interpolate the desired response on a uniform mesh `x`. x = np.linspace(0.0, nyq, nfreqs) fx = np.interp(x, freq, gain) # Adjust the phases of the coefficients so that the first `ntaps` of the # inverse FFT are the desired filter coefficients. shift = np.exp(-(numtaps - 1) / 2. 
* 1.j * np.pi * x / nyq) if ftype > 2: shift *= 1j fx2 = fx * shift # Use irfft to compute the inverse FFT. out_full = irfft(fx2) if window is not None: # Create the window to apply to the filter coefficients. from .signaltools import get_window wind = get_window(window, numtaps, fftbins=False) else: wind = 1 # Keep only the first `numtaps` coefficients in `out`, and multiply by # the window. out = out_full[:numtaps] * wind if ftype == 3: out[out.size // 2] = 0.0 return out def remez(numtaps, bands, desired, weight=None, Hz=1, type='bandpass', maxiter=25, grid_density=16): """ Calculate the minimax optimal filter using the Remez exchange algorithm. Calculate the filter-coefficients for the finite impulse response (FIR) filter whose transfer function minimizes the maximum error between the desired gain and the realized gain in the specified frequency bands using the Remez exchange algorithm. Parameters ---------- numtaps : int The desired number of taps in the filter. The number of taps is the number of terms in the filter, or the filter order plus one. bands : array_like A monotonic sequence containing the band edges in Hz. All elements must be non-negative and less than half the sampling frequency as given by `Hz`. desired : array_like A sequence half the size of bands containing the desired gain in each of the specified bands. weight : array_like, optional A relative weighting to give to each band region. The length of `weight` has to be half the length of `bands`. Hz : scalar, optional The sampling frequency in Hz. Default is 1. type : {'bandpass', 'differentiator', 'hilbert'}, optional The type of filter: 'bandpass' : flat response in bands. This is the default. 'differentiator' : frequency proportional response in bands. 'hilbert' : filter with odd symmetry, that is, type III (for even order) or type IV (for odd order) linear phase filters. maxiter : int, optional Maximum number of iterations of the algorithm. Default is 25. grid_density : int, optional Grid density. The dense grid used in `remez` is of size ``(numtaps + 1) * grid_density``. Default is 16. Returns ------- out : ndarray A rank-1 array containing the coefficients of the optimal (in a minimax sense) filter. See Also -------- freqz : Compute the frequency response of a digital filter. References ---------- .. [1] J. H. McClellan and T. W. Parks, "A unified approach to the design of optimum FIR linear phase digital filters", IEEE Trans. Circuit Theory, vol. CT-20, pp. 697-701, 1973. .. [2] J. H. McClellan, T. W. Parks and L. R. Rabiner, "A Computer Program for Designing Optimum FIR Linear Phase Digital Filters", IEEE Trans. Audio Electroacoust., vol. AU-21, pp. 506-525, 1973. Examples -------- We want to construct a filter with a passband at 0.2-0.4 Hz, and stop bands at 0-0.1 Hz and 0.45-0.5 Hz. Note that this means that the behavior in the frequency ranges between those bands is unspecified and may overshoot. 
>>> from scipy import signal >>> bpass = signal.remez(72, [0, 0.1, 0.2, 0.4, 0.45, 0.5], [0, 1, 0]) >>> freq, response = signal.freqz(bpass) >>> ampl = np.abs(response) >>> import matplotlib.pyplot as plt >>> fig = plt.figure() >>> ax1 = fig.add_subplot(111) >>> ax1.semilogy(freq/(2*np.pi), ampl, 'b-') # freq in Hz >>> plt.show() """ # Convert type try: tnum = {'bandpass': 1, 'differentiator': 2, 'hilbert': 3}[type] except KeyError: raise ValueError("Type must be 'bandpass', 'differentiator', " "or 'hilbert'") # Convert weight if weight is None: weight = [1] * len(desired) bands = np.asarray(bands).copy() return sigtools._remez(numtaps, bands, desired, weight, tnum, Hz, maxiter, grid_density)
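# --- Editorial sketch, not in the original module: the intended pairing of
# kaiserord and firwin described in the docstrings above. The ripple, width and
# cutoff values are illustrative only.
def _sketch_kaiser_lowpass():
    from scipy.signal import kaiserord, firwin
    # 65 dB of stopband attenuation, transition width 0.1 (fraction of Nyquist)
    numtaps, beta = kaiserord(ripple=65, width=0.1)
    # Low-pass with cutoff at 0.3 * Nyquist, using the Kaiser window found above
    return firwin(numtaps, 0.3, window=('kaiser', beta))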
bsd-3-clause
PatrickOReilly/scikit-learn
sklearn/linear_model/tests/test_passive_aggressive.py
169
8809
import numpy as np import scipy.sparse as sp from sklearn.utils.testing import assert_less from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_array_almost_equal, assert_array_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_raises from sklearn.base import ClassifierMixin from sklearn.utils import check_random_state from sklearn.datasets import load_iris from sklearn.linear_model import PassiveAggressiveClassifier from sklearn.linear_model import PassiveAggressiveRegressor iris = load_iris() random_state = check_random_state(12) indices = np.arange(iris.data.shape[0]) random_state.shuffle(indices) X = iris.data[indices] y = iris.target[indices] X_csr = sp.csr_matrix(X) class MyPassiveAggressive(ClassifierMixin): def __init__(self, C=1.0, epsilon=0.01, loss="hinge", fit_intercept=True, n_iter=1, random_state=None): self.C = C self.epsilon = epsilon self.loss = loss self.fit_intercept = fit_intercept self.n_iter = n_iter def fit(self, X, y): n_samples, n_features = X.shape self.w = np.zeros(n_features, dtype=np.float64) self.b = 0.0 for t in range(self.n_iter): for i in range(n_samples): p = self.project(X[i]) if self.loss in ("hinge", "squared_hinge"): loss = max(1 - y[i] * p, 0) else: loss = max(np.abs(p - y[i]) - self.epsilon, 0) sqnorm = np.dot(X[i], X[i]) if self.loss in ("hinge", "epsilon_insensitive"): step = min(self.C, loss / sqnorm) elif self.loss in ("squared_hinge", "squared_epsilon_insensitive"): step = loss / (sqnorm + 1.0 / (2 * self.C)) if self.loss in ("hinge", "squared_hinge"): step *= y[i] else: step *= np.sign(y[i] - p) self.w += step * X[i] if self.fit_intercept: self.b += step def project(self, X): return np.dot(X, self.w) + self.b def test_classifier_accuracy(): for data in (X, X_csr): for fit_intercept in (True, False): clf = PassiveAggressiveClassifier(C=1.0, n_iter=30, fit_intercept=fit_intercept, random_state=0) clf.fit(data, y) score = clf.score(data, y) assert_greater(score, 0.79) def test_classifier_partial_fit(): classes = np.unique(y) for data in (X, X_csr): clf = PassiveAggressiveClassifier(C=1.0, fit_intercept=True, random_state=0) for t in range(30): clf.partial_fit(data, y, classes) score = clf.score(data, y) assert_greater(score, 0.79) def test_classifier_refit(): # Classifier can be retrained on different labels and features. clf = PassiveAggressiveClassifier().fit(X, y) assert_array_equal(clf.classes_, np.unique(y)) clf.fit(X[:, :-1], iris.target_names[y]) assert_array_equal(clf.classes_, iris.target_names) def test_classifier_correctness(): y_bin = y.copy() y_bin[y != 1] = -1 for loss in ("hinge", "squared_hinge"): clf1 = MyPassiveAggressive(C=1.0, loss=loss, fit_intercept=True, n_iter=2) clf1.fit(X, y_bin) for data in (X, X_csr): clf2 = PassiveAggressiveClassifier(C=1.0, loss=loss, fit_intercept=True, n_iter=2, shuffle=False) clf2.fit(data, y_bin) assert_array_almost_equal(clf1.w, clf2.coef_.ravel(), decimal=2) def test_classifier_undefined_methods(): clf = PassiveAggressiveClassifier() for meth in ("predict_proba", "predict_log_proba", "transform"): assert_raises(AttributeError, lambda x: getattr(clf, x), meth) def test_class_weights(): # Test class weights. 
X2 = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0], [1.0, 0.0]]) y2 = [1, 1, 1, -1, -1] clf = PassiveAggressiveClassifier(C=0.1, n_iter=100, class_weight=None, random_state=100) clf.fit(X2, y2) assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1])) # we give a small weights to class 1 clf = PassiveAggressiveClassifier(C=0.1, n_iter=100, class_weight={1: 0.001}, random_state=100) clf.fit(X2, y2) # now the hyperplane should rotate clock-wise and # the prediction on this point should shift assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1])) def test_partial_fit_weight_class_balanced(): # partial_fit with class_weight='balanced' not supported clf = PassiveAggressiveClassifier(class_weight="balanced") assert_raises(ValueError, clf.partial_fit, X, y, classes=np.unique(y)) def test_equal_class_weight(): X2 = [[1, 0], [1, 0], [0, 1], [0, 1]] y2 = [0, 0, 1, 1] clf = PassiveAggressiveClassifier(C=0.1, n_iter=1000, class_weight=None) clf.fit(X2, y2) # Already balanced, so "balanced" weights should have no effect clf_balanced = PassiveAggressiveClassifier(C=0.1, n_iter=1000, class_weight="balanced") clf_balanced.fit(X2, y2) clf_weighted = PassiveAggressiveClassifier(C=0.1, n_iter=1000, class_weight={0: 0.5, 1: 0.5}) clf_weighted.fit(X2, y2) # should be similar up to some epsilon due to learning rate schedule assert_almost_equal(clf.coef_, clf_weighted.coef_, decimal=2) assert_almost_equal(clf.coef_, clf_balanced.coef_, decimal=2) def test_wrong_class_weight_label(): # ValueError due to wrong class_weight label. X2 = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0], [1.0, 0.0]]) y2 = [1, 1, 1, -1, -1] clf = PassiveAggressiveClassifier(class_weight={0: 0.5}) assert_raises(ValueError, clf.fit, X2, y2) def test_wrong_class_weight_format(): # ValueError due to wrong class_weight argument type. X2 = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0], [1.0, 0.0]]) y2 = [1, 1, 1, -1, -1] clf = PassiveAggressiveClassifier(class_weight=[0.5]) assert_raises(ValueError, clf.fit, X2, y2) clf = PassiveAggressiveClassifier(class_weight="the larch") assert_raises(ValueError, clf.fit, X2, y2) def test_regressor_mse(): y_bin = y.copy() y_bin[y != 1] = -1 for data in (X, X_csr): for fit_intercept in (True, False): reg = PassiveAggressiveRegressor(C=1.0, n_iter=50, fit_intercept=fit_intercept, random_state=0) reg.fit(data, y_bin) pred = reg.predict(data) assert_less(np.mean((pred - y_bin) ** 2), 1.7) def test_regressor_partial_fit(): y_bin = y.copy() y_bin[y != 1] = -1 for data in (X, X_csr): reg = PassiveAggressiveRegressor(C=1.0, fit_intercept=True, random_state=0) for t in range(50): reg.partial_fit(data, y_bin) pred = reg.predict(data) assert_less(np.mean((pred - y_bin) ** 2), 1.7) def test_regressor_correctness(): y_bin = y.copy() y_bin[y != 1] = -1 for loss in ("epsilon_insensitive", "squared_epsilon_insensitive"): reg1 = MyPassiveAggressive(C=1.0, loss=loss, fit_intercept=True, n_iter=2) reg1.fit(X, y_bin) for data in (X, X_csr): reg2 = PassiveAggressiveRegressor(C=1.0, loss=loss, fit_intercept=True, n_iter=2, shuffle=False) reg2.fit(data, y_bin) assert_array_almost_equal(reg1.w, reg2.coef_.ravel(), decimal=2) def test_regressor_undefined_methods(): reg = PassiveAggressiveRegressor() for meth in ("transform",): assert_raises(AttributeError, lambda x: getattr(reg, x), meth)
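# --- Editorial sketch, not part of the original tests. MyPassiveAggressive's
# hinge branch above is the classic PA-I update: step = min(C, loss / ||x||^2),
# signed by the label. A single-sample restatement (assumes x is nonzero):
def _pa1_hinge_step(w, b, x, y, C=1.0):
    p = np.dot(x, w) + b
    loss = max(1 - y * p, 0)
    step = min(C, loss / np.dot(x, x)) * y
    return w + step * x, b + step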
bsd-3-clause
GiggleLiu/tba
lattice/tests/test_bond.py
1
3059
import time,pdb,sys
sys.path.insert(0,'../')
from numpy import *
from matplotlib.pyplot import *
from numpy.testing import dec,assert_,assert_raises,assert_almost_equal,assert_allclose
from bond import *

vdim=2

############# utilities ##################
def _setup_globals(dim):
    global vdim
    vdim=dim

def _assert_same_collection(bc1,bc2):
    assert_(bc1==bc2)

def _random_bond(nsite=10):
    atom1=random.randint(0,nsite)
    atom2=random.randint(0,nsite)
    bondv=random.random(vdim)
    return Bond(atom1,atom2,bondv)

def _random_bc(nsite=10,nbond=100):
    atom1s=random.randint(0,nsite,nbond)
    atom2s=random.randint(0,nsite,nbond)
    bondvs=random.random([nbond,vdim])
    #check for validity
    #atoms=concatenate([atom1s[:,newaxis],atom2s[:,newaxis]],axis=1).view([('',atom1s.dtype)]*2)
    #ar,indices=unique(atoms,return_index=True)
    #atom1s,atom2s,bondvs=atom1s[indices],atom2s[indices],bondvs[indices]
    return BondCollection((atom1s,atom2s,bondvs))

############## start ##################
def test_bond():
    print 'Testing Bond.'
    b1=_random_bond()
    print 'reverse'
    b1_r=-b1
    assert_(b1.atom1==b1_r.atom2)
    assert_(b1.atom2==b1_r.atom1)
    assert_allclose(b1.bondv,-b1_r.bondv)
    print '=='
    b2=Bond(b1.atom1,b1.atom2,b1.bondv)
    assert_(b1_r==-b2)
    assert_(b1_r!=b2)
    print b1

def test_construction():
    filename='test.dat'
    print 'test construction'
    bc=_random_bc()
    pm=arange(bc.N); random.shuffle(pm)
    bc2=bc[pm]
    _assert_same_collection(bc2,bc)
    print 'test saveload.'
    bc.save(filename)
    bc2=load_bonds(filename)
    _assert_same_collection(bc2,bc)

def test_add():
    print 'test __len__, __(r)add__, __iter__, __getitem__'
    nsite=10
    bc1=_random_bc(nsite=10)
    bc2=_random_bc(nsite=10)
    bc3=bc1+bc2
    for i,b1 in enumerate(bc1):
        assert_(b1==bc3[i])
    _assert_same_collection(bc3[len(bc1):],bc2)
    print 'test property N, vdim'
    assert_(bc3.N==bc2.N+bc1.N)
    assert_(bc3.vdim==vdim)

def test_query():
    print 'test query.'
    nsite=10
    bc1=BondCollection(((0,1,2,3,4,2,6),(0,0,0,4,3,1,0),([0,1],[2,3],[1,3],[0,1.],[1,1],[1,1],[2,2])))
    xs=[2,5]
    _assert_same_collection(bc1.query(atom1=2),BondCollection((bc1.atom1s[xs],bc1.atom2s[xs],bc1.bondvs[xs])))
    xs=[0,2,5]
    _assert_same_collection(bc1.query(atom1=(0,2)),BondCollection((bc1.atom1s[xs],bc1.atom2s[xs],bc1.bondvs[xs])))
    xs=[2]
    _assert_same_collection(bc1.query(atom1=2,atom2=0),BondCollection((bc1.atom1s[xs],bc1.atom2s[xs],bc1.bondvs[xs])))
    xs=[0,1,2,5,6]
    _assert_same_collection(bc1.query(atom1=2,atom2=0,condition='or'),BondCollection((bc1.atom1s[xs],bc1.atom2s[xs],bc1.bondvs[xs])))
    xs=[2,4]
    _assert_same_collection(bc1.query(atom1=2,bondv=[1,1],condition='xor'),BondCollection((bc1.atom1s[xs],bc1.atom2s[xs],bc1.bondvs[xs])))

def test_all():
    test_bond()
    test_construction()
    test_add()
    test_query()

if __name__=='__main__':
    for i in xrange(3):
        _setup_globals(dim=i+1)
        test_all()
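# --- Editorial sketch, not part of the original tests, restating the Bond API
# exercised above: negation swaps the endpoints and flips the bond vector.
def _sketch_bond_reverse():
    b=Bond(0,1,array([0.5,0.5]))
    rb=-b
    assert_(rb.atom1==1 and rb.atom2==0)
    assert_allclose(rb.bondv,-b.bondv)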
gpl-2.0
ucsd-progsys/nate
learning/decisionpath.py
2
5736
import math
import os.path
import random
random.seed()
from sklearn import tree
from sklearn.ensemble import RandomForestClassifier
import numpy as np
import pandas as pd
# import plotly.plotly as py
# import plotly.graph_objs as go
import pydotplus
# from IPython.display import Image
import sys

import input_old

model = sys.argv[1]
test, fs, ls = input_old.load_csv(sys.argv[2], filter_no_labels=True, only_slice=True)
# print test

test_samps = test.loc[:, 'F-Is-Eq':]
test_labels = test.loc[:, 'L-DidChange']
test_span = test.loc[:, 'SourceSpan']
# print test.iloc[1]
# print test.values[1]

feature_names = fs[1:]

# ------- Loading
from sklearn.externals import joblib
estimator = joblib.load(model)

# # -----PLOTTING
dot_data = tree.export_graphviz(estimator, out_file=None,
                                feature_names=feature_names,
                                filled=True, rounded=True)
graph = pydotplus.graph_from_dot_data(dot_data)
# graph.write_png(model+'.png')
# ---------
# graph.render(filename='img/g1')
graph.write_pdf(model + ".pdf")

# --------------
X_test = test_samps.values

# Using those arrays, we can parse the tree structure:
n_nodes = estimator.tree_.node_count
children_left = estimator.tree_.children_left
children_right = estimator.tree_.children_right
feature = estimator.tree_.feature
threshold = estimator.tree_.threshold

# The tree structure can be traversed to compute various properties such
# as the depth of each node and whether or not it is a leaf.
# node_depth = np.zeros(shape=n_nodes)
# is_leaves = np.zeros(shape=n_nodes, dtype=bool)
# stack = [(0, -1)]  # seed is the root node id and its parent depth
# while len(stack) > 0:
#     node_id, parent_depth = stack.pop()
#     node_depth[node_id] = parent_depth + 1
#     # If we have a test node
#     if (children_left[node_id] != children_right[node_id]):
#         stack.append((children_left[node_id], parent_depth + 1))
#         stack.append((children_right[node_id], parent_depth + 1))
#     else:
#         is_leaves[node_id] = True
#
# print("The binary tree structure has %s nodes and has "
#       "the following tree structure:"
#       % n_nodes)
# for i in range(n_nodes):
#     if is_leaves[i]:
#         print("%snode=%s leaf node." % (node_depth[i] * "\t", i))
#     else:
#         print("%snode=%s test node: go to node %s if X[:, %s] <= %ss else to "
#               "node %s."
#               % (node_depth[i] * "\t",
#                  i,
#                  children_left[i],
#                  feature[i],
#                  threshold[i],
#                  children_right[i],
#                  ))
# print()

# i = n_nodes  # lol loop

# First let's retrieve the decision path of each sample. The decision_path
# method allows to retrieve the node indicator functions. A non zero element of
# indicator matrix at the position (i, j) indicates that the sample i goes
# through the node j.

node_indicator = estimator.decision_path(X_test)

# Similarly, we can also have the leaves ids reached by each sample.
leave_id = estimator.apply(X_test)

# Now, it's possible to get the tests that were used to predict a sample or
# a group of samples. First, let's make it for the sample.

# sample_id = 50
# node_index = node_indicator.indices[node_indicator.indptr[sample_id]:
#                                     node_indicator.indptr[sample_id + 1]]
# print('Rules used to predict sample %s: ' % sample_id)
# for node_id in node_index:
#     if leave_id[sample_id] != node_id:
#         continue
#     if (X_test[sample_id, feature[node_id]] <= threshold[node_id]):
#         threshold_sign = "<="
#     else:
#         threshold_sign = ">"
#     print("decision id node %s : (X[%s, %s] (= %s) %s %s)"
#           % (node_id,
#              sample_id,
#              feature[node_id],
#              X_test[sample_id, feature[node_id]],
#              threshold_sign,
#              threshold[node_id]))

# print (estimator.decision_path(X_test[50]))

# samp_inds = [i for i, x in enumerate(test_samps) if x]

# Iterate over the test samples by row position. The original
# `for ind, _ in enumerate(test_samps)` walked the DataFrame's column labels,
# which is what the old `if ind >= len(test_samps): continue  # WTF?` guard
# was working around.
for ind in range(len(test_samps)):
    print('----------------------------------')
    print('For span')
    print(test_span.values[ind])
    print('with confidence')
    print((estimator.predict_proba(X_test[ind].reshape(1, -1)))[0][1])
    print('our prediction is')
    print((estimator.predict(X_test[ind].reshape(1, -1)))[0])
    print('should be')
    print(test_labels.values[ind])

    sample_id = ind
    # print X_test[sample_id]
    node_index = node_indicator.indices[node_indicator.indptr[sample_id]:
                                        node_indicator.indptr[sample_id + 1]]

    print('Rules used to predict sample %s: ' % sample_id)
    for node_id in node_index:
        if leave_id[sample_id] == node_id:  # <-- changed != to ==
            # continue  # <-- comment out
            print("leaf node {} reached, no decision here".format(leave_id[sample_id]))  # <--
        else:  # <-- added else to iterate through decision nodes
            if (X_test[sample_id, feature[node_id]] <= threshold[node_id]):
                threshold_sign = "<="
            else:
                threshold_sign = ">"
            print("%s : (= %s) %s %s"
                  % (feature_names[feature[node_id]],
                     X_test[sample_id, feature[node_id]],
                     threshold_sign,
                     threshold[node_id]))
            # print("decision id node %s : (X[%s, %s] (= %s) %s %s)"
            #       % (node_id,
            #          sample_id,
            #          feature[node_id],
            #          X_test[sample_id, feature[node_id]],  # <-- changed i to sample_id
            #          threshold_sign,
            #          threshold[node_id]))
            # print(feature_names[feature[node_id]])
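The decision-path walk above depends on the repo-specific `input_old` loader; here is a self-contained sketch of the same sklearn `decision_path` traversal on a toy dataset (the iris data is just a stand-in):

import numpy as np
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier

iris = load_iris()
clf = DecisionTreeClassifier(max_depth=3, random_state=0).fit(iris.data, iris.target)

X_demo = iris.data[:2]
feat, thresh = clf.tree_.feature, clf.tree_.threshold
node_indicator = clf.decision_path(X_demo)  # CSR matrix of (sample, node) hits
leaf_id = clf.apply(X_demo)                 # leaf reached by each sample

sample_id = 0
node_index = node_indicator.indices[node_indicator.indptr[sample_id]:
                                    node_indicator.indptr[sample_id + 1]]
for node_id in node_index:
    if leaf_id[sample_id] == node_id:
        print("leaf node %s reached" % node_id)
    else:
        sign = "<=" if X_demo[sample_id, feat[node_id]] <= thresh[node_id] else ">"
        print("%s (= %s) %s %s" % (iris.feature_names[feat[node_id]],
                                   X_demo[sample_id, feat[node_id]],
                                   sign, thresh[node_id]))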
bsd-3-clause
Windy-Ground/scikit-learn
examples/linear_model/plot_ransac.py
250
1673
""" =========================================== Robust linear model estimation using RANSAC =========================================== In this example we see how to robustly fit a linear model to faulty data using the RANSAC algorithm. """ import numpy as np from matplotlib import pyplot as plt from sklearn import linear_model, datasets n_samples = 1000 n_outliers = 50 X, y, coef = datasets.make_regression(n_samples=n_samples, n_features=1, n_informative=1, noise=10, coef=True, random_state=0) # Add outlier data np.random.seed(0) X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1)) y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers) # Fit line using all data model = linear_model.LinearRegression() model.fit(X, y) # Robustly fit linear model with RANSAC algorithm model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression()) model_ransac.fit(X, y) inlier_mask = model_ransac.inlier_mask_ outlier_mask = np.logical_not(inlier_mask) # Predict data of estimated models line_X = np.arange(-5, 5) line_y = model.predict(line_X[:, np.newaxis]) line_y_ransac = model_ransac.predict(line_X[:, np.newaxis]) # Compare estimated coefficients print("Estimated coefficients (true, normal, RANSAC):") print(coef, model.coef_, model_ransac.estimator_.coef_) plt.plot(X[inlier_mask], y[inlier_mask], '.g', label='Inliers') plt.plot(X[outlier_mask], y[outlier_mask], '.r', label='Outliers') plt.plot(line_X, line_y, '-k', label='Linear regressor') plt.plot(line_X, line_y_ransac, '-b', label='RANSAC regressor') plt.legend(loc='lower right') plt.show()
bsd-3-clause
aslihandincer/ibis
docs/sphinxext/ipython_sphinxext/ipython_directive.py
9
37645
# -*- coding: utf-8 -*- """ Sphinx directive to support embedded IPython code. This directive allows pasting of entire interactive IPython sessions, prompts and all, and their code will actually get re-executed at doc build time, with all prompts renumbered sequentially. It also allows you to input code as a pure python input by giving the argument python to the directive. The output looks like an interactive ipython section. To enable this directive, simply list it in your Sphinx ``conf.py`` file (making sure the directory where you placed it is visible to sphinx, as is needed for all Sphinx directives). For example, to enable syntax highlighting and the IPython directive:: extensions = ['IPython.sphinxext.ipython_console_highlighting', 'IPython.sphinxext.ipython_directive'] The IPython directive outputs code-blocks with the language 'ipython'. So if you do not have the syntax highlighting extension enabled as well, then all rendered code-blocks will be uncolored. By default this directive assumes that your prompts are unchanged IPython ones, but this can be customized. The configurable options that can be placed in conf.py are: ipython_savefig_dir: The directory in which to save the figures. This is relative to the Sphinx source directory. The default is `html_static_path`. ipython_rgxin: The compiled regular expression to denote the start of IPython input lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You shouldn't need to change this. ipython_rgxout: The compiled regular expression to denote the start of IPython output lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You shouldn't need to change this. ipython_promptin: The string to represent the IPython input prompt in the generated ReST. The default is 'In [%d]:'. This expects that the line numbers are used in the prompt. ipython_promptout: The string to represent the IPython prompt in the generated ReST. The default is 'Out [%d]:'. This expects that the line numbers are used in the prompt. ipython_mplbackend: The string which specifies if the embedded Sphinx shell should import Matplotlib and set the backend. The value specifies a backend that is passed to `matplotlib.use()` before any lines in `ipython_execlines` are executed. If not specified in conf.py, then the default value of 'agg' is used. To use the IPython directive without matplotlib as a dependency, set the value to `None`. It may end up that matplotlib is still imported if the user specifies so in `ipython_execlines` or makes use of the @savefig pseudo decorator. ipython_execlines: A list of strings to be exec'd in the embedded Sphinx shell. Typical usage is to make certain packages always available. Set this to an empty list if you wish to have no imports always available. If specified in conf.py as `None`, then it has the effect of making no imports available. If omitted from conf.py altogether, then the default value of ['import numpy as np', 'import matplotlib.pyplot as plt'] is used. ipython_holdcount When the @suppress pseudo-decorator is used, the execution count can be incremented or not. The default behavior is to hold the execution count, corresponding to a value of `True`. Set this to `False` to increment the execution count after each suppressed command. As an example, to use the IPython directive when `matplotlib` is not available, one sets the backend to `None`:: ipython_mplbackend = None An example usage of the directive is: .. code-block:: rst .. 
ipython:: In [1]: x = 1 In [2]: y = x**2 In [3]: print(y) See http://matplotlib.org/sampledoc/ipython_directive.html for additional documentation. ToDo ---- - Turn the ad-hoc test() function into a real test suite. - Break up ipython-specific functionality from matplotlib stuff into better separated code. Authors ------- - John D Hunter: orignal author. - Fernando Perez: refactoring, documentation, cleanups, port to 0.11. - VáclavŠmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations. - Skipper Seabold, refactoring, cleanups, pure python addition """ from __future__ import print_function from __future__ import unicode_literals #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- # Stdlib import os import re import sys import tempfile import ast from pandas.compat import zip, range, map, lmap, u, cStringIO as StringIO import warnings # To keep compatibility with various python versions try: from hashlib import md5 except ImportError: from md5 import md5 # Third-party import sphinx from docutils.parsers.rst import directives from docutils import nodes from sphinx.util.compat import Directive # Our own try: from traitlets.config import Config except ImportError: from IPython import Config from IPython import InteractiveShell from IPython.core.profiledir import ProfileDir from IPython.utils import io from IPython.utils.py3compat import PY3 if PY3: from io import StringIO text_type = str else: from StringIO import StringIO text_type = unicode #----------------------------------------------------------------------------- # Globals #----------------------------------------------------------------------------- # for tokenizing blocks COMMENT, INPUT, OUTPUT = range(3) #----------------------------------------------------------------------------- # Functions and class declarations #----------------------------------------------------------------------------- def block_parser(part, rgxin, rgxout, fmtin, fmtout): """ part is a string of ipython text, comprised of at most one input, one ouput, comments, and blank lines. The block parser parses the text into a list of:: blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...] where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and data is, depending on the type of token:: COMMENT : the comment string INPUT: the (DECORATOR, INPUT_LINE, REST) where DECORATOR: the input decorator (or None) INPUT_LINE: the input as string (possibly multi-line) REST : any stdout generated by the input line (not OUTPUT) OUTPUT: the output string, possibly multi-line """ block = [] lines = part.split('\n') N = len(lines) i = 0 decorator = None while 1: if i==N: # nothing left to parse -- the last line break line = lines[i] i += 1 line_stripped = line.strip() if line_stripped.startswith('#'): block.append((COMMENT, line)) continue if line_stripped.startswith('@'): # we're assuming at most one decorator -- may need to # rethink decorator = line_stripped continue # does this look like an input line? matchin = rgxin.match(line) if matchin: lineno, inputline = int(matchin.group(1)), matchin.group(2) # the ....: continuation string continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) Nc = len(continuation) # input lines can continue on for more than one line, if # we have a '\' line continuation char or a function call # echo line 'print'. 
The input line can only be # terminated by the end of the block or an output line, so # we parse out the rest of the input line if it is # multiline as well as any echo text rest = [] while i<N: # look ahead; if the next line is blank, or a comment, or # an output line, we're done nextline = lines[i] matchout = rgxout.match(nextline) #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation)) if matchout or nextline.startswith('#'): break elif nextline.startswith(continuation): nextline = nextline[Nc:] if nextline and nextline[0] == ' ': nextline = nextline[1:] inputline += '\n' + nextline else: rest.append(nextline) i+= 1 block.append((INPUT, (decorator, inputline, '\n'.join(rest)))) continue # if it looks like an output line grab all the text to the end # of the block matchout = rgxout.match(line) if matchout: lineno, output = int(matchout.group(1)), matchout.group(2) if i<N-1: output = '\n'.join([output] + lines[i:]) block.append((OUTPUT, output)) break return block class DecodingStringIO(StringIO, object): def __init__(self,buf='',encodings=('utf8',), *args, **kwds): super(DecodingStringIO, self).__init__(buf, *args, **kwds) self.set_encodings(encodings) def set_encodings(self, encodings): self.encodings = encodings def write(self,data): if isinstance(data, text_type): return super(DecodingStringIO, self).write(data) else: for enc in self.encodings: try: data = data.decode(enc) return super(DecodingStringIO, self).write(data) except : pass # default to brute utf8 if no encoding succeded return super(DecodingStringIO, self).write(data.decode('utf8', 'replace')) class EmbeddedSphinxShell(object): """An embedded IPython instance to run inside Sphinx""" def __init__(self, exec_lines=None,state=None): self.cout = DecodingStringIO(u'') if exec_lines is None: exec_lines = [] self.state = state # Create config object for IPython config = Config() config.InteractiveShell.autocall = False config.InteractiveShell.autoindent = False config.InteractiveShell.colors = 'NoColor' # create a profile so instance history isn't saved tmp_profile_dir = tempfile.mkdtemp(prefix='profile_') profname = 'auto_profile_sphinx_build' pdir = os.path.join(tmp_profile_dir,profname) profile = ProfileDir.create_profile_dir(pdir) # Create and initialize global ipython, but don't start its mainloop. # This will persist across different EmbededSphinxShell instances. IP = InteractiveShell.instance(config=config, profile_dir=profile) # io.stdout redirect must be done after instantiating InteractiveShell io.stdout = self.cout io.stderr = self.cout # For debugging, so we can see normal output, use this: #from IPython.utils.io import Tee #io.stdout = Tee(self.cout, channel='stdout') # dbg #io.stderr = Tee(self.cout, channel='stderr') # dbg # Store a few parts of IPython we'll need. self.IP = IP self.user_ns = self.IP.user_ns self.user_global_ns = self.IP.user_global_ns self.input = '' self.output = '' self.is_verbatim = False self.is_doctest = False self.is_suppress = False # Optionally, provide more detailed information to shell. self.directive = None # on the first call to the savefig decorator, we'll import # pyplot as plt so we can make a call to the plt.gcf().savefig self._pyplot_imported = False # Prepopulate the namespace. 
for line in exec_lines: self.process_input_line(line, store_history=False) def clear_cout(self): self.cout.seek(0) self.cout.truncate(0) def process_input_line(self, line, store_history=True): """process the input, capturing stdout""" stdout = sys.stdout splitter = self.IP.input_splitter try: sys.stdout = self.cout splitter.push(line) more = splitter.push_accepts_more() if not more: try: source_raw = splitter.source_raw_reset()[1] except: # recent ipython #4504 source_raw = splitter.raw_reset() self.IP.run_cell(source_raw, store_history=store_history) finally: sys.stdout = stdout def process_image(self, decorator): """ # build out an image directive like # .. image:: somefile.png # :width 4in # # from an input like # savefig somefile.png width=4in """ savefig_dir = self.savefig_dir source_dir = self.source_dir saveargs = decorator.split(' ') filename = saveargs[1] # insert relative path to image file in source outfile = os.path.relpath(os.path.join(savefig_dir,filename), source_dir) imagerows = ['.. image:: %s'%outfile] for kwarg in saveargs[2:]: arg, val = kwarg.split('=') arg = arg.strip() val = val.strip() imagerows.append(' :%s: %s'%(arg, val)) image_file = os.path.basename(outfile) # only return file name image_directive = '\n'.join(imagerows) return image_file, image_directive # Callbacks for each type of token def process_input(self, data, input_prompt, lineno): """ Process data block for INPUT token. """ decorator, input, rest = data image_file = None image_directive = None is_verbatim = decorator=='@verbatim' or self.is_verbatim is_doctest = (decorator is not None and \ decorator.startswith('@doctest')) or self.is_doctest is_suppress = decorator=='@suppress' or self.is_suppress is_okexcept = decorator=='@okexcept' or self.is_okexcept is_okwarning = decorator=='@okwarning' or self.is_okwarning is_savefig = decorator is not None and \ decorator.startswith('@savefig') # set the encodings to be used by DecodingStringIO # to convert the execution output into unicode if # needed. this attrib is set by IpythonDirective.run() # based on the specified block options, defaulting to ['ut self.cout.set_encodings(self.output_encoding) input_lines = input.split('\n') if len(input_lines) > 1: if input_lines[-1] != "": input_lines.append('') # make sure there's a blank line # so splitter buffer gets reset continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2)) if is_savefig: image_file, image_directive = self.process_image(decorator) ret = [] is_semicolon = False # Hold the execution count, if requested to do so. 
if is_suppress and self.hold_count: store_history = False else: store_history = True # Note: catch_warnings is not thread safe with warnings.catch_warnings(record=True) as ws: for i, line in enumerate(input_lines): if line.endswith(';'): is_semicolon = True if i == 0: # process the first input line if is_verbatim: self.process_input_line('') self.IP.execution_count += 1 # increment it anyway else: # only submit the line in non-verbatim mode self.process_input_line(line, store_history=store_history) formatted_line = '%s %s'%(input_prompt, line) else: # process a continuation line if not is_verbatim: self.process_input_line(line, store_history=store_history) formatted_line = '%s %s'%(continuation, line) if not is_suppress: ret.append(formatted_line) if not is_suppress and len(rest.strip()) and is_verbatim: # the "rest" is the standard output of the # input, which needs to be added in # verbatim mode ret.append(rest) self.cout.seek(0) output = self.cout.read() if not is_suppress and not is_semicolon: ret.append(output) elif is_semicolon: # get spacing right ret.append('') # context information filename = self.state.document.current_source lineno = self.state.document.current_line # output any exceptions raised during execution to stdout # unless :okexcept: has been specified. if not is_okexcept and "Traceback" in output: s = "\nException in %s at block ending on line %s\n" % (filename, lineno) s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n" sys.stdout.write('\n\n>>>' + ('-' * 73)) sys.stdout.write(s) sys.stdout.write(output) sys.stdout.write('<<<' + ('-' * 73) + '\n\n') # output any warning raised during execution to stdout # unless :okwarning: has been specified. if not is_okwarning: for w in ws: s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno) s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n" sys.stdout.write('\n\n>>>' + ('-' * 73)) sys.stdout.write(s) sys.stdout.write('-' * 76 + '\n') s=warnings.formatwarning(w.message, w.category, w.filename, w.lineno, w.line) sys.stdout.write(s) sys.stdout.write('<<<' + ('-' * 73) + '\n') self.cout.truncate(0) return (ret, input_lines, output, is_doctest, decorator, image_file, image_directive) def process_output(self, data, output_prompt, input_lines, output, is_doctest, decorator, image_file): """ Process data block for OUTPUT token. """ TAB = ' ' * 4 if is_doctest and output is not None: found = output found = found.strip() submitted = data.strip() if self.directive is None: source = 'Unavailable' content = 'Unavailable' else: source = self.directive.state.document.current_source content = self.directive.content # Add tabs and join into a single string. content = '\n'.join([TAB + line for line in content]) # Make sure the output contains the output prompt. ind = found.find(output_prompt) if ind < 0: e = ('output does not contain output prompt\n\n' 'Document source: {0}\n\n' 'Raw content: \n{1}\n\n' 'Input line(s):\n{TAB}{2}\n\n' 'Output line(s):\n{TAB}{3}\n\n') e = e.format(source, content, '\n'.join(input_lines), repr(found), TAB=TAB) raise RuntimeError(e) found = found[len(output_prompt):].strip() # Handle the actual doctest comparison. 
if decorator.strip() == '@doctest': # Standard doctest if found != submitted: e = ('doctest failure\n\n' 'Document source: {0}\n\n' 'Raw content: \n{1}\n\n' 'On input line(s):\n{TAB}{2}\n\n' 'we found output:\n{TAB}{3}\n\n' 'instead of the expected:\n{TAB}{4}\n\n') e = e.format(source, content, '\n'.join(input_lines), repr(found), repr(submitted), TAB=TAB) raise RuntimeError(e) else: self.custom_doctest(decorator, input_lines, found, submitted) def process_comment(self, data): """Process data fPblock for COMMENT token.""" if not self.is_suppress: return [data] def save_image(self, image_file): """ Saves the image file to disk. """ self.ensure_pyplot() command = ('plt.gcf().savefig("%s", bbox_inches="tight", ' 'dpi=100)' % image_file) #print 'SAVEFIG', command # dbg self.process_input_line('bookmark ipy_thisdir', store_history=False) self.process_input_line('cd -b ipy_savedir', store_history=False) self.process_input_line(command, store_history=False) self.process_input_line('cd -b ipy_thisdir', store_history=False) self.process_input_line('bookmark -d ipy_thisdir', store_history=False) self.clear_cout() def process_block(self, block): """ process block from the block_parser and return a list of processed lines """ ret = [] output = None input_lines = None lineno = self.IP.execution_count input_prompt = self.promptin % lineno output_prompt = self.promptout % lineno image_file = None image_directive = None for token, data in block: if token == COMMENT: out_data = self.process_comment(data) elif token == INPUT: (out_data, input_lines, output, is_doctest, decorator, image_file, image_directive) = \ self.process_input(data, input_prompt, lineno) elif token == OUTPUT: out_data = \ self.process_output(data, output_prompt, input_lines, output, is_doctest, decorator, image_file) if out_data: ret.extend(out_data) # save the image files if image_file is not None: self.save_image(image_file) return ret, image_directive def ensure_pyplot(self): """ Ensures that pyplot has been imported into the embedded IPython shell. Also, makes sure to set the backend appropriately if not set already. """ # We are here if the @figure pseudo decorator was used. Thus, it's # possible that we could be here even if python_mplbackend were set to # `None`. That's also strange and perhaps worthy of raising an # exception, but for now, we just set the backend to 'agg'. if not self._pyplot_imported: if 'matplotlib.backends' not in sys.modules: # Then ipython_matplotlib was set to None but there was a # call to the @figure decorator (and ipython_execlines did # not set a backend). #raise Exception("No backend was set, but @figure was used!") import matplotlib matplotlib.use('agg') # Always import pyplot into embedded shell. self.process_input_line('import matplotlib.pyplot as plt', store_history=False) self._pyplot_imported = True def process_pure_python(self, content): """ content is a list of strings. 
it is unedited directive content This runs it line by line in the InteractiveShell, prepends prompts as needed capturing stderr and stdout, then returns the content as a list as if it were ipython code """ output = [] savefig = False # keep up with this to clear figure multiline = False # to handle line continuation multiline_start = None fmtin = self.promptin ct = 0 for lineno, line in enumerate(content): line_stripped = line.strip() if not len(line): output.append(line) continue # handle decorators if line_stripped.startswith('@'): output.extend([line]) if 'savefig' in line: savefig = True # and need to clear figure continue # handle comments if line_stripped.startswith('#'): output.extend([line]) continue # deal with lines checking for multiline continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2)) if not multiline: modified = u"%s %s" % (fmtin % ct, line_stripped) output.append(modified) ct += 1 try: ast.parse(line_stripped) output.append(u'') except Exception: # on a multiline multiline = True multiline_start = lineno else: # still on a multiline modified = u'%s %s' % (continuation, line) output.append(modified) # if the next line is indented, it should be part of multiline if len(content) > lineno + 1: nextline = content[lineno + 1] if len(nextline) - len(nextline.lstrip()) > 3: continue try: mod = ast.parse( '\n'.join(content[multiline_start:lineno+1])) if isinstance(mod.body[0], ast.FunctionDef): # check to see if we have the whole function for element in mod.body[0].body: if isinstance(element, ast.Return): multiline = False else: output.append(u'') multiline = False except Exception: pass if savefig: # clear figure if plotted self.ensure_pyplot() self.process_input_line('plt.clf()', store_history=False) self.clear_cout() savefig = False return output def custom_doctest(self, decorator, input_lines, found, submitted): """ Perform a specialized doctest. """ from .custom_doctests import doctests args = decorator.split() doctest_type = args[1] if doctest_type in doctests: doctests[doctest_type](self, args, input_lines, found, submitted) else: e = "Invalid option to @doctest: {0}".format(doctest_type) raise Exception(e) class IPythonDirective(Directive): has_content = True required_arguments = 0 optional_arguments = 4 # python, suppress, verbatim, doctest final_argumuent_whitespace = True option_spec = { 'python': directives.unchanged, 'suppress' : directives.flag, 'verbatim' : directives.flag, 'doctest' : directives.flag, 'okexcept': directives.flag, 'okwarning': directives.flag, 'output_encoding': directives.unchanged_required } shell = None seen_docs = set() def get_config_options(self): # contains sphinx configuration variables config = self.state.document.settings.env.config # get config variables to set figure output directory confdir = self.state.document.settings.env.app.confdir savefig_dir = config.ipython_savefig_dir source_dir = os.path.dirname(self.state.document.current_source) if savefig_dir is None: savefig_dir = config.html_static_path if isinstance(savefig_dir, list): savefig_dir = savefig_dir[0] # safe to assume only one path? 
savefig_dir = os.path.join(confdir, savefig_dir) # get regex and prompt stuff rgxin = config.ipython_rgxin rgxout = config.ipython_rgxout promptin = config.ipython_promptin promptout = config.ipython_promptout mplbackend = config.ipython_mplbackend exec_lines = config.ipython_execlines hold_count = config.ipython_holdcount return (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout, mplbackend, exec_lines, hold_count) def setup(self): # Get configuration values. (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout, mplbackend, exec_lines, hold_count) = self.get_config_options() if self.shell is None: # We will be here many times. However, when the # EmbeddedSphinxShell is created, its interactive shell member # is the same for each instance. if mplbackend: import matplotlib # Repeated calls to use() will not hurt us since `mplbackend` # is the same each time. matplotlib.use(mplbackend) # Must be called after (potentially) importing matplotlib and # setting its backend since exec_lines might import pylab. self.shell = EmbeddedSphinxShell(exec_lines, self.state) # Store IPython directive to enable better error messages self.shell.directive = self # reset the execution count if we haven't processed this doc #NOTE: this may be borked if there are multiple seen_doc tmp files #check time stamp? if not self.state.document.current_source in self.seen_docs: self.shell.IP.history_manager.reset() self.shell.IP.execution_count = 1 self.shell.IP.prompt_manager.width = 0 self.seen_docs.add(self.state.document.current_source) # and attach to shell so we don't have to pass them around self.shell.rgxin = rgxin self.shell.rgxout = rgxout self.shell.promptin = promptin self.shell.promptout = promptout self.shell.savefig_dir = savefig_dir self.shell.source_dir = source_dir self.shell.hold_count = hold_count # setup bookmark for saving figures directory self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir, store_history=False) self.shell.clear_cout() return rgxin, rgxout, promptin, promptout def teardown(self): # delete last bookmark self.shell.process_input_line('bookmark -d ipy_savedir', store_history=False) self.shell.clear_cout() def run(self): debug = False #TODO, any reason block_parser can't be a method of embeddable shell # then we wouldn't have to carry these around rgxin, rgxout, promptin, promptout = self.setup() options = self.options self.shell.is_suppress = 'suppress' in options self.shell.is_doctest = 'doctest' in options self.shell.is_verbatim = 'verbatim' in options self.shell.is_okexcept = 'okexcept' in options self.shell.is_okwarning = 'okwarning' in options self.shell.output_encoding = [options.get('output_encoding', 'utf8')] # handle pure python code if 'python' in self.arguments: content = self.content self.content = self.shell.process_pure_python(content) parts = '\n'.join(self.content).split('\n\n') lines = ['.. code-block:: ipython', ''] figures = [] for part in parts: block = block_parser(part, rgxin, rgxout, promptin, promptout) if len(block): rows, figure = self.shell.process_block(block) for row in rows: lines.extend([' %s'%line for line in row.split('\n')]) if figure is not None: figures.append(figure) for figure in figures: lines.append('') lines.extend(figure.split('\n')) lines.append('') if len(lines)>2: if debug: print('\n'.join(lines)) else: # This has to do with input, not output. But if we comment # these lines out, then no IPython code will appear in the # final output. 
self.state_machine.insert_input( lines, self.state_machine.input_lines.source(0)) # cleanup self.teardown() return [] # Enable as a proper Sphinx directive def setup(app): setup.app = app app.add_directive('ipython', IPythonDirective) app.add_config_value('ipython_savefig_dir', None, 'env') app.add_config_value('ipython_rgxin', re.compile('In \[(\d+)\]:\s?(.*)\s*'), 'env') app.add_config_value('ipython_rgxout', re.compile('Out\[(\d+)\]:\s?(.*)\s*'), 'env') app.add_config_value('ipython_promptin', 'In [%d]:', 'env') app.add_config_value('ipython_promptout', 'Out[%d]:', 'env') # We could just let matplotlib pick whatever is specified as the default # backend in the matplotlibrc file, but this would cause issues if the # backend didn't work in headless environments. For this reason, 'agg' # is a good default backend choice. app.add_config_value('ipython_mplbackend', 'agg', 'env') # If the user sets this config value to `None`, then EmbeddedSphinxShell's # __init__ method will treat it as []. execlines = ['import numpy as np', 'import matplotlib.pyplot as plt'] app.add_config_value('ipython_execlines', execlines, 'env') app.add_config_value('ipython_holdcount', True, 'env') # Simple smoke test, needs to be converted to a proper automatic test. def test(): examples = [ r""" In [9]: pwd Out[9]: '/home/jdhunter/py4science/book' In [10]: cd bookdata/ /home/jdhunter/py4science/book/bookdata In [2]: from pylab import * In [2]: ion() In [3]: im = imread('stinkbug.png') @savefig mystinkbug.png width=4in In [4]: imshow(im) Out[4]: <matplotlib.image.AxesImage object at 0x39ea850> """, r""" In [1]: x = 'hello world' # string methods can be # used to alter the string @doctest In [2]: x.upper() Out[2]: 'HELLO WORLD' @verbatim In [3]: x.st<TAB> x.startswith x.strip """, r""" In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\ .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv' In [131]: print url.split('&') ['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv'] In [60]: import urllib """, r"""\ In [133]: import numpy.random @suppress In [134]: numpy.random.seed(2358) @doctest In [135]: numpy.random.rand(10,2) Out[135]: array([[ 0.64524308, 0.59943846], [ 0.47102322, 0.8715456 ], [ 0.29370834, 0.74776844], [ 0.99539577, 0.1313423 ], [ 0.16250302, 0.21103583], [ 0.81626524, 0.1312433 ], [ 0.67338089, 0.72302393], [ 0.7566368 , 0.07033696], [ 0.22591016, 0.77731835], [ 0.0072729 , 0.34273127]]) """, r""" In [106]: print x jdh In [109]: for i in range(10): .....: print i .....: .....: 0 1 2 3 4 5 6 7 8 9 """, r""" In [144]: from pylab import * In [145]: ion() # use a semicolon to suppress the output @savefig test_hist.png width=4in In [151]: hist(np.random.randn(10000), 100); @savefig test_plot.png width=4in In [151]: plot(np.random.randn(10000), 'o'); """, r""" # use a semicolon to suppress the output In [151]: plt.clf() @savefig plot_simple.png width=4in In [151]: plot([1,2,3]) @savefig hist_simple.png width=4in In [151]: hist(np.random.randn(10000), 100); """, r""" # update the current fig In [151]: ylabel('number') In [152]: title('normal distribution') @savefig hist_with_text.png In [153]: grid(True) @doctest float In [154]: 0.1 + 0.2 Out[154]: 0.3 @doctest float In [155]: np.arange(16).reshape(4,4) Out[155]: array([[ 0, 1, 2, 3], [ 4, 5, 6, 7], [ 8, 9, 10, 11], [12, 13, 14, 15]]) In [1]: x = np.arange(16, dtype=float).reshape(4,4) In [2]: x[0,0] = np.inf In [3]: x[0,1] = np.nan @doctest float In [4]: x Out[4]: 
array([[ inf, nan, 2., 3.], [ 4., 5., 6., 7.], [ 8., 9., 10., 11.], [ 12., 13., 14., 15.]]) """, ] # skip local-file depending first example: examples = examples[1:] #ipython_directive.DEBUG = True # dbg #options = dict(suppress=True) # dbg options = dict() for example in examples: content = example.split('\n') IPythonDirective('debug', arguments=None, options=options, content=content, lineno=0, content_offset=None, block_text=None, state=None, state_machine=None, ) # Run test suite as a script if __name__=='__main__': if not os.path.isdir('_static'): os.mkdir('_static') test() print('All OK? Check figures in _static/')
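A hypothetical `conf.py` fragment showing how the configuration values registered in `setup()` above would be set; the second entry in `extensions` assumes this local copy of the module is importable as `ipython_directive`, and all values are illustrative:

# Hypothetical Sphinx conf.py fragment; values mirror the defaults
# registered in setup() above and are examples, not prescriptions.
extensions = ['IPython.sphinxext.ipython_console_highlighting',
              'ipython_directive']  # this module, assumed to be on sys.path

ipython_savefig_dir = '_images'      # where @savefig figures are written
ipython_mplbackend = 'agg'           # or None to build without matplotlib
ipython_execlines = ['import numpy as np',
                     'import matplotlib.pyplot as plt']
ipython_holdcount = True             # keep the In[] count fixed across @suppress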
apache-2.0
synergetics/nest
examples/neuronview/neuronview.py
13
10544
# -*- coding: utf-8 -*-
#
# neuronview.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST.  If not, see <http://www.gnu.org/licenses/>.

import pygtk
pygtk.require('2.0')
import gtk
import pango
import gobject
from matplotlib.figure import Figure
from matplotlib.backends.backend_gtkagg import FigureCanvasGTKAgg as FigureCanvas
import matplotlib.gridspec as gridspec
import os

import nest

default_neuron = "iaf_neuron"
default_stimulator = "dc_generator"


class Main():
    def __init__(self):
        self._gladefile = "neuronview.glade"

        self._builder = gtk.Builder()
        self._builder.add_from_file(self._gladefile)
        self._builder.connect_signals(self)

        self._win = self._builder.get_object("mainwindow")
        self._win.resize(900, 700)

        box = self._builder.get_object("box5")
        self._stimulatordictview = DictView()
        self._builder.get_object("scrolledwindow2").add(self._stimulatordictview)

        box = self._builder.get_object("box4")
        self._neurondictview = DictView()
        self._builder.get_object("scrolledwindow3").add(self._neurondictview)

        self.populate_comboboxes()

        self._figure = Figure(figsize=(5, 4), dpi=100)
        canvas = FigureCanvas(self._figure)
        canvas.set_size_request(200, 250)
        canvas.show()

        box = self._builder.get_object("box3")
        bg_style = box.get_style().bg[gtk.STATE_NORMAL]
        gtk_color = (bg_style.red_float, bg_style.green_float, bg_style.blue_float)
        self._figure.set_facecolor(gtk_color)
        box.pack_start(canvas)

        self._win.show()
        gtk.main()

    def update_figure(self, spikes, potentials):
        if nest.GetKernelStatus("time") != 0.0:
            self._figure.clear()

            # num_figures = (len(spikes) != 0) + (len(potentials) != 0)
            # fig_num = 1

            gs = gridspec.GridSpec(2, 1, height_ratios=[1, 4])

            ax0 = self._figure.add_subplot(gs[0])
            ax0.plot(spikes[0]["times"], [1] * len(spikes[0]["times"]), ".")
            ax0.set_yticks([])
            ax0.set_xticks([])

            ax1 = self._figure.add_subplot(gs[1])
            ax1.plot(potentials[0]["times"], potentials[0]["V_m"], "r-")
            ax1.set_ylabel("$V_m$ (mV)")
            ax1.set_xlabel("time (s)")

            # plt.tight_layout()

            self._figure.canvas.draw()

    def filter_statusdict(self, params):
        for key in ["archiver_length", "available", "capacity",
                    "elementsize", "frozen", "global_id",
                    "instantiations", "is_refractory", "local",
                    "model", "element_type", "offset", "origin",
                    "receptor_types", "recordables",
                    "refractory_input", "rmax", "state", "t_spike",
                    "thread", "tlast", "tspike", "type_id", "vp",
                    "ymod"]:
            if key in params.keys():
                params.pop(key)

    def populate_comboboxes(self):
        neuronmodels = self._builder.get_object("neuronmodels")
        neuronmodelsliststore = neuronmodels.get_model()

        stimulatormodels = self._builder.get_object("stimulatormodels")
        stimulatormodelsliststore = stimulatormodels.get_model()

        neuron_it = None
        stimulator_it = None

        models = nest.Models("nodes")
        models = [x for x in models
                  if x not in ["correlation_detector", "sli_neuron",
                               "iaf_psc_alpha_norec", "parrot_neuron",
                               "parrot_neuron_ps"]]

        for entry in models:
            try:
                entrytype = nest.GetDefaults(entry)["element_type"]
            except:
                entrytype = "unknown"

            if entrytype == "neuron":
                it = neuronmodelsliststore.append([entry])
                if entry == default_neuron:
                    neuron_it = it
            elif entrytype == "stimulator":
                it = stimulatormodelsliststore.append([entry])
                if entry == default_stimulator:
                    stimulator_it = it

        cell = gtk.CellRendererText()

        neuronmodels.pack_start(cell, True)
        neuronmodels.add_attribute(cell, 'text', 0)
        neuronmodels.set_active_iter(neuron_it)

        stimulatormodels.pack_start(cell, True)
        stimulatormodels.add_attribute(cell, 'text', 0)
        stimulatormodels.set_active_iter(stimulator_it)

        docviewcombo = self._builder.get_object("docviewcombo")
        docviewcomboliststore = docviewcombo.get_model()
        docviewcomboliststore.append(["Stimulating device"])
        it = docviewcomboliststore.append(["Neuron"])
        docviewcombo.pack_start(cell, True)
        docviewcombo.add_attribute(cell, 'text', 0)
        docviewcombo.set_active_iter(it)

    def get_help_text(self, name):
        nest.sli_run("statusdict /prgdocdir get")
        docdir = nest.sli_pop()

        helptext = "No documentation available"

        for subdir in ["cc", "sli"]:
            filename = os.path.join(docdir, "help", subdir, name + ".hlp")
            if os.path.isfile(filename):
                helptext = open(filename, 'r').read()

        return helptext

    def on_model_selected(self, widget):
        liststore = widget.get_model()
        model = liststore.get_value(widget.get_active_iter(), 0)

        statusdict = nest.GetDefaults(model)
        self.filter_statusdict(statusdict)

        if widget == self._builder.get_object("neuronmodels"):
            self._neurondictview.set_params(statusdict)

        if widget == self._builder.get_object("stimulatormodels"):
            self._stimulatordictview.set_params(statusdict)

        self.on_doc_selected(self._builder.get_object("docviewcombo"))

    def on_doc_selected(self, widget):
        liststore = widget.get_model()
        doc = liststore.get_value(widget.get_active_iter(), 0)

        docview = self._builder.get_object("docview")
        docbuffer = gtk.TextBuffer()

        if doc == "Neuron":
            combobox = self._builder.get_object("neuronmodels")

        if doc == "Stimulating device":
            combobox = self._builder.get_object("stimulatormodels")

        liststore = combobox.get_model()
        model = liststore.get_value(combobox.get_active_iter(), 0)
        docbuffer.set_text(self.get_help_text(model))
        docview.set_buffer(docbuffer)
        docview.modify_font(pango.FontDescription("monospace 10"))

    def on_simulate_clicked(self, widget):
        nest.ResetKernel()

        combobox = self._builder.get_object("stimulatormodels")
        liststore = combobox.get_model()
        stimulatormodel = liststore.get_value(combobox.get_active_iter(), 0)
        params = self._stimulatordictview.get_params()
        stimulator = nest.Create(stimulatormodel, params=params)

        combobox = self._builder.get_object("neuronmodels")
        liststore = combobox.get_model()
        neuronmodel = liststore.get_value(combobox.get_active_iter(), 0)
        neuron = nest.Create(neuronmodel,
                             params=self._neurondictview.get_params())

        weight = self._builder.get_object("weight").get_value()
        delay = self._builder.get_object("delay").get_value()
        nest.Connect(stimulator, neuron, weight, delay)

        sd = nest.Create("spike_detector", params={"record_to": ["memory"]})
        nest.Connect(neuron, sd)

        vm = nest.Create("voltmeter", params={"record_to": ["memory"],
                                              "interval": 0.1})
        nest.Connect(vm, neuron)

        simtime = self._builder.get_object("simtime").get_value()
        nest.Simulate(simtime)

        self.update_figure(nest.GetStatus(sd, "events"),
                           nest.GetStatus(vm, "events"))

    def on_delete_event(self, widget, event):
        self.on_quit(widget)
        return True

    def on_quit(self, project):
        self._builder.get_object("mainwindow").hide()
        gtk.main_quit()


class DictView(gtk.TreeView):
    def __init__(self, params=None):
        gtk.TreeView.__init__(self)

        if params:
            self.params = params
            self.repopulate()

        renderer = gtk.CellRendererText()
        column = gtk.TreeViewColumn("Name", renderer, text=1)
        self.append_column(column)

        renderer = gtk.CellRendererText()
        renderer.set_property("mode", gtk.CELL_RENDERER_MODE_EDITABLE)
        renderer.set_property("editable", True)
        column = gtk.TreeViewColumn("Value", renderer, text=2)
        self.append_column(column)

        self.set_size_request(200, 150)

        renderer.connect("edited", self.check_value)

        self.show()

    def repopulate(self):
        model = gtk.TreeStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING,
                              gobject.TYPE_STRING)

        for key in sorted(self.params.keys()):
            pos = model.insert_after(None, None)
            data = {"key": key, "element_type": type(self.params[key])}
            model.set_value(pos, 0, data)
            model.set_value(pos, 1, str(key))
            model.set_value(pos, 2, str(self.params[key]))

        self.set_model(model)

    def check_value(self, widget, path, new_text):
        model = self.get_model()
        data = model[path][0]

        try:
            typename = data["element_type"].__name__
            new_value = eval("%s('%s')" % (typename, new_text))
            if typename == "bool" and new_text.lower() in ["false", "0"]:
                new_value = False
            self.params[data["key"]] = new_value
            model[path][2] = str(new_value)
        except ValueError:
            old_value = self.params[data["key"]]
            model[path][2] = str(old_value)

    def get_params(self):
        return self.params

    def set_params(self, params):
        self.params = params
        self.repopulate()


if __name__ == "__main__":
    Main()
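A headless sketch of what `on_simulate_clicked()` does, stripped of the GTK scaffolding. It reuses the same legacy PyNEST calls as the file above (the positional `nest.Connect(pre, post, weight, delay)` form is the old-style signature; newer NEST versions use `syn_spec` dicts), and the stimulator amplitude is an arbitrary example value:

import nest

nest.ResetKernel()
stimulator = nest.Create("dc_generator", params={"amplitude": 200.0})  # example amplitude (pA)
neuron = nest.Create("iaf_neuron")
nest.Connect(stimulator, neuron, 1.0, 1.0)  # weight, delay as in on_simulate_clicked

sd = nest.Create("spike_detector", params={"record_to": ["memory"]})
nest.Connect(neuron, sd)
vm = nest.Create("voltmeter", params={"record_to": ["memory"], "interval": 0.1})
nest.Connect(vm, neuron)

nest.Simulate(1000.0)
print(nest.GetStatus(sd, "events")[0]["times"])  # spike times recorded in memory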
gpl-2.0
vanzaj/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers
Chapter2_MorePyMC/separation_plot.py
86
1494
# separation plot
# Author: Cameron Davidson-Pilon, 2013
# see http://mdwardlab.com/sites/default/files/GreenhillWardSacks.pdf

import matplotlib.pyplot as plt
import numpy as np


def separation_plot(p, y, **kwargs):
    """
    This function creates a separation plot for logistic and probit classification.
    See http://mdwardlab.com/sites/default/files/GreenhillWardSacks.pdf

    p: The proportions/probabilities, can be a nxM matrix which represents M models.
    y: the 0-1 response variables.

    """
    assert p.shape[0] == y.shape[0], "p.shape[0] != y.shape[0]"
    n = p.shape[0]

    try:
        M = p.shape[1]
    except:
        p = p.reshape(n, 1)
        M = p.shape[1]

    #colors = np.array(["#fdf2db", "#e44a32"])
    colors_bmh = np.array(["#eeeeee", "#348ABD"])

    fig = plt.figure()  # figsize = (8, 1.3*M)

    for i in range(M):
        ax = fig.add_subplot(M, 1, i + 1)
        ix = np.argsort(p[:, i])
        # plot the different bars
        bars = ax.bar(np.arange(n), np.ones(n), width=1.,
                      color=colors_bmh[y[ix].astype(int)],
                      edgecolor='none')
        ax.plot(np.arange(n + 1), np.append(p[ix, i], p[ix, i][-1]), "k",
                linewidth=1., drawstyle="steps-post")
        # create expected value bar.
        ax.vlines([(1 - p[ix, i]).sum()], [0], [1])
        #ax.grid(False)
        #ax.axis('off')

    plt.xlim(0, n)
    plt.tight_layout()

    return
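A minimal usage sketch for `separation_plot` with synthetic probabilities and outcomes (the data here is random, purely for illustration):

import numpy as np

np.random.seed(0)
p = np.random.uniform(0, 1, size=100)  # stand-in model probabilities P(y=1)
y = np.random.binomial(1, p)           # 0/1 outcomes drawn from those probabilities
separation_plot(p, y)
plt.show()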
mit
stefanv/brainx
doc/sphinxext/inheritance_diagram.py
6
13678
""" Defines a docutils directive for inserting inheritance diagrams. Provide the directive with one or more classes or modules (separated by whitespace). For modules, all of the classes in that module will be used. Example:: Given the following classes: class A: pass class B(A): pass class C(A): pass class D(B, C): pass class E(B): pass .. inheritance-diagram: D E Produces a graph like the following: A / \ B C / \ / E D The graph is inserted as a PNG+image map into HTML and a PDF in LaTeX. """ import inspect import os import re import subprocess try: from hashlib import md5 except ImportError: from md5 import md5 from docutils.nodes import Body, Element from docutils.parsers.rst import directives from sphinx.roles import xfileref_role def my_import(name): """Module importer - taken from the python documentation. This function allows importing names with dots in them.""" mod = __import__(name) components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod class DotException(Exception): pass class InheritanceGraph(object): """ Given a list of classes, determines the set of classes that they inherit from all the way to the root "object", and then is able to generate a graphviz dot graph from them. """ def __init__(self, class_names, show_builtins=False): """ *class_names* is a list of child classes to show bases from. If *show_builtins* is True, then Python builtins will be shown in the graph. """ self.class_names = class_names self.classes = self._import_classes(class_names) self.all_classes = self._all_classes(self.classes) if len(self.all_classes) == 0: raise ValueError("No classes found for inheritance diagram") self.show_builtins = show_builtins py_sig_re = re.compile(r'''^([\w.]*\.)? # class names (\w+) \s* $ # optionally arguments ''', re.VERBOSE) def _import_class_or_module(self, name): """ Import a class using its fully-qualified *name*. """ try: path, base = self.py_sig_re.match(name).groups() except: raise ValueError( "Invalid class or module '%s' specified for inheritance diagram" % name) fullname = (path or '') + base path = (path and path.rstrip('.')) if not path: path = base try: module = __import__(path, None, None, []) # We must do an import of the fully qualified name. Otherwise if a # subpackage 'a.b' is requested where 'import a' does NOT provide # 'a.b' automatically, then 'a.b' will not be found below. This # second call will force the equivalent of 'import a.b' to happen # after the top-level import above. my_import(fullname) except ImportError: raise ValueError( "Could not import class or module '%s' specified for inheritance diagram" % name) try: todoc = module for comp in fullname.split('.')[1:]: todoc = getattr(todoc, comp) except AttributeError: raise ValueError( "Could not find class or module '%s' specified for inheritance diagram" % name) # If a class, just return it if inspect.isclass(todoc): return [todoc] elif inspect.ismodule(todoc): classes = [] for cls in todoc.__dict__.values(): if inspect.isclass(cls) and cls.__module__ == todoc.__name__: classes.append(cls) return classes print 'todoc?',todoc raise ValueError( "'%s' does not resolve to a class or module" % name) def _import_classes(self, class_names): """ Import a list of classes. """ classes = [] for name in class_names: classes.extend(self._import_class_or_module(name)) return classes def _all_classes(self, classes): """ Return a list of all classes that are ancestors of *classes*. 
""" all_classes = {} def recurse(cls): all_classes[cls] = None for c in cls.__bases__: if c not in all_classes: recurse(c) for cls in classes: recurse(cls) return all_classes.keys() def class_name(self, cls, parts=0): """ Given a class object, return a fully-qualified name. This works for things I've tested in matplotlib so far, but may not be completely general. """ module = cls.__module__ if module == '__builtin__': fullname = cls.__name__ else: fullname = "%s.%s" % (module, cls.__name__) if parts == 0: return fullname name_parts = fullname.split('.') return '.'.join(name_parts[-parts:]) def get_all_class_names(self): """ Get all of the class names involved in the graph. """ return [self.class_name(x) for x in self.all_classes] # These are the default options for graphviz default_graph_options = { "rankdir": "LR", "size": '"8.0, 12.0"' } default_node_options = { "shape": "box", "fontsize": 10, "height": 0.25, "fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans", "style": '"setlinewidth(0.5)"' } default_edge_options = { "arrowsize": 0.5, "style": '"setlinewidth(0.5)"' } def _format_node_options(self, options): return ','.join(["%s=%s" % x for x in options.items()]) def _format_graph_options(self, options): return ''.join(["%s=%s;\n" % x for x in options.items()]) def generate_dot(self, fd, name, parts=0, urls={}, graph_options={}, node_options={}, edge_options={}): """ Generate a graphviz dot graph from the classes that were passed in to __init__. *fd* is a Python file-like object to write to. *name* is the name of the graph *urls* is a dictionary mapping class names to http urls *graph_options*, *node_options*, *edge_options* are dictionaries containing key/value pairs to pass on as graphviz properties. """ g_options = self.default_graph_options.copy() g_options.update(graph_options) n_options = self.default_node_options.copy() n_options.update(node_options) e_options = self.default_edge_options.copy() e_options.update(edge_options) fd.write('digraph %s {\n' % name) fd.write(self._format_graph_options(g_options)) for cls in self.all_classes: if not self.show_builtins and cls in __builtins__.values(): continue name = self.class_name(cls, parts) # Write the node this_node_options = n_options.copy() url = urls.get(self.class_name(cls)) if url is not None: this_node_options['URL'] = '"%s"' % url fd.write(' "%s" [%s];\n' % (name, self._format_node_options(this_node_options))) # Write the edges for base in cls.__bases__: if not self.show_builtins and base in __builtins__.values(): continue base_name = self.class_name(base, parts) fd.write(' "%s" -> "%s" [%s];\n' % (base_name, name, self._format_node_options(e_options))) fd.write('}\n') def run_dot(self, args, name, parts=0, urls={}, graph_options={}, node_options={}, edge_options={}): """ Run graphviz 'dot' over this graph, returning whatever 'dot' writes to stdout. *args* will be passed along as commandline arguments. *name* is the name of the graph *urls* is a dictionary mapping class names to http urls Raises DotException for any of the many os and installation-related errors that may occur. """ try: dot = subprocess.Popen(['dot'] + list(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) except OSError: raise DotException("Could not execute 'dot'. 
Are you sure you have 'graphviz' installed?") except ValueError: raise DotException("'dot' called with invalid arguments") except: raise DotException("Unexpected error calling 'dot'") self.generate_dot(dot.stdin, name, parts, urls, graph_options, node_options, edge_options) dot.stdin.close() result = dot.stdout.read() returncode = dot.wait() if returncode != 0: raise DotException("'dot' returned the errorcode %d" % returncode) return result class inheritance_diagram(Body, Element): """ A docutils node to use as a placeholder for the inheritance diagram. """ pass def inheritance_diagram_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): """ Run when the inheritance_diagram directive is first encountered. """ node = inheritance_diagram() class_names = arguments # Create a graph starting with the list of classes graph = InheritanceGraph(class_names) # Create xref nodes for each target of the graph's image map and # add them to the doc tree so that Sphinx can resolve the # references to real URLs later. These nodes will eventually be # removed from the doctree after we're done with them. for name in graph.get_all_class_names(): refnodes, x = xfileref_role( 'class', ':class:`%s`' % name, name, 0, state) node.extend(refnodes) # Store the graph object so we can use it to generate the # dot file later node['graph'] = graph # Store the original content for use as a hash node['parts'] = options.get('parts', 0) node['content'] = " ".join(class_names) return [node] def get_graph_hash(node): return md5(node['content'] + str(node['parts'])).hexdigest()[-10:] def html_output_graph(self, node): """ Output the graph for HTML. This will insert a PNG with clickable image map. """ graph = node['graph'] parts = node['parts'] graph_hash = get_graph_hash(node) name = "inheritance%s" % graph_hash path = '_images' dest_path = os.path.join(setup.app.builder.outdir, path) if not os.path.exists(dest_path): os.makedirs(dest_path) png_path = os.path.join(dest_path, name + ".png") path = setup.app.builder.imgpath # Create a mapping from fully-qualified class names to URLs. urls = {} for child in node: if child.get('refuri') is not None: urls[child['reftitle']] = child.get('refuri') elif child.get('refid') is not None: urls[child['reftitle']] = '#' + child.get('refid') # These arguments to dot will save a PNG file to disk and write # an HTML image map to stdout. image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'], name, parts, urls) return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' % (path, name, name, image_map)) def latex_output_graph(self, node): """ Output the graph for LaTeX. This will insert a PDF. """ graph = node['graph'] parts = node['parts'] graph_hash = get_graph_hash(node) name = "inheritance%s" % graph_hash dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images')) if not os.path.exists(dest_path): os.makedirs(dest_path) pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf")) graph.run_dot(['-Tpdf', '-o%s' % pdf_path], name, parts, graph_options={'size': '"6.0,6.0"'}) return '\n\\includegraphics{%s}\n\n' % pdf_path def visit_inheritance_diagram(inner_func): """ This is just a wrapper around html/latex_output_graph to make it easier to handle errors and insert warnings. 
""" def visitor(self, node): try: content = inner_func(self, node) except DotException, e: # Insert the exception as a warning in the document warning = self.document.reporter.warning(str(e), line=node.line) warning.parent = node node.children = [warning] else: source = self.document.attributes['source'] self.body.append(content) node.children = [] return visitor def do_nothing(self, node): pass def setup(app): setup.app = app setup.confdir = app.confdir app.add_node( inheritance_diagram, latex=(visit_inheritance_diagram(latex_output_graph), do_nothing), html=(visit_inheritance_diagram(html_output_graph), do_nothing)) app.add_directive( 'inheritance-diagram', inheritance_diagram_directive, False, (1, 100, 0), parts = directives.nonnegative_int)
bsd-3-clause
moritzschaefer/kickercam
src/kickercam/labeling/interpolate.py
1
1231
#!/usr/bin/env python3

import argparse
import pandas as pd
import numpy as np
import sys


def interpolate(df):
    new_index = pd.Index(range(df.index.max() + 1))
    interpolate_df = pd.DataFrame(index=new_index).join(df)
    for i, index in enumerate(df.index):
        try:
            index2 = df.index[i + 1]
        except IndexError:
            break

        if isinstance(df.loc[index, 'x'], np.int64) and df.loc[index, 'x'] >= 0 and \
                isinstance(df.loc[index2, 'x'], np.int64) and df.loc[index2, 'x'] >= 0:
            interpolate_df.loc[index:index2, 'x'] = np.linspace(
                df.loc[index, 'x'], df.loc[index2, 'x'],
                num=1 + index2 - index, endpoint=True)
            interpolate_df.loc[index:index2, 'y'] = np.linspace(
                df.loc[index, 'y'], df.loc[index2, 'y'],
                num=1 + index2 - index, endpoint=True)

    return interpolate_df.fillna(-1)


if __name__ == '__main__':
    ap = argparse.ArgumentParser()
    ap.add_argument('input', type=str)
    ap.add_argument('output', type=str)
    args = ap.parse_args(sys.argv[1:])
    df = pd.read_csv(args.input, index_col=0)
    interpolate(df).to_csv(args.output)
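A minimal usage sketch for `interpolate`: frames 0 and 4 carry hand-labeled positions, the frames between them get linearly interpolated, and anything left unlabeled becomes -1 (the coordinates are made up):

import pandas as pd

labels = pd.DataFrame({'x': [10, 50], 'y': [20, 40]}, index=[0, 4])
print(interpolate(labels))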
gpl-3.0
kanchenxi04/vnpy-app
vn.trader/ctaAlgo/strategy_TripleMa_v0.3.py
1
32295
# encoding: UTF-8

# System built-in modules first
import os  # fixed: os is needed by the backtest helpers below but was missing
from datetime import datetime, timedelta, date
from datetime import time as dt_time  # fixed: datetime.time is an instance method, not a constructor
from time import sleep

# Next, import vnpy's base modules
import sys
sys.path.append('C:\\vnpy-master\\vnpy-master\\vn.trader')
from vtConstant import EMPTY_STRING, EMPTY_INT, DIRECTION_LONG, DIRECTION_SHORT, OFFSET_OPEN, STATUS_CANCELLED, EMPTY_FLOAT
from utilSinaClient import UtilSinaClient

# Then our own modules
from ctaTemplate import *
from ctaBase import *
from ctaLineBar import *
from ctaPolicy import *


class Strategy_TripleMa(CtaTemplate):
    """Rebar futures, 5-minute timeframe, triple moving average strategy

    Strategy: 10/20/120 moving averages, with the 120 MA as a long/short filter.
    Above MA120:
        MA10 crosses above MA20 (golden cross): open long
        MA10 crosses below MA20 (death cross): close long
    Below MA120:
        MA10 crosses below MA20 (death cross): open short
        MA10 crosses above MA20 (golden cross): close short

    Change log
    v0.1 initial version, with cancel-after-one-minute order logic
    v0.2 improved entry conditions
    v0.3 improved exits, added trailing stop
    """
    className = 'Strategy_TripleMa'
    author = u'李来佳'

    # Strategy parameters set externally
    inputSS = 1      # order size; range 1~100, step 1, default 1
    minDiff = 1      # minimum price tick of the instrument
    atrLength = 20   # ATR period: average range over 20 five-minute bars

    # ----------------------------------------------------------------------
    def __init__(self, ctaEngine, setting=None):
        """Constructor"""
        super(Strategy_TripleMa, self).__init__(ctaEngine, setting)

        # Parameters to monitor
        self.paramList.append('inputSS')
        self.paramList.append('minDiff')

        # Variables to monitor
        self.varList.append('pos')      # position
        self.varList.append('entrust')  # whether an order is pending

        self.curDateTime = None    # current tick time
        self.curTick = None        # latest tick

        self.lastOrderTime = None  # time of the last order
        self.cancelSeconds = 60    # cancellation timeout (seconds)

        # Intraday trading windows
        self.openWindow = False    # market-open window
        self.tradeWindow = False   # trading window
        self.closeWindow = False   # end-of-day close-out window

        self.inited = False        # strategy initialisation finished
        self.backtesting = False   # backtesting or not

        self.lineM5 = None         # 5-minute K-line

        # Create a strategy policy object
        self.policy = CtaPolicy()

        self.atr = 10  # average true range

        self.highPriceInLong = EMPTY_FLOAT   # highest price since the long entry
        self.lowPriceInShort = EMPTY_FLOAT   # lowest price since the short entry

        if setting:
            # Update parameters from the configuration
            self.setParam(setting)

            # Create the 5-minute K-line
            lineM5Setting = {}
            lineM5Setting['name'] = u'M5'                   # K-line name
            lineM5Setting['barTimeInterval'] = 60 * 5       # bar length in seconds
            lineM5Setting['inputMa1Len'] = 10               # first moving average
            lineM5Setting['inputMa2Len'] = 20               # second moving average
            lineM5Setting['inputMa3Len'] = 120              # third moving average
            lineM5Setting['inputAtr1Len'] = self.atrLength  # ATR
            lineM5Setting['inputPreLen'] = 10               # previous high/low lookback
            lineM5Setting['minDiff'] = self.minDiff
            lineM5Setting['shortSymbol'] = self.shortSymbol
            self.lineM5 = CtaLineBar(self, self.onBarM5, lineM5Setting)

            try:
                mode = setting['mode']
                if mode != EMPTY_STRING:
                    self.lineM5.setMode(setting['mode'])
            except KeyError:
                self.lineM5.setMode(self.lineM5.TICK_MODE)

        self.onInit()

    # ----------------------------------------------------------------------
    def onInit(self, force=False):
        """Initialise the strategy"""
        if force:
            self.writeCtaLog(u'forced strategy re-initialisation')
            self.inited = False
            self.trading = False  # controls whether trading is enabled
        else:
            self.writeCtaLog(u'strategy initialisation')
            if self.inited:
                self.writeCtaLog(u'already initialised, skipping')
                return

        self.pos = EMPTY_INT      # initialise position
        self.entrust = EMPTY_INT  # initialise order status

        if not self.backtesting:
            # Preload historical data here.
            if not self.__initDataFromSina():
                return

        self.inited = True   # set the initialised flag
        self.trading = True  # enable trading

        self.putEvent()
        self.writeCtaLog(u'strategy initialisation finished')

    def __initDataFromSina(self):
        """Initialise the 5-minute bars from Sina"""
        sina = UtilSinaClient(self)
        ret = sina.getMinBars(symbol=self.symbol, minute=5, callback=self.lineM5.addBar)
        if not ret:
            self.writeCtaLog(u'failed to fetch M5 data')
            return False
        return True

    def onStart(self):
        """Start the strategy (must be implemented by the subclass)"""
        self.writeCtaLog(u'started')

    # ----------------------------------------------------------------------
    def onStop(self):
        """Stop the strategy (must be implemented by the subclass)"""
        self.uncompletedOrders.clear()
        self.pos = EMPTY_INT
        self.entrust = EMPTY_INT
        self.writeCtaLog(u'stopped')
        self.putEvent()

    # ----------------------------------------------------------------------
    def onTrade(self, trade):
        """Trade update"""
        self.writeCtaLog(u'{0},OnTrade(),current position:{1} '.format(self.curDateTime, self.pos))

    # ----------------------------------------------------------------------
    def onOrder(self, order):
        """Order update"""
        self.writeCtaLog(
            u'OnOrder() order update,orderID:{0},{1},totalVol:{2},tradedVol:{3},offset:{4},price:{5},direction:{6},status:{7}'
            .format(order.orderID, order.vtSymbol, order.totalVolume, order.tradedVolume,
                    order.offset, order.price, order.direction, order.status))

        # Order key; vnpy uses the "gateway.orderid" combination
        orderkey = order.gatewayName + u'.' + order.orderID
        if orderkey in self.uncompletedOrders:

            if order.totalVolume == order.tradedVolume:
                # The open/close order is fully filled

                # Closing a short position finished (cover)
                if self.uncompletedOrders[orderkey]['DIRECTION'] == DIRECTION_LONG and order.offset != OFFSET_OPEN:
                    self.writeCtaLog(u'closing short position finished')
                    # Update position
                    self.pos = EMPTY_INT

                # Closing a long position finished (sell)
                if self.uncompletedOrders[orderkey]['DIRECTION'] == DIRECTION_SHORT and order.offset != OFFSET_OPEN:
                    self.writeCtaLog(u'closing long position finished')
                    # Update position
                    self.pos = EMPTY_INT

                # Opening a long position finished
                if self.uncompletedOrders[orderkey]['DIRECTION'] == DIRECTION_LONG and order.offset == OFFSET_OPEN:
                    self.writeCtaLog(u'opening long position finished')
                    # Update position
                    self.pos = order.tradedVolume

                # Opening a short position finished
                if self.uncompletedOrders[orderkey]['DIRECTION'] == DIRECTION_SHORT and order.offset == OFFSET_OPEN:
                    self.writeCtaLog(u'opening short position finished')
                    self.pos = 0 - order.tradedVolume

                del self.uncompletedOrders[orderkey]

                if len(self.uncompletedOrders) == 0:
                    self.entrust = 0
                    self.lastOrderTime = None

                if self.pos == 0:
                    self.highPriceInLong = EMPTY_FLOAT
                    self.lowPriceInShort = EMPTY_FLOAT

            elif order.tradedVolume > 0 and not order.totalVolume == order.tradedVolume and order.offset != OFFSET_OPEN:
                # A close order is partially filled
                pass

            elif order.offset == OFFSET_OPEN and order.status == STATUS_CANCELLED:
                # An open order has been cancelled
                self.entrust = 0
                pass

            else:
                self.writeCtaLog(u'OnOrder() order returned,total:{0},traded:{1}'
                                 .format(order.totalVolume, order.tradedVolume, ))

        self.putEvent()  # update the monitoring event

    # ----------------------------------------------------------------------
    def onStopOrder(self, orderRef):
        """Stop order update"""
        self.writeCtaLog(u'{0},stop order triggered,orderRef:{1}'.format(self.curDateTime, orderRef))
        pass

    # ----------------------------------------------------------------------
    def onTick(self, tick):
        """Tick update
        :type tick: object
        """
        self.curTick = tick

        if (tick.datetime.hour >= 3 and tick.datetime.hour <= 8) or (
                tick.datetime.hour >= 16 and tick.datetime.hour <= 20):
            self.writeCtaLog(u'ignoring data during market close / call auction')
            return

        # Update the strategy clock (used to time-stamp events in backtests)
        self.curDateTime = tick.datetime

        # 2. Compute the trading and close-out windows
        self.__timeWindow(self.curDateTime)

        # Push the tick to lineM5
        self.lineM5.onTick(tick)

        # Check whether we are live or still pre-loading data
        if not (self.inited and len(self.lineM5.lineMa3) > 0):
            return

        # Tick-level entry/exit logic
        if self.pos != 0:

            # policy trailing stop
            if self.policy.exitOnLastRtnPips > 0:
                if self.pos > 0 and self.entrust != 1 \
                        and tick.lastPrice < (self.highPriceInLong - self.policy.exitOnLastRtnPips * self.minDiff):
                    self.writeCtaLog(
                        u'{0},onTick trailing stop,closing long {1} lot(s) at price {2}'.format(tick.datetime, self.inputSS, tick.lastPrice))
                    orderid = self.sell(price=tick.lastPrice, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

                if self.pos < 0 and self.entrust != -1 \
                        and tick.lastPrice > (self.lowPriceInShort + self.policy.exitOnLastRtnPips * self.minDiff):
                    self.writeCtaLog(
                        u'{0},onTick trailing stop,closing short {1} lot(s) at price {2}'.format(tick.datetime, self.inputSS, tick.lastPrice))
                    orderid = self.cover(price=tick.lastPrice, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

            # Fixed stop
            if self.policy.exitOnStopPrice > 0:
                if self.pos > 0 and self.entrust != 1 \
                        and tick.lastPrice < self.policy.exitOnStopPrice:
                    self.writeCtaLog(
                        u'{0},onTick fixed stop,closing long {1} lot(s) at price {2}'.format(tick.datetime, self.inputSS, tick.lastPrice))
                    orderid = self.sell(price=tick.lastPrice, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

                if self.pos < 0 and self.entrust != -1 \
                        and tick.lastPrice > self.policy.exitOnStopPrice:
                    self.writeCtaLog(
                        u'{0},onTick fixed stop,closing short {1} lot(s) at price {2}'.format(tick.datetime, self.inputSS, tick.lastPrice))
                    orderid = self.cover(price=tick.lastPrice, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

    # ----------------------------------------------------------------------
    def onBar(self, bar):
        """Minute bar update (backtesting only; called from outside the strategy)"""
        # Update the strategy clock.
        # Backtests send bar.datetime as the bar's open time, so by the time
        # the bar reaches the strategy the current time is the bar's close time.
        self.curDateTime = bar.datetime + timedelta(seconds=self.lineM5.barTimeInterval)

        # 2. Compute the trading and close-out windows
        self.__timeWindow(bar.datetime)

        # Push the bar to the 5-minute K-line
        self.lineM5.addBar(bar)

        # 4. Trading logic
        # Check whether we are live or still pre-loading data
        if not self.inited:
            if len(self.lineM5.lineBar) > 120 + 5:
                self.inited = True
            else:
                return

    def onBarM5(self, bar):
        """5-minute bar update; in live trading this is called back by self.lineM5"""
        # Log the bar contents via lineM5
        self.writeCtaLog(self.lineM5.displayLastBar())

        # Not yet initialised
        if not self.inited:
            if len(self.lineM5.lineBar) > 120 + 5:
                self.inited = True
            else:
                return

        if self.lineM5.mode == self.lineM5.TICK_MODE:
            idx = 2
        else:
            idx = 1

        # Update the ATR
        if self.lineM5.lineAtr1[-1] > 2:
            self.atr = max(self.lineM5.lineAtr1[-1], 5)

        # Twice the ATR is the trailing stop: exitOnLastRtnPips is the pull-back
        # (in ticks) from the best price since the last entry
        self.policy.exitOnLastRtnPips = int((self.atr * 2) / self.minDiff) + 1

        # Update the highest/lowest price
        if self.backtesting:
            # While holding a long position, track the highest price
            if self.pos > 0:
                if bar.high > self.highPriceInLong:
                    self.highPriceInLong = bar.high

            # While holding a short position, track the lowest price
            if self.pos < 0:
                if bar.low < self.lowPriceInShort:
                    self.lowPriceInShort = bar.low

        # Run the cancellation logic
        self.__cancelLogic(dt=self.curDateTime)

        if len(self.lineM5.lineMa3) > 5:
            # Mean of the last five values of the third moving average
            ma5_Ma120 = ta.MA(numpy.array(self.lineM5.lineMa3, dtype=float), 5)[-1]
        else:
            ma5_Ma120 = self.lineM5.lineMa3[-1]

        ma5_Ma10 = ta.MA(numpy.array(self.lineM5.lineMa1, dtype=float), 5)[-1]

        # If flat, check the entry logic
        if self.pos == 0:
            # MA10 crosses above MA20, MA10 > MA120, bar.close > MA120, MA(MA120) < MA120
            if self.lineM5.lineMa1[-1 - idx] < self.lineM5.lineMa2[-1 - idx] \
                    and self.lineM5.lineMa1[0 - idx] > self.lineM5.lineMa2[0 - idx] \
                    and self.lineM5.lineMa1[0 - idx] > self.lineM5.lineMa3[0 - idx] \
                    and bar.close > self.lineM5.lineMa3[-1] \
                    and ma5_Ma120 < self.lineM5.lineMa3[-1] \
                    and self.lineM5.lineMa1[-1] > ma5_Ma10:
                self.writeCtaLog(u'{0},opening long {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                orderid = self.buy(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                if orderid:
                    # Record the order time (for timed cancellation)
                    self.lastOrderTime = self.curDateTime
                    # Record the entry price (the bar's close); entryPrice is the entry price
                    self.policy.entryPrice = bar.close
                    # Long position: use the previous low as the fixed stop price
                    self.policy.exitOnStopPrice = self.lineM5.preLow[-1]
                return

            # MA10 crosses below MA20, MA10 < MA120, bar.close < MA120, MA(MA120) > MA120
            if self.lineM5.lineMa1[-1 - idx] > self.lineM5.lineMa2[-1 - idx] \
                    and self.lineM5.lineMa1[0 - idx] < self.lineM5.lineMa2[0 - idx] \
                    and self.lineM5.lineMa1[0 - idx] < self.lineM5.lineMa3[0 - idx] \
                    and bar.close < self.lineM5.lineMa3[-1] \
                    and ma5_Ma120 > self.lineM5.lineMa3[-1] \
                    and self.lineM5.lineMa1[-1] < ma5_Ma10:
                self.writeCtaLog(u'{0},opening short {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                orderid = self.short(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                if orderid:
                    # Record the order time (for timed cancellation)
                    self.lastOrderTime = self.curDateTime
                    # Record the entry price
                    self.policy.entryPrice = bar.close
                    # Seed the lowest price
                    self.lowPriceInShort = bar.close
                    # Short position: use the previous high as the fixed stop price (exitOnStopPrice)
                    self.policy.exitOnStopPrice = self.lineM5.preHigh[-1]
                return

        # Holding a position: check the exit conditions
        else:
            # MA10 crosses below MA20: exit longs
            if self.lineM5.lineMa1[-1] < self.lineM5.lineMa2[-1] \
                    and self.pos > 0 and self.entrust != -1:
                self.writeCtaLog(u'{0},closing long {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                orderid = self.sell(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                if orderid:
                    # Record the order time (for timed cancellation)
                    self.lastOrderTime = self.curDateTime
                return

            # MA10 crosses above MA20: exit shorts
            if self.lineM5.lineMa1[-1] > self.lineM5.lineMa2[-1] \
                    and self.pos < 0 and self.entrust != 1:
                self.writeCtaLog(u'{0},closing short {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                orderid = self.cover(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                if orderid:
                    # Record the order time (for timed cancellation)
                    self.lastOrderTime = self.curDateTime
                return

            # policy trailing stop
            if self.policy.exitOnLastRtnPips > 0:  # pull-back threshold after the last entry is set
                # Long
                if self.pos > 0 and self.entrust != 1 \
                        and bar.close < (self.highPriceInLong - self.policy.exitOnLastRtnPips * self.minDiff):
                    self.writeCtaLog(u'{0},trailing stop,closing long {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                    orderid = self.sell(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

                # Short
                if self.pos < 0 and self.entrust != -1 \
                        and bar.close > (self.lowPriceInShort + self.policy.exitOnLastRtnPips * self.minDiff):
                    self.writeCtaLog(u'{0},trailing stop,closing short {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                    orderid = self.cover(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

            # Fixed stop
            if self.policy.exitOnStopPrice > 0:
                if self.pos > 0 and self.entrust != 1 \
                        and bar.close < self.policy.exitOnStopPrice:
                    self.writeCtaLog(u'{0},fixed stop,closing long {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                    orderid = self.sell(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

                if self.pos < 0 and self.entrust != -1 \
                        and bar.close > self.policy.exitOnStopPrice:
                    self.writeCtaLog(u'{0},fixed stop,closing short {1} lot(s) at price {2}'.format(bar.datetime, self.inputSS, bar.close))
                    orderid = self.cover(price=bar.close, volume=self.inputSS, orderTime=self.curDateTime)
                    if orderid:
                        # Record the order time (for timed cancellation)
                        self.lastOrderTime = self.curDateTime
                    return

        # Pre-close position check
        self.__dailyCloseCheck(bar)

    # ----------------------------------------------------------------------
    def __cancelLogic(self, dt, force=False):
        """Order cancellation logic"""
        if len(self.uncompletedOrders) < 1:
            return

        if not self.lastOrderTime:
            self.writeCtaLog(u'unexpected: last order time is None')
            return

        # The cancellation timeout for close orders should be half that of open orders
        if (self.pos >= 0 and self.entrust == 1) \
                or (self.pos <= 0 and self.entrust == -1):
            i = 1
        else:
            i = 1  # was 2, temporarily disabled

        canceled = False
        if ((dt - self.lastOrderTime).seconds > self.cancelSeconds / i) \
                or force:
            # Still unfilled after the configured timeout
            for order in self.uncompletedOrders.keys():
                self.writeCtaLog(u'{0},unfilled for {1} seconds,cancelling order:{2}'.format(dt, (dt - self.lastOrderTime).seconds, order))
                self.cancelOrder(str(order))
                canceled = True

            # Drop the unfilled orders
            self.uncompletedOrders.clear()

            if canceled:
                self.entrust = 0
                self.policy.entryPrice = 0
            else:
                self.writeCtaLog(u'unexpected: nothing was cancelled')

    def __dailyCloseCheck(self, bar):
        """Before the daily close, flat out any losing position"""
        if self.pos == 0 and self.entrust == 0:
            return False

        if bar.time not in ['14:45:00', '14:50:00', '14:55:00', '22:45:00', '22:50:00', '22:55:00']:
            return False

        # Cancel unfilled orders
        if len(self.uncompletedOrders) > 0:
            for order in self.uncompletedOrders.keys():
                self.writeCtaLog(u'{0},15 minutes before close and still unfilled,cancelling order:{1}'.format(bar.datetime, order))
                self.cancelOrder(str(order))
            self.uncompletedOrders.clear()
            self.entrust = 0

        # Forced close-out
        if self.pos > 0 and bar.close < self.policy.entryPrice + self.atr:
            # Force the position flat while the profit is still small
            self.writeCtaLog(u'forced intraday close of a losing long position')
            # Give away two ticks
            orderid = self.sell(price=bar.close - 2 * self.minDiff, volume=self.inputSS, orderTime=self.curDateTime)
            if orderid:
                # Record the order time
                self.lastOrderTime = self.curDateTime
            return True

        if self.pos < 0 and bar.close > self.policy.entryPrice - self.atr:
            self.writeCtaLog(u'forced intraday close of a losing short position')
            orderid = self.cover(price=bar.close + 2 * self.minDiff, volume=self.inputSS, orderTime=self.curDateTime)
            if orderid:
                # Record the order time (for timed cancellation)
                self.lastOrderTime = self.curDateTime
            return True

        return True

    def __timeWindow(self, dt):
        """Trading and close-out windows"""
        # Trading window: avoid the first five minutes of the day and night
        # sessions to dodge overnight gaps.
        self.closeWindow = False
        self.tradeWindow = False
        self.openWindow = False

        # Initialise the first trade of the day
        # if (tick.datetime.hour == 9 or tick.datetime.hour == 21) and tick.datetime.minute == 0 and tick.datetime.second == 0:
        #     self.firstTrade = True

        # Opening period: volatile; used for stop-loss/take-profit checks or entries
        if (dt.hour == 9 or dt.hour == 21) and dt.minute < 2:
            self.openWindow = True

        # Day session
        if dt.hour == 9 and dt.minute >= 0:
            self.tradeWindow = True
            return

        if dt.hour == 10:
            if dt.minute <= 15 or dt.minute >= 30:
                self.tradeWindow = True
                return

        if dt.hour == 11 and dt.minute <= 30:
            self.tradeWindow = True
            return

        if dt.hour == 13 and dt.minute >= 30:
            self.tradeWindow = True
            return

        if dt.hour == 14:
            if dt.minute < 59:
                self.tradeWindow = True
                return

            if dt.minute == 59:  # day-session close-out
                self.closeWindow = True
                return

        # Night session
        if dt.hour == 21 and dt.minute >= 0:
            self.tradeWindow = True
            return

        # SHFE precious metals: trade until 02:30 the next morning
        if self.shortSymbol in NIGHT_MARKET_SQ1:
            if dt.hour == 22 or dt.hour == 23 or dt.hour == 0 or dt.hour == 1:
                self.tradeWindow = True
                return

            if dt.hour == 2:
                if dt.minute < 29:  # up to 29 minutes before the close
                    self.tradeWindow = True
                    return

                if dt.minute == 29:  # night-session close-out
                    self.closeWindow = True
                    return
            return

        # SHFE base metals, ferrous metals, bitumen: close at 01:00 the next morning
        if self.shortSymbol in NIGHT_MARKET_SQ2:
            if dt.hour == 22 or dt.hour == 23:
                self.tradeWindow = True
                return

            if dt.hour == 0:
                if dt.minute < 59:  # up to one minute before the close
                    self.tradeWindow = True
                    return

                if dt.minute == 59:  # night-session close-out
                    self.closeWindow = True
                    return
            return

        # SHFE natural rubber: close at 23:00
        if self.shortSymbol in NIGHT_MARKET_SQ3:
            if dt.hour == 22:
                if dt.minute < 59:  # up to one minute before the close
                    self.tradeWindow = True
                    return

                if dt.minute == 59:  # night-session close-out
                    self.closeWindow = True
                    return

        # CZCE and DCE: close at 23:30
        if self.shortSymbol in NIGHT_MARKET_ZZ or self.shortSymbol in NIGHT_MARKET_DL:
            if dt.hour == 22:
                self.tradeWindow = True
                return

            if dt.hour == 23:
                if dt.minute < 29:  # up to one minute before the close
                    self.tradeWindow = True
                    return

                if dt.minute == 29 and dt.second > 30:  # night-session close-out
                    self.closeWindow = True
                    return
            return

    # ----------------------------------------------------------------------
    def strToTime(self, t, ms):
        """Convert a time string to a time object"""
        hh, mm, ss = t.split(':')
        # fixed: use the time class imported from datetime; calling
        # datetime.time(...) with integers raised a TypeError
        tt = dt_time(int(hh), int(mm), int(ss), microsecond=ms)
        return tt

    # ----------------------------------------------------------------------
    def saveData(self, id):
        """Save intermediate data"""
        # Save the K-line
        if not self.backtesting:
            return


def testRbByTick():
    # Create the backtesting engine
    engine = BacktestingEngine()

    # Set the engine's backtesting mode to tick
    engine.setBacktestingMode(engine.TICK_MODE)

    # Start date of the backtest data
    engine.setStartDate('20160101')

    # End date of the backtest data
    engine.setEndDate('20160330')

    # engine.connectMysql()
    engine.setDatabase(dbName='stockcn', symbol='rb')

    # Instrument parameters
    engine.setSlippage(0)          # 1 tick = 0.1, 2 ticks = 0.2
    engine.setRate(float(0.0001))  # 0.01%
    engine.setSize(10)             # contract size

    settings = {}
    settings['shortSymbol'] = 'RB'
    settings['name'] = 'TripleMa'
    settings['mode'] = 'tick'
    settings['backtesting'] = True

    # Create the strategy object inside the engine
    engine.initStrategy(Strategy_TripleMa, setting=settings)

    # Use simple (non-compounding) accounting
    engine.usageCompounding = False  # if True, only effective with FINAL_MODE

    # REALTIME_MODE computes the equity curve in real time;
    # FINAL_MODE computes it once when the backtest ends
    engine.calculateMode = engine.REALTIME_MODE
    engine.initCapital = 100000      # initial capital
    engine.percentLimit = 30         # capital usage cap (%)
    engine.barTimeInterval = 60 * 5  # bar period in seconds, used to auto-shift times from csv files
    engine.fixCommission = 10        # fixed commission per open/close

    # Run the backtest
    engine.runBacktestingWithMysql()

    # Show the backtest results
    engine.showBacktestingResult()


def testRbByBar():
    # Create the backtesting engine
    engine = BacktestingEngine()

    # Set the engine's backtesting mode to bar
    engine.setBacktestingMode(engine.BAR_MODE)

    # Start date of the backtest data
    engine.setStartDate('20100101')

    # End date of the backtest data
    engine.setEndDate('20161231')

    engine.setDatabase(dbName='stockcn', symbol='rb')

    # Instrument parameters
    engine.setSlippage(0)          # 1 tick = 0.1, 2 ticks = 0.2
    engine.setRate(float(0.0001))  # 0.01%
    engine.setSize(10)             # contract size

    settings = {}
    settings['shortSymbol'] = 'RB'
    settings['name'] = 'TripleMa'
    settings['mode'] = 'bar'
    settings['backtesting'] = True
    settings['percentLimit'] = 30

    # Create the strategy object inside the engine
    engine.initStrategy(Strategy_TripleMa, setting=settings)

    # Use simple (non-compounding) accounting
    engine.usageCompounding = False  # if True, only effective with FINAL_MODE

    # REALTIME_MODE computes the equity curve in real time;
    # FINAL_MODE computes it once when the backtest ends
    engine.calculateMode = engine.REALTIME_MODE
    engine.initCapital = 100000    # initial capital
    engine.percentLimit = 30       # capital usage cap (%)
    engine.barTimeInterval = 300   # bar period in seconds, used to auto-shift times from csv files

    # Run the backtest from a csv file
    engine.runBackTestingWithBarFile(os.getcwd() + '/cache/RB88_20100101_20161231_5m.csv')

    # Show the backtest results
    engine.showBacktestingResult()


if __name__ == '__main__':
    # Allow running the backtest directly by double-click.
    # PyQt4 is imported so matplotlib uses PyQt4 rather than PySide,
    # preventing initialisation errors.
    from ctaBacktesting import *
    from setup_logger import setup_logger

    setup_logger(
        filename=u'TestLogs/{0}_{1}.log'.format(Strategy_TripleMa.className,
                                                datetime.now().strftime('%m%d_%H%M')),
        debug=False)

    # Backtest rebar
    testRbByBar()
mit
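Editor's note: the entry conditions in Strategy_TripleMa above detect a moving-average cross by comparing the previous and current bar's MA values. Below is a minimal, self-contained sketch of that golden-cross/death-cross test; the function name and inputs are illustrative only, not part of the vnpy API.

def detect_cross(fast, slow):
    """Return 'golden' if `fast` crossed above `slow` on the last bar,
    'death' if it crossed below, and None otherwise.
    `fast` and `slow` are sequences of moving-average values, oldest first."""
    if len(fast) < 2 or len(slow) < 2:
        return None
    if fast[-2] < slow[-2] and fast[-1] > slow[-1]:
        return 'golden'  # e.g. MA10 crossing above MA20 -> open long above MA120
    if fast[-2] > slow[-2] and fast[-1] < slow[-1]:
        return 'death'   # e.g. MA10 crossing below MA20 -> close long / open short
    return None

# Example: the fast MA overtakes the slow MA on the last value
assert detect_cross([9.0, 10.5], [10.0, 10.2]) == 'golden'
assert detect_cross([10.5, 9.0], [10.2, 10.0]) == 'death'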
cauchycui/scikit-learn
sklearn/tests/test_common.py
127
7665
""" General tests for all estimators in sklearn. """ # Authors: Andreas Mueller <[email protected]> # Gael Varoquaux [email protected] # License: BSD 3 clause from __future__ import print_function import os import warnings import sys import pkgutil from sklearn.externals.six import PY3 from sklearn.utils.testing import assert_false, clean_warning_registry from sklearn.utils.testing import all_estimators from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_in from sklearn.utils.testing import ignore_warnings import sklearn from sklearn.cluster.bicluster import BiclusterMixin from sklearn.linear_model.base import LinearClassifierMixin from sklearn.utils.estimator_checks import ( _yield_all_checks, CROSS_DECOMPOSITION, check_parameters_default_constructible, check_class_weight_balanced_linear_classifier, check_transformer_n_iter, check_non_transformer_estimators_n_iter, check_get_params_invariance) def test_all_estimator_no_base_class(): # test that all_estimators doesn't find abstract classes. for name, Estimator in all_estimators(): msg = ("Base estimators such as {0} should not be included" " in all_estimators").format(name) assert_false(name.lower().startswith('base'), msg=msg) def test_all_estimators(): # Test that estimators are default-constructible, clonable # and have working repr. estimators = all_estimators(include_meta_estimators=True) # Meta sanity-check to make sure that the estimator introspection runs # properly assert_greater(len(estimators), 0) for name, Estimator in estimators: # some can just not be sensibly default constructed yield check_parameters_default_constructible, name, Estimator def test_non_meta_estimators(): # input validation etc for non-meta estimators estimators = all_estimators() for name, Estimator in estimators: if issubclass(Estimator, BiclusterMixin): continue if name.startswith("_"): continue for check in _yield_all_checks(name, Estimator): yield check, name, Estimator def test_configure(): # Smoke test the 'configure' step of setup, this tests all the # 'configure' functions in the setup.pys in the scikit cwd = os.getcwd() setup_path = os.path.abspath(os.path.join(sklearn.__path__[0], '..')) setup_filename = os.path.join(setup_path, 'setup.py') if not os.path.exists(setup_filename): return try: os.chdir(setup_path) old_argv = sys.argv sys.argv = ['setup.py', 'config'] clean_warning_registry() with warnings.catch_warnings(): # The configuration spits out warnings when not finding # Blas/Atlas development headers warnings.simplefilter('ignore', UserWarning) if PY3: with open('setup.py') as f: exec(f.read(), dict(__name__='__main__')) else: execfile('setup.py', dict(__name__='__main__')) finally: sys.argv = old_argv os.chdir(cwd) def test_class_weight_balanced_linear_classifiers(): classifiers = all_estimators(type_filter='classifier') clean_warning_registry() with warnings.catch_warnings(record=True): linear_classifiers = [ (name, clazz) for name, clazz in classifiers if 'class_weight' in clazz().get_params().keys() and issubclass(clazz, LinearClassifierMixin)] for name, Classifier in linear_classifiers: if name == "LogisticRegressionCV": # Contrary to RidgeClassifierCV, LogisticRegressionCV use actual # CV folds and fit a model for each CV iteration before averaging # the coef. Therefore it is expected to not behave exactly as the # other linear model. 
continue yield check_class_weight_balanced_linear_classifier, name, Classifier @ignore_warnings def test_import_all_consistency(): # Smoke test to check that any name in a __all__ list is actually defined # in the namespace of the module or package. pkgs = pkgutil.walk_packages(path=sklearn.__path__, prefix='sklearn.', onerror=lambda _: None) submods = [modname for _, modname, _ in pkgs] for modname in submods + ['sklearn']: if ".tests." in modname: continue package = __import__(modname, fromlist="dummy") for name in getattr(package, '__all__', ()): if getattr(package, name, None) is None: raise AttributeError( "Module '{0}' has no attribute '{1}'".format( modname, name)) def test_root_import_all_completeness(): EXCEPTIONS = ('utils', 'tests', 'base', 'setup') for _, modname, _ in pkgutil.walk_packages(path=sklearn.__path__, onerror=lambda _: None): if '.' in modname or modname.startswith('_') or modname in EXCEPTIONS: continue assert_in(modname, sklearn.__all__) def test_non_transformer_estimators_n_iter(): # Test that all estimators of type which are non-transformer # and which have an attribute of max_iter, return the attribute # of n_iter atleast 1. for est_type in ['regressor', 'classifier', 'cluster']: regressors = all_estimators(type_filter=est_type) for name, Estimator in regressors: # LassoLars stops early for the default alpha=1.0 for # the iris dataset. if name == 'LassoLars': estimator = Estimator(alpha=0.) else: estimator = Estimator() if hasattr(estimator, "max_iter"): # These models are dependent on external solvers like # libsvm and accessing the iter parameter is non-trivial. if name in (['Ridge', 'SVR', 'NuSVR', 'NuSVC', 'RidgeClassifier', 'SVC', 'RandomizedLasso', 'LogisticRegressionCV']): continue # Tested in test_transformer_n_iter below elif (name in CROSS_DECOMPOSITION or name in ['LinearSVC', 'LogisticRegression']): continue else: # Multitask models related to ENet cannot handle # if y is mono-output. yield (check_non_transformer_estimators_n_iter, name, estimator, 'Multi' in name) def test_transformer_n_iter(): transformers = all_estimators(type_filter='transformer') for name, Estimator in transformers: estimator = Estimator() # Dependent on external solvers and hence accessing the iter # param is non-trivial. external_solver = ['Isomap', 'KernelPCA', 'LocallyLinearEmbedding', 'RandomizedLasso', 'LogisticRegressionCV'] if hasattr(estimator, "max_iter") and name not in external_solver: yield check_transformer_n_iter, name, estimator def test_get_params_invariance(): # Test for estimators that support get_params, that # get_params(deep=False) is a subset of get_params(deep=True) # Related to issue #4465 estimators = all_estimators(include_meta_estimators=False, include_other=True) for name, Estimator in estimators: if hasattr(Estimator, 'get_params'): yield check_get_params_invariance, name, Estimator
bsd-3-clause
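Editor's note: every generator test above follows the same nose-style pattern: enumerate estimator classes, then yield one (check, name, Estimator) tuple per case so each check runs and fails independently. A minimal sketch of that pattern follows; the all_estimators import matches the file above, while check_has_fit is a hypothetical check invented for illustration.

from sklearn.utils.testing import all_estimators

def check_has_fit(name, Estimator):
    # hypothetical check: every public estimator class exposes fit()
    assert hasattr(Estimator, 'fit'), name

def test_all_have_fit():
    # nose collects each yielded tuple as a separate test case
    for name, Estimator in all_estimators():
        yield check_has_fit, name, Estimator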
rgommers/statsmodels
statsmodels/stats/tests/test_statstools.py
8
10621
# TODO: Test robust skewness # TODO: Test robust kurtosis import numpy as np import pandas as pd from numpy.testing import (assert_almost_equal, assert_raises, TestCase) from statsmodels.stats.stattools import (omni_normtest, jarque_bera, durbin_watson, _medcouple_1d, medcouple, robust_kurtosis, robust_skewness) from statsmodels.stats.adnorm import normal_ad #a random array, rounded to 4 decimals x = np.array([-0.1184, -1.3403, 0.0063, -0.612, -0.3869, -0.2313, -2.8485, -0.2167, 0.4153, 1.8492, -0.3706, 0.9726, -0.1501, -0.0337, -1.4423, 1.2489, 0.9182, -0.2331, -0.6182, 0.183]) def test_durbin_watson(): #benchmark values from R car::durbinWatsonTest(x) #library("car") #> durbinWatsonTest(x) #[1] 1.95298958377419 #> durbinWatsonTest(x**2) #[1] 1.848802400319998 #> durbinWatsonTest(x[2:20]+0.5*x[1:19]) #[1] 1.09897993228779 #> durbinWatsonTest(x[2:20]+0.8*x[1:19]) #[1] 0.937241876707273 #> durbinWatsonTest(x[2:20]+0.9*x[1:19]) #[1] 0.921488912587806 st_R = 1.95298958377419 assert_almost_equal(durbin_watson(x), st_R, 14) st_R = 1.848802400319998 assert_almost_equal(durbin_watson(x**2), st_R, 14) st_R = 1.09897993228779 assert_almost_equal(durbin_watson(x[1:] + 0.5 * x[:-1]), st_R, 14) st_R = 0.937241876707273 assert_almost_equal(durbin_watson(x[1:] + 0.8 * x[:-1]), st_R, 14) st_R = 0.921488912587806 assert_almost_equal(durbin_watson(x[1:] + 0.9 * x[:-1]), st_R, 14) def test_omni_normtest(): #tests against R fBasics from scipy import stats st_pv_R = np.array( [[3.994138321207883, -1.129304302161460, 1.648881473704978], [0.1357325110375005, 0.2587694866795507, 0.0991719192710234]]) nt = omni_normtest(x) assert_almost_equal(nt, st_pv_R[:, 0], 14) st = stats.skewtest(x) assert_almost_equal(st, st_pv_R[:, 1], 14) kt = stats.kurtosistest(x) assert_almost_equal(kt, st_pv_R[:, 2], 11) st_pv_R = np.array( [[34.523210399523926, 4.429509162503833, 3.860396220444025], [3.186985686465249e-08, 9.444780064482572e-06, 1.132033129378485e-04]]) x2 = x**2 #TODO: fix precision in these test with relative tolerance nt = omni_normtest(x2) assert_almost_equal(nt, st_pv_R[:, 0], 12) st = stats.skewtest(x2) assert_almost_equal(st, st_pv_R[:, 1], 12) kt = stats.kurtosistest(x2) assert_almost_equal(kt, st_pv_R[:, 2], 12) def test_omni_normtest_axis(): #test axis of omni_normtest x = np.random.randn(25, 3) nt1 = omni_normtest(x) nt2 = omni_normtest(x, axis=0) nt3 = omni_normtest(x.T, axis=1) assert_almost_equal(nt2, nt1, decimal=13) assert_almost_equal(nt3, nt1, decimal=13) def test_jarque_bera(): #tests against R fBasics st_pv_R = np.array([1.9662677226861689, 0.3741367669648314]) jb = jarque_bera(x)[:2] assert_almost_equal(jb, st_pv_R, 14) st_pv_R = np.array([78.329987305556, 0.000000000000]) jb = jarque_bera(x**2)[:2] assert_almost_equal(jb, st_pv_R, 13) st_pv_R = np.array([5.7135750796706670, 0.0574530296971343]) jb = jarque_bera(np.log(x**2))[:2] assert_almost_equal(jb, st_pv_R, 14) st_pv_R = np.array([2.6489315748495761, 0.2659449923067881]) jb = jarque_bera(np.exp(-x**2))[:2] assert_almost_equal(jb, st_pv_R, 14) def test_shapiro(): #tests against R fBasics #testing scipy.stats from scipy.stats import shapiro st_pv_R = np.array([0.939984787255526, 0.239621898000460]) sh = shapiro(x) assert_almost_equal(sh, st_pv_R, 4) #st is ok -7.15e-06, pval agrees at -3.05e-10 st_pv_R = np.array([5.799574255943298e-01, 1.838456834681376e-06 * 1e4]) sh = shapiro(x**2) * np.array([1, 1e4]) assert_almost_equal(sh, st_pv_R, 5) st_pv_R = np.array([0.91730442643165588, 0.08793704167882448]) sh = shapiro(np.log(x**2)) 
assert_almost_equal(sh, st_pv_R, 5) #diff is [ 9.38773155e-07, 5.48221246e-08] st_pv_R = np.array([0.818361863493919373, 0.001644620895206969]) sh = shapiro(np.exp(-x**2)) assert_almost_equal(sh, st_pv_R, 5) def test_adnorm(): #tests against R fBasics st_pv = [] st_pv_R = np.array([0.5867235358882148, 0.1115380760041617]) ad = normal_ad(x) assert_almost_equal(ad, st_pv_R, 12) st_pv.append(st_pv_R) st_pv_R = np.array([2.976266267594575e+00, 8.753003709960645e-08]) ad = normal_ad(x**2) assert_almost_equal(ad, st_pv_R, 11) st_pv.append(st_pv_R) st_pv_R = np.array([0.4892557856308528, 0.1968040759316307]) ad = normal_ad(np.log(x**2)) assert_almost_equal(ad, st_pv_R, 12) st_pv.append(st_pv_R) st_pv_R = np.array([1.4599014654282669312, 0.0006380009232897535]) ad = normal_ad(np.exp(-x**2)) assert_almost_equal(ad, st_pv_R, 12) st_pv.append(st_pv_R) ad = normal_ad(np.column_stack((x, x**2, np.log(x**2), np.exp(-x**2))).T, axis=1) assert_almost_equal(ad, np.column_stack(st_pv), 11) def test_durbin_watson_pandas(): x = np.random.randn(50) x_series = pd.Series(x) assert_almost_equal(durbin_watson(x), durbin_watson(x_series), decimal=13) class TestStattools(TestCase): @classmethod def setup_class(cls): x = np.random.standard_normal(1000) e1, e2, e3, e4, e5, e6, e7 = np.percentile(x, (12.5, 25.0, 37.5, 50.0, 62.5, 75.0, 87.5)) c05, c50, c95 = np.percentile(x, (5.0, 50.0, 95.0)) f025, f25, f75, f975 = np.percentile(x, (2.5, 25.0, 75.0, 97.5)) mean = np.mean kr1 = mean(((x - mean(x)) / np.std(x))**4.0) - 3.0 kr2 = ((e7 - e5) + (e3 - e1)) / (e6 - e2) - 1.2330951154852172 kr3 = (mean(x[x > c95]) - mean(x[x < c05])) / (mean(x[x > c50]) - mean(x[x < c50])) - 2.5852271228708048 kr4 = (f975 - f025) / (f75 - f25) - 2.9058469516701639 cls.kurtosis_x = x cls.expected_kurtosis = np.array([kr1, kr2, kr3, kr4]) cls.kurtosis_constants = np.array([3.0,1.2330951154852172,2.5852271228708048,2.9058469516701639]) def test_medcouple_no_axis(self): x = np.reshape(np.arange(100.0), (50, 2)) mc = medcouple(x, axis=None) assert_almost_equal(mc, medcouple(x.ravel())) def test_medcouple_1d(self): x = np.reshape(np.arange(100.0),(50,2)) assert_raises(ValueError, _medcouple_1d, x) def test_medcouple_symmetric(self): mc = medcouple(np.arange(5.0)) assert_almost_equal(mc, 0) def test_medcouple_nonzero(self): mc = medcouple(np.array([1, 2, 7, 9, 10.0])) assert_almost_equal(mc, -0.3333333) def test_medcouple_symmetry(self): x = np.random.standard_normal(100) mcp = medcouple(x) mcn = medcouple(-x) assert_almost_equal(mcp + mcn, 0) def test_durbin_watson(self): x = np.random.standard_normal(100) dw = sum(np.diff(x)**2.0) / np.dot(x, x) assert_almost_equal(dw, durbin_watson(x)) def test_durbin_watson_2d(self): shape = (1, 10) x = np.random.standard_normal(100) dw = sum(np.diff(x)**2.0) / np.dot(x, x) x = np.tile(x[:, None], shape) assert_almost_equal(np.squeeze(dw * np.ones(shape)), durbin_watson(x)) def test_durbin_watson_3d(self): shape = (10, 1, 10) x = np.random.standard_normal(100) dw = sum(np.diff(x)**2.0) / np.dot(x, x) x = np.tile(x[None, :, None], shape) assert_almost_equal(np.squeeze(dw * np.ones(shape)), durbin_watson(x, axis=1)) def test_robust_skewness_1d(self): x = np.arange(21.0) sk = robust_skewness(x) assert_almost_equal(np.array(sk), np.zeros(4)) def test_robust_skewness_1d_2d(self): x = np.random.randn(21) y = x[:, None] sk_x = robust_skewness(x) sk_y = robust_skewness(y, axis=None) assert_almost_equal(np.array(sk_x), np.array(sk_y)) def test_robust_skewness_symmetric(self): x = np.random.standard_normal(100) x = 
np.hstack([x, np.zeros(1), -x]) sk = robust_skewness(x) assert_almost_equal(np.array(sk), np.zeros(4)) def test_robust_skewness_3d(self): x = np.random.standard_normal(100) x = np.hstack([x, np.zeros(1), -x]) x = np.tile(x, (10, 10, 1)) sk_3d = robust_skewness(x, axis=2) result = np.zeros((10, 10)) for sk in sk_3d: assert_almost_equal(sk, result) def test_robust_kurtosis_1d_2d(self): x = np.random.randn(100) y = x[:, None] kr_x = np.array(robust_kurtosis(x)) kr_y = np.array(robust_kurtosis(y, axis=None)) assert_almost_equal(kr_x, kr_y) def test_robust_kurtosis(self): x = self.kurtosis_x assert_almost_equal(np.array(robust_kurtosis(x)), self.expected_kurtosis) def test_robust_kurtosis_3d(self): x = np.tile(self.kurtosis_x, (10, 10, 1)) kurtosis = np.array(robust_kurtosis(x, axis=2)) for i, r in enumerate(self.expected_kurtosis): assert_almost_equal(r * np.ones((10, 10)), kurtosis[i]) def test_robust_kurtosis_excess_false(self): x = self.kurtosis_x expected = self.expected_kurtosis + self.kurtosis_constants kurtosis = np.array(robust_kurtosis(x, excess=False)) assert_almost_equal(expected, kurtosis) def test_robust_kurtosis_ab(self): """Test custom alpha, beta in kr3""" x = self.kurtosis_x alpha, beta = (10.0, 45.0) kurtosis = robust_kurtosis(self.kurtosis_x, ab=(alpha,beta), excess=False) num = np.mean(x[x>np.percentile(x,100.0 - alpha)]) - np.mean(x[x<np.percentile(x,alpha)]) denom = np.mean(x[x>np.percentile(x,100.0 - beta)]) - np.mean(x[x<np.percentile(x,beta)]) assert_almost_equal(kurtosis[2], num/denom) def test_robust_kurtosis_dg(self): """Test custom delta, gamma in kr4""" x = self.kurtosis_x delta, gamma = (10.0, 45.0) kurtosis = robust_kurtosis(self.kurtosis_x, dg=(delta,gamma), excess=False) q = np.percentile(x,[delta, 100.0-delta, gamma, 100.0-gamma]) assert_almost_equal(kurtosis[3], (q[1] - q[0]) / (q[3] - q[2])) if __name__ == "__main__": import nose nose.runmodule(argv=[__file__, '-vvs', '-x'], exit=False) #, '--pdb' # run_module_suite() #nose.runmodule(argv=[__file__,'-vvs','-x','--pdb', '--pdb-failure'], # exit=False)
bsd-3-clause
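Editor's note: several tests above validate durbin_watson against its direct definition, the sum of squared first differences divided by the sum of squares. A standalone sketch of that computation, mirroring the formula used in TestStattools.test_durbin_watson rather than the statsmodels internals:

import numpy as np

def durbin_watson_direct(x):
    # DW = sum((x[t] - x[t-1])**2) / sum(x[t]**2); values near 2 indicate
    # no first-order autocorrelation
    x = np.asarray(x)
    return np.sum(np.diff(x) ** 2.0) / np.dot(x, x)

x = np.random.standard_normal(100)
print(durbin_watson_direct(x))  # close to 2.0 for uncorrelated data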
SciTools/iris
lib/iris/tests/unit/plot/__init__.py
3
3875
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""Unit tests for the :mod:`iris.plot` module."""

# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests

from iris.plot import _broadcast_2d as broadcast
from iris.coords import AuxCoord
from iris.tests.stock import simple_2d, lat_lon_cube


@tests.skip_plot
class TestGraphicStringCoord(tests.GraphicsTest):
    def setUp(self):
        super().setUp()
        self.cube = simple_2d(with_bounds=True)
        self.cube.add_aux_coord(
            AuxCoord(list("abcd"), long_name="str_coord"), 1
        )
        self.lat_lon_cube = lat_lon_cube()

    def tick_loc_and_label(self, axis_name, axes=None):
        # Intentional lazy import so that subclasses can have an opportunity
        # to change the backend.
        import matplotlib.pyplot as plt

        # Draw the plot to 'fix' the ticks.
        if axes:
            axes.figure.canvas.draw()
        else:
            axes = plt.gca()
            plt.draw()
        axis = getattr(axes, axis_name)

        locations = axis.get_majorticklocs()
        labels = [tick.get_text() for tick in axis.get_ticklabels()]
        return list(zip(locations, labels))

    def assertBoundsTickLabels(self, axis, axes=None):
        actual = self.tick_loc_and_label(axis, axes)
        expected = [
            (-1.0, ""),
            (0.0, "a"),
            (1.0, "b"),
            (2.0, "c"),
            (3.0, "d"),
            (4.0, ""),
        ]
        self.assertEqual(expected, actual)

    def assertPointsTickLabels(self, axis, axes=None):
        actual = self.tick_loc_and_label(axis, axes)
        expected = [(0.0, "a"), (1.0, "b"), (2.0, "c"), (3.0, "d")]
        self.assertEqual(expected, actual)


@tests.skip_plot
class MixinCoords:
    """
    Mixin class of common plotting tests providing 2-dimensional
    permutations of coordinates and anonymous dimensions.

    """

    def _check(self, u, v, data=None):
        self.assertEqual(self.mpl_patch.call_count, 1)
        if data is not None:
            (actual_u, actual_v, actual_data), _ = self.mpl_patch.call_args
            self.assertArrayEqual(actual_data, data)
        else:
            (actual_u, actual_v), _ = self.mpl_patch.call_args
        self.assertArrayEqual(actual_u, u)
        self.assertArrayEqual(actual_v, v)

    def test_foo_bar(self):
        self.draw_func(self.cube, coords=("foo", "bar"))
        u, v = broadcast(self.foo, self.bar)
        self._check(u, v, self.data)

    def test_bar_foo(self):
        self.draw_func(self.cube, coords=("bar", "foo"))
        u, v = broadcast(self.bar, self.foo)
        self._check(u, v, self.dataT)

    def test_foo_0(self):
        self.draw_func(self.cube, coords=("foo", 0))
        u, v = broadcast(self.foo, self.bar_index)
        self._check(u, v, self.data)

    def test_1_bar(self):
        self.draw_func(self.cube, coords=(1, "bar"))
        u, v = broadcast(self.foo_index, self.bar)
        self._check(u, v, self.data)

    def test_1_0(self):
        self.draw_func(self.cube, coords=(1, 0))
        u, v = broadcast(self.foo_index, self.bar_index)
        self._check(u, v, self.data)

    def test_0_foo(self):
        self.draw_func(self.cube, coords=(0, "foo"))
        u, v = broadcast(self.bar_index, self.foo)
        self._check(u, v, self.dataT)

    def test_bar_1(self):
        self.draw_func(self.cube, coords=("bar", 1))
        u, v = broadcast(self.bar, self.foo_index)
        self._check(u, v, self.dataT)

    def test_0_1(self):
        self.draw_func(self.cube, coords=(0, 1))
        u, v = broadcast(self.bar_index, self.foo_index)
        self._check(u, v, self.dataT)
lgpl-3.0
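Editor's note: MixinCoords above asserts that each coordinate pairing reaches matplotlib as a pair of broadcast 2-D arrays. For illustration, numpy's meshgrid performs the analogous 1-D-to-2-D broadcast; this is an analogy only, not necessarily identical to the _broadcast_2d helper imported above.

import numpy as np

foo = np.array([0, 1, 2, 3])  # stand-in for the 'foo' coordinate points
bar = np.array([10, 20])      # stand-in for the 'bar' coordinate points

# Broadcast the two 1-D coordinate arrays to matching 2-D grids.
u, v = np.meshgrid(foo, bar)
print(u.shape, v.shape)  # (2, 4) (2, 4): one row of foo values per bar value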
ch3ll0v3k/scikit-learn
examples/semi_supervised/plot_label_propagation_digits_active_learning.py
294
3417
""" ======================================== Label Propagation digits active learning ======================================== Demonstrates an active learning technique to learn handwritten digits using label propagation. We start by training a label propagation model with only 10 labeled points, then we select the top five most uncertain points to label. Next, we train with 15 labeled points (original 10 + 5 new ones). We repeat this process four times to have a model trained with 30 labeled examples. A plot will appear showing the top 5 most uncertain digits for each iteration of training. These may or may not contain mistakes, but we will train the next model with their true labels. """ print(__doc__) # Authors: Clay Woolam <[email protected]> # Licence: BSD import numpy as np import matplotlib.pyplot as plt from scipy import stats from sklearn import datasets from sklearn.semi_supervised import label_propagation from sklearn.metrics import classification_report, confusion_matrix digits = datasets.load_digits() rng = np.random.RandomState(0) indices = np.arange(len(digits.data)) rng.shuffle(indices) X = digits.data[indices[:330]] y = digits.target[indices[:330]] images = digits.images[indices[:330]] n_total_samples = len(y) n_labeled_points = 10 unlabeled_indices = np.arange(n_total_samples)[n_labeled_points:] f = plt.figure() for i in range(5): y_train = np.copy(y) y_train[unlabeled_indices] = -1 lp_model = label_propagation.LabelSpreading(gamma=0.25, max_iter=5) lp_model.fit(X, y_train) predicted_labels = lp_model.transduction_[unlabeled_indices] true_labels = y[unlabeled_indices] cm = confusion_matrix(true_labels, predicted_labels, labels=lp_model.classes_) print('Iteration %i %s' % (i, 70 * '_')) print("Label Spreading model: %d labeled & %d unlabeled (%d total)" % (n_labeled_points, n_total_samples - n_labeled_points, n_total_samples)) print(classification_report(true_labels, predicted_labels)) print("Confusion matrix") print(cm) # compute the entropies of transduced label distributions pred_entropies = stats.distributions.entropy( lp_model.label_distributions_.T) # select five digit examples that the classifier is most uncertain about uncertainty_index = uncertainty_index = np.argsort(pred_entropies)[-5:] # keep track of indices that we get labels for delete_indices = np.array([]) f.text(.05, (1 - (i + 1) * .183), "model %d\n\nfit with\n%d labels" % ((i + 1), i * 5 + 10), size=10) for index, image_index in enumerate(uncertainty_index): image = images[image_index] sub = f.add_subplot(5, 5, index + 1 + (5 * i)) sub.imshow(image, cmap=plt.cm.gray_r) sub.set_title('predict: %i\ntrue: %i' % ( lp_model.transduction_[image_index], y[image_index]), size=10) sub.axis('off') # labeling 5 points, remote from labeled set delete_index, = np.where(unlabeled_indices == image_index) delete_indices = np.concatenate((delete_indices, delete_index)) unlabeled_indices = np.delete(unlabeled_indices, delete_indices) n_labeled_points += 5 f.suptitle("Active learning with Label Propagation.\nRows show 5 most " "uncertain labels to learn with the next model.") plt.subplots_adjust(0.12, 0.03, 0.9, 0.8, 0.2, 0.45) plt.show()
bsd-3-clause
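Editor's note: the active-learning loop above chooses the next points to label by the entropy of each transduced label distribution. A small sketch of that selection step on a toy probability matrix; scipy.stats.entropy is the modern spelling of the stats.distributions.entropy call used in the example, and no fitted model is required.

import numpy as np
from scipy import stats

# Toy label distributions for 4 unlabeled samples over 3 classes.
label_distributions = np.array([[0.98, 0.01, 0.01],
                                [0.40, 0.35, 0.25],
                                [0.90, 0.05, 0.05],
                                [0.34, 0.33, 0.33]])

# Entropy per sample; with a 2-D input, entropy is taken along axis 0,
# hence the transpose, exactly as in the example above.
pred_entropies = stats.entropy(label_distributions.T)

# Indices of the 2 most uncertain samples (closest to uniform).
uncertainty_index = np.argsort(pred_entropies)[-2:]
print(uncertainty_index)  # [1 3] for this toy matrix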
f3r/scikit-learn
sklearn/neighbors/approximate.py
30
22370
"""Approximate nearest neighbor search""" # Author: Maheshakya Wijewardena <[email protected]> # Joel Nothman <[email protected]> import numpy as np import warnings from scipy import sparse from .base import KNeighborsMixin, RadiusNeighborsMixin from ..base import BaseEstimator from ..utils.validation import check_array from ..utils import check_random_state from ..metrics.pairwise import pairwise_distances from ..random_projection import GaussianRandomProjection __all__ = ["LSHForest"] HASH_DTYPE = '>u4' MAX_HASH_SIZE = np.dtype(HASH_DTYPE).itemsize * 8 def _find_matching_indices(tree, bin_X, left_mask, right_mask): """Finds indices in sorted array of integers. Most significant h bits in the binary representations of the integers are matched with the items' most significant h bits. """ left_index = np.searchsorted(tree, bin_X & left_mask) right_index = np.searchsorted(tree, bin_X | right_mask, side='right') return left_index, right_index def _find_longest_prefix_match(tree, bin_X, hash_size, left_masks, right_masks): """Find the longest prefix match in tree for each query in bin_X Most significant bits are considered as the prefix. """ hi = np.empty_like(bin_X, dtype=np.intp) hi.fill(hash_size) lo = np.zeros_like(bin_X, dtype=np.intp) res = np.empty_like(bin_X, dtype=np.intp) left_idx, right_idx = _find_matching_indices(tree, bin_X, left_masks[hi], right_masks[hi]) found = right_idx > left_idx res[found] = lo[found] = hash_size r = np.arange(bin_X.shape[0]) kept = r[lo < hi] # indices remaining in bin_X mask while kept.shape[0]: mid = (lo.take(kept) + hi.take(kept)) // 2 left_idx, right_idx = _find_matching_indices(tree, bin_X.take(kept), left_masks[mid], right_masks[mid]) found = right_idx > left_idx mid_found = mid[found] lo[kept[found]] = mid_found + 1 res[kept[found]] = mid_found hi[kept[~found]] = mid[~found] kept = r[lo < hi] return res class ProjectionToHashMixin(object): """Turn a transformed real-valued array into a hash""" @staticmethod def _to_hash(projected): if projected.shape[1] % 8 != 0: raise ValueError('Require reduced dimensionality to be a multiple ' 'of 8 for hashing') # XXX: perhaps non-copying operation better out = np.packbits((projected > 0).astype(int)).view(dtype=HASH_DTYPE) return out.reshape(projected.shape[0], -1) def fit_transform(self, X, y=None): self.fit(X) return self.transform(X) def transform(self, X, y=None): return self._to_hash(super(ProjectionToHashMixin, self).transform(X)) class GaussianRandomProjectionHash(ProjectionToHashMixin, GaussianRandomProjection): """Use GaussianRandomProjection to produce a cosine LSH fingerprint""" def __init__(self, n_components=8, random_state=None): super(GaussianRandomProjectionHash, self).__init__( n_components=n_components, random_state=random_state) def _array_of_arrays(list_of_arrays): """Creates an array of array from list of arrays.""" out = np.empty(len(list_of_arrays), dtype=object) out[:] = list_of_arrays return out class LSHForest(BaseEstimator, KNeighborsMixin, RadiusNeighborsMixin): """Performs approximate nearest neighbor search using LSH forest. LSH Forest: Locality Sensitive Hashing forest [1] is an alternative method for vanilla approximate nearest neighbor search methods. LSH forest data structure has been implemented using sorted arrays and binary search and 32 bit fixed-length hashes. Random projection is used as the hash family which approximates cosine distance. 
The cosine distance is defined as ``1 - cosine_similarity``: the lowest value is 0 (identical point) but it is bounded above by 2 for the farthest points. Its value does not depend on the norm of the vector points but only on their relative angles. Read more in the :ref:`User Guide <approximate_nearest_neighbors>`. Parameters ---------- n_estimators : int (default = 10) Number of trees in the LSH Forest. min_hash_match : int (default = 4) lowest hash length to be searched when candidate selection is performed for nearest neighbors. n_candidates : int (default = 10) Minimum number of candidates evaluated per estimator, assuming enough items meet the `min_hash_match` constraint. n_neighbors : int (default = 5) Number of neighbors to be returned from query function when it is not provided to the :meth:`kneighbors` method. radius : float, optinal (default = 1.0) Radius from the data point to its neighbors. This is the parameter space to use by default for the :meth`radius_neighbors` queries. radius_cutoff_ratio : float, optional (default = 0.9) A value ranges from 0 to 1. Radius neighbors will be searched until the ratio between total neighbors within the radius and the total candidates becomes less than this value unless it is terminated by hash length reaching `min_hash_match`. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Attributes ---------- hash_functions_ : list of GaussianRandomProjectionHash objects Hash function g(p,x) for a tree is an array of 32 randomly generated float arrays with the same dimenstion as the data set. This array is stored in GaussianRandomProjectionHash object and can be obtained from ``components_`` attribute. trees_ : array, shape (n_estimators, n_samples) Each tree (corresponding to a hash function) contains an array of sorted hashed values. The array representation may change in future versions. original_indices_ : array, shape (n_estimators, n_samples) Original indices of sorted hashed values in the fitted index. References ---------- .. [1] M. Bawa, T. Condie and P. Ganesan, "LSH Forest: Self-Tuning Indexes for Similarity Search", WWW '05 Proceedings of the 14th international conference on World Wide Web, 651-660, 2005. Examples -------- >>> from sklearn.neighbors import LSHForest >>> X_train = [[5, 5, 2], [21, 5, 5], [1, 1, 1], [8, 9, 1], [6, 10, 2]] >>> X_test = [[9, 1, 6], [3, 1, 10], [7, 10, 3]] >>> lshf = LSHForest(random_state=42) >>> lshf.fit(X_train) # doctest: +NORMALIZE_WHITESPACE LSHForest(min_hash_match=4, n_candidates=50, n_estimators=10, n_neighbors=5, radius=1.0, radius_cutoff_ratio=0.9, random_state=42) >>> distances, indices = lshf.kneighbors(X_test, n_neighbors=2) >>> distances # doctest: +ELLIPSIS array([[ 0.069..., 0.149...], [ 0.229..., 0.481...], [ 0.004..., 0.014...]]) >>> indices array([[1, 2], [2, 0], [4, 0]]) """ def __init__(self, n_estimators=10, radius=1.0, n_candidates=50, n_neighbors=5, min_hash_match=4, radius_cutoff_ratio=.9, random_state=None): self.n_estimators = n_estimators self.radius = radius self.random_state = random_state self.n_candidates = n_candidates self.n_neighbors = n_neighbors self.min_hash_match = min_hash_match self.radius_cutoff_ratio = radius_cutoff_ratio def _compute_distances(self, query, candidates): """Computes the cosine distance. 
Distance is from the query to points in the candidates array. Returns argsort of distances in the candidates array and sorted distances. """ if candidates.shape == (0,): # needed since _fit_X[np.array([])] doesn't work if _fit_X sparse return np.empty(0, dtype=np.int), np.empty(0, dtype=float) if sparse.issparse(self._fit_X): candidate_X = self._fit_X[candidates] else: candidate_X = self._fit_X.take(candidates, axis=0, mode='clip') distances = pairwise_distances(query, candidate_X, metric='cosine')[0] distance_positions = np.argsort(distances) distances = distances.take(distance_positions, mode='clip', axis=0) return distance_positions, distances def _generate_masks(self): """Creates left and right masks for all hash lengths.""" tri_size = MAX_HASH_SIZE + 1 # Called once on fitting, output is independent of hashes left_mask = np.tril(np.ones((tri_size, tri_size), dtype=int))[:, 1:] right_mask = left_mask[::-1, ::-1] self._left_mask = np.packbits(left_mask).view(dtype=HASH_DTYPE) self._right_mask = np.packbits(right_mask).view(dtype=HASH_DTYPE) def _get_candidates(self, query, max_depth, bin_queries, n_neighbors): """Performs the Synchronous ascending phase. Returns an array of candidates, their distance ranks and distances. """ index_size = self._fit_X.shape[0] # Number of candidates considered including duplicates # XXX: not sure whether this is being calculated correctly wrt # duplicates from different iterations through a single tree n_candidates = 0 candidate_set = set() min_candidates = self.n_candidates * self.n_estimators while (max_depth > self.min_hash_match and (n_candidates < min_candidates or len(candidate_set) < n_neighbors)): left_mask = self._left_mask[max_depth] right_mask = self._right_mask[max_depth] for i in range(self.n_estimators): start, stop = _find_matching_indices(self.trees_[i], bin_queries[i], left_mask, right_mask) n_candidates += stop - start candidate_set.update( self.original_indices_[i][start:stop].tolist()) max_depth -= 1 candidates = np.fromiter(candidate_set, count=len(candidate_set), dtype=np.intp) # For insufficient candidates, candidates are filled. # Candidates are filled from unselected indices uniformly. if candidates.shape[0] < n_neighbors: warnings.warn( "Number of candidates is not sufficient to retrieve" " %i neighbors with" " min_hash_match = %i. Candidates are filled up" " uniformly from unselected" " indices." % (n_neighbors, self.min_hash_match)) remaining = np.setdiff1d(np.arange(0, index_size), candidates) to_fill = n_neighbors - candidates.shape[0] candidates = np.concatenate((candidates, remaining[:to_fill])) ranks, distances = self._compute_distances(query, candidates.astype(int)) return (candidates[ranks[:n_neighbors]], distances[:n_neighbors]) def _get_radius_neighbors(self, query, max_depth, bin_queries, radius): """Finds radius neighbors from the candidates obtained. Their distances from query are smaller than radius. Returns radius neighbors and distances. 
""" ratio_within_radius = 1 threshold = 1 - self.radius_cutoff_ratio total_candidates = np.array([], dtype=int) total_neighbors = np.array([], dtype=int) total_distances = np.array([], dtype=float) while (max_depth > self.min_hash_match and ratio_within_radius > threshold): left_mask = self._left_mask[max_depth] right_mask = self._right_mask[max_depth] candidates = [] for i in range(self.n_estimators): start, stop = _find_matching_indices(self.trees_[i], bin_queries[i], left_mask, right_mask) candidates.extend( self.original_indices_[i][start:stop].tolist()) candidates = np.setdiff1d(candidates, total_candidates) total_candidates = np.append(total_candidates, candidates) ranks, distances = self._compute_distances(query, candidates) m = np.searchsorted(distances, radius, side='right') positions = np.searchsorted(total_distances, distances[:m]) total_neighbors = np.insert(total_neighbors, positions, candidates[ranks[:m]]) total_distances = np.insert(total_distances, positions, distances[:m]) ratio_within_radius = (total_neighbors.shape[0] / float(total_candidates.shape[0])) max_depth = max_depth - 1 return total_neighbors, total_distances def fit(self, X, y=None): """Fit the LSH forest on the data. This creates binary hashes of input data points by getting the dot product of input points and hash_function then transforming the projection into a binary string array based on the sign (positive/negative) of the projection. A sorted array of binary hashes is created. Parameters ---------- X : array_like or sparse (CSR) matrix, shape (n_samples, n_features) List of n_features-dimensional data points. Each row corresponds to a single data point. Returns ------- self : object Returns self. """ self._fit_X = check_array(X, accept_sparse='csr') # Creates a g(p,x) for each tree self.hash_functions_ = [] self.trees_ = [] self.original_indices_ = [] rng = check_random_state(self.random_state) int_max = np.iinfo(np.int32).max for i in range(self.n_estimators): # This is g(p,x) for a particular tree. # Builds a single tree. Hashing is done on an array of data points. # `GaussianRandomProjection` is used for hashing. # `n_components=hash size and n_features=n_dim. hasher = GaussianRandomProjectionHash(MAX_HASH_SIZE, rng.randint(0, int_max)) hashes = hasher.fit_transform(self._fit_X)[:, 0] original_index = np.argsort(hashes) bin_hashes = hashes[original_index] self.original_indices_.append(original_index) self.trees_.append(bin_hashes) self.hash_functions_.append(hasher) self._generate_masks() return self def _query(self, X): """Performs descending phase to find maximum depth.""" # Calculate hashes of shape (n_samples, n_estimators, [hash_size]) bin_queries = np.asarray([hasher.transform(X)[:, 0] for hasher in self.hash_functions_]) bin_queries = np.rollaxis(bin_queries, 1) # descend phase depths = [_find_longest_prefix_match(tree, tree_queries, MAX_HASH_SIZE, self._left_mask, self._right_mask) for tree, tree_queries in zip(self.trees_, np.rollaxis(bin_queries, 1))] return bin_queries, np.max(depths, axis=0) def kneighbors(self, X, n_neighbors=None, return_distance=True): """Returns n_neighbors of approximate nearest neighbors. Parameters ---------- X : array_like or sparse (CSR) matrix, shape (n_samples, n_features) List of n_features-dimensional data points. Each row corresponds to a single query. n_neighbors : int, opitonal (default = None) Number of neighbors required. If not provided, this will return the number specified at the initialization. 
return_distance : boolean, optional (default = False) Returns the distances of neighbors if set to True. Returns ------- dist : array, shape (n_samples, n_neighbors) Array representing the cosine distances to each point, only present if return_distance=True. ind : array, shape (n_samples, n_neighbors) Indices of the approximate nearest points in the population matrix. """ if not hasattr(self, 'hash_functions_'): raise ValueError("estimator should be fitted.") if n_neighbors is None: n_neighbors = self.n_neighbors X = check_array(X, accept_sparse='csr') neighbors, distances = [], [] bin_queries, max_depth = self._query(X) for i in range(X.shape[0]): neighs, dists = self._get_candidates(X[[i]], max_depth[i], bin_queries[i], n_neighbors) neighbors.append(neighs) distances.append(dists) if return_distance: return np.array(distances), np.array(neighbors) else: return np.array(neighbors) def radius_neighbors(self, X, radius=None, return_distance=True): """Finds the neighbors within a given radius of a point or points. Return the indices and distances of some points from the dataset lying in a ball with size ``radius`` around the points of the query array. Points lying on the boundary are included in the results. The result points are *not* necessarily sorted by distance to their query point. LSH Forest being an approximate method, some true neighbors from the indexed dataset might be missing from the results. Parameters ---------- X : array_like or sparse (CSR) matrix, shape (n_samples, n_features) List of n_features-dimensional data points. Each row corresponds to a single query. radius : float Limiting distance of neighbors to return. (default is the value passed to the constructor). return_distance : boolean, optional (default = False) Returns the distances of neighbors if set to True. Returns ------- dist : array, shape (n_samples,) of arrays Each element is an array representing the cosine distances to some points found within ``radius`` of the respective query. Only present if ``return_distance=True``. ind : array, shape (n_samples,) of arrays Each element is an array of indices for neighbors within ``radius`` of the respective query. """ if not hasattr(self, 'hash_functions_'): raise ValueError("estimator should be fitted.") if radius is None: radius = self.radius X = check_array(X, accept_sparse='csr') neighbors, distances = [], [] bin_queries, max_depth = self._query(X) for i in range(X.shape[0]): neighs, dists = self._get_radius_neighbors(X[[i]], max_depth[i], bin_queries[i], radius) neighbors.append(neighs) distances.append(dists) if return_distance: return _array_of_arrays(distances), _array_of_arrays(neighbors) else: return _array_of_arrays(neighbors) def partial_fit(self, X, y=None): """ Inserts new data into the already fitted LSH Forest. Cost is proportional to new total size, so additions should be batched. Parameters ---------- X : array_like or sparse (CSR) matrix, shape (n_samples, n_features) New data point to be inserted into the LSH Forest. """ X = check_array(X, accept_sparse='csr') if not hasattr(self, 'hash_functions_'): return self.fit(X) if X.shape[1] != self._fit_X.shape[1]: raise ValueError("Number of features in X and" " fitted array does not match.") n_samples = X.shape[0] n_indexed = self._fit_X.shape[0] for i in range(self.n_estimators): bin_X = self.hash_functions_[i].transform(X)[:, 0] # gets the position to be added in the tree. positions = self.trees_[i].searchsorted(bin_X) # adds the hashed value into the tree. 
self.trees_[i] = np.insert(self.trees_[i], positions, bin_X) # add the entry into the original_indices_. self.original_indices_[i] = np.insert(self.original_indices_[i], positions, np.arange(n_indexed, n_indexed + n_samples)) # adds the entry into the input_array. if sparse.issparse(X) or sparse.issparse(self._fit_X): self._fit_X = sparse.vstack((self._fit_X, X)) else: self._fit_X = np.row_stack((self._fit_X, X)) return self
bsd-3-clause
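Editor's note: LSHForest above finds candidates whose top h hash bits match the query via two np.searchsorted calls on a sorted hash array, one with the low bits cleared and one with them set. A toy version of that prefix lookup on 8-bit hashes follows; the function and parameters are invented for illustration, whereas the real code precomputes 32-bit masks.

import numpy as np

def prefix_range(tree, query, h, bits=8):
    # Keep the top `h` bits of `query`; all stored hashes sharing that
    # prefix form a contiguous run in the sorted array `tree`.
    left_mask = (~0 << (bits - h)) & ((1 << bits) - 1)  # top h bits set
    right_mask = (1 << (bits - h)) - 1                  # low bits set
    left = np.searchsorted(tree, query & left_mask)
    right = np.searchsorted(tree, query | right_mask, side='right')
    return left, right

tree = np.sort(np.array([0b00010110, 0b10110100, 0b10111111, 0b11010001]))
lo, hi = prefix_range(tree, 0b10110000, h=4)
print(tree[lo:hi])  # the stored hashes whose top 4 bits are 1011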
jcpeterson/Dallinger
setup.py
1
1637
"""Install Dallinger as a command line utility.""" import os from setuptools import setup setup_args = dict( name='dallinger', packages=['dallinger'], version="4.0.0", description='Laboratory automation for the behavioral and social sciences', url='http://github.com/Dallinger/Dallinger', maintainer='Jordan Suchow', maintainer_email='[email protected]', license='MIT', keywords=['science', 'cultural evolution', 'experiments', 'psychology'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.6', ], include_package_data=True, zip_safe=False, entry_points={ 'console_scripts': [ 'dallinger = dallinger.command_line:dallinger', ], 'dallinger.experiments': [], }, extras_require={ 'data': [ "networkx==1.11", "odo==0.5.0", "openpyxl==2.4.11", # 2.5 is incompatible with tablib "pandas==0.22.0", "tablib==0.11.5", ], 'jupyter': [ "jupyter", "ipywidgets", ], } ) # If not on Heroku, install setuptools-markdown. try: os.environ["DYNO"] except KeyError: setup_args.update({ "setup_requires": ['setuptools-markdown==0.2'], "long_description_markdown_filename": 'README.md', }) setup(**setup_args)
mit
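The try/except probe of os.environ["DYNO"] in the setup.py above is how the script detects Heroku, which sets that variable on its dynos. A sketch of the equivalent membership test, assuming the same setup_args dict:

import os

# Equivalent to the try/except KeyError probe above: Heroku sets DYNO,
# so its absence implies a local (non-Heroku) install.
setup_args = {}
if "DYNO" not in os.environ:
    setup_args.update({
        "setup_requires": ["setuptools-markdown==0.2"],
        "long_description_markdown_filename": "README.md",
    })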
fabioticconi/scikit-learn
sklearn/linear_model/tests/test_perceptron.py
378
1815
import numpy as np import scipy.sparse as sp from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_raises from sklearn.utils import check_random_state from sklearn.datasets import load_iris from sklearn.linear_model import Perceptron iris = load_iris() random_state = check_random_state(12) indices = np.arange(iris.data.shape[0]) random_state.shuffle(indices) X = iris.data[indices] y = iris.target[indices] X_csr = sp.csr_matrix(X) X_csr.sort_indices() class MyPerceptron(object): def __init__(self, n_iter=1): self.n_iter = n_iter def fit(self, X, y): n_samples, n_features = X.shape self.w = np.zeros(n_features, dtype=np.float64) self.b = 0.0 for t in range(self.n_iter): for i in range(n_samples): if self.predict(X[i])[0] != y[i]: self.w += y[i] * X[i] self.b += y[i] def project(self, X): return np.dot(X, self.w) + self.b def predict(self, X): X = np.atleast_2d(X) return np.sign(self.project(X)) def test_perceptron_accuracy(): for data in (X, X_csr): clf = Perceptron(n_iter=30, shuffle=False) clf.fit(data, y) score = clf.score(data, y) assert_true(score >= 0.7) def test_perceptron_correctness(): y_bin = y.copy() y_bin[y != 1] = -1 clf1 = MyPerceptron(n_iter=2) clf1.fit(X, y_bin) clf2 = Perceptron(n_iter=2, shuffle=False) clf2.fit(X, y_bin) assert_array_almost_equal(clf1.w, clf2.coef_.ravel()) def test_undefined_methods(): clf = Perceptron() for meth in ("predict_proba", "predict_log_proba"): assert_raises(AttributeError, lambda x: getattr(clf, x), meth)
bsd-3-clause
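MyPerceptron above is a reference implementation of the classic mistake-driven update: when a point is misclassified, add y*x to the weights and y to the bias. A self-contained toy run of that rule; the data values are invented for illustration:

import numpy as np

# Toy data: two linearly separable clusters, labels in {-1, +1}.
X = np.array([[2.0, 1.0], [1.5, 2.0], [-1.0, -1.5], [-2.0, -0.5]])
y = np.array([1, 1, -1, -1])

w, b = np.zeros(2), 0.0
for _ in range(10):                          # a few passes are plenty here
    for xi, yi in zip(X, y):
        if np.sign(np.dot(xi, w) + b) != yi:  # mistake => move the boundary
            w += yi * xi
            b += yi

print(np.sign(X.dot(w) + b))  # expected: [ 1.  1. -1. -1.]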
bobbymckinney/seebeck_measurement
old versions/Seebeck_HighTemp_CMDLine.py
1
34381
#! /usr/bin/python # -*- coding: utf-8 -*- """ Created: 2016-02-09 @author: Bobby McKinney ([email protected]) """ import os import numpy as np import matplotlib.pyplot as plt import minimalmodbus as modbus # For communicating with the cn7500s import omegacn7500 # Driver for cn7500s under minimalmodbus, adds a few easy commands import visa # pyvisa, essential for communicating with the Keithley import time from datetime import datetime # for getting the current date and time import exceptions #============================================================================== version = '1.0 (2016-02-09)' # Keeps Windows from complaining that the port is already open: modbus.CLOSE_PORT_AFTER_EACH_CALL = True #ResourceManager for visa instrument control ResourceManager = visa.ResourceManager() ############################################################################### class Keithley_2700: ''' Used for the matrix card operations. ''' #-------------------------------------------------------------------------- def __init__(self, instr): self.ctrl = ResourceManager.open_resource(instr) #end init #-------------------------------------------------------------------------- def fetch(self, channel): """ Scan the channel and take a reading """ measure = False while (not measure): try: self.ctrl.write(":ROUTe:SCAN:INTernal (@ %s)" % (channel)) # Specify Channel #keithley.write(":SENSe1:FUNCtion 'TEMPerature'") # Specify Data type self.ctrl.write(":ROUTe:SCAN:LSELect INTernal") # Scan Selected Channel time.sleep(.1) self.ctrl.write(":ROUTe:SCAN:LSELect NONE") # Stop Scan time.sleep(.1) data = self.ctrl.query(":FETCh?") time.sleep(.1) data = float(str(data)[0:15]) measure = True except exceptions.ValueError as VE: print(VE) measure = False #end while return data # Fetches Reading #end def #-------------------------------------------------------------------------- def openAllChannels(self): self.ctrl.write("ROUTe:OPEN:ALL") #end def #end class ############################################################################### ############################################################################### class PID(omegacn7500.OmegaCN7500): #-------------------------------------------------------------------------- def __init__(self, portname, slaveaddress): omegacn7500.OmegaCN7500.__init__(self, portname, slaveaddress) #end init #-------------------------------------------------------------------------- # Commands for easy reference: # Use .write_register(command, value) and .read_register(command) # All register values can be found in the Manual or Instruction Sheet. # You must convert each address from Hex to Decimal. 
control = 4101 # Register for control method pIDcontrol = 0 # Value for PID control method pIDparam = 4124 # Register for PID parameter selection pIDparam_Auto = 4 # Value for Auto PID tCouple = 4100 # Register for setting the temperature sensor type tCouple_K = 0 # K type thermocouple heatingCoolingControl = 4102 # Register for Heating/Cooling control selection heating = 0 # Value for Heating setting #end class ############################################################################### ############################################################################### class Main: def __init__(self): self.Get_User_Input() self.open_files() self.Setup() self.abort_ID = 0 self.start = time.time() self.delay = 1.0 self.plotnumber = 0 try: for self.avgtemp in self.measureList: self.dT = 0 self.plotnumber +=1 self.timecalclist = [] self.Vchromelcalclist = [] self.Valumelcalclist = [] self.dTcalclist = [] self.avgTcalclist = [] print "\n****\nSet avg temp to %f C\n****" %(self.avgtemp) print "set sample A temp to %f" %(self.avgtemp) while True: try: self.sampleApid.set_setpoint(self.avgtemp) break except IOError: print 'IOError: communication failure' #end while print "set sample B temp to %f" %(self.avgtemp) while True: try: self.sampleBpid.set_setpoint(self.avgtemp) break except IOError: print 'IOError: communication failure' #end while self.recenttempA = [] self.recenttempAtime=[] self.recenttempB = [] self.recenttempBtime=[] self.stabilityA = '-' self.stabilityB = '-' while True: self.data_measurement() self.write_data_to_file('status') time.sleep(5) if self.abort_ID==1: break if (self.tol == 'OK' and self.stable == 'OK'): for self.dT in self.dTlist: print "\n****\nSet delta temp to %f C\n\n" %(self.dT) print "set sample A temp to %f" %(self.avgtemp+self.dT/2.0) while True: try: self.sampleApid.set_setpoint(self.avgtemp+self.dT/2.0) break except IOError: print 'IOError: communication failure' #end while print "set sample B temp to %f" %(self.avgtemp-self.dT/2.0) while True: try: self.sampleBpid.set_setpoint(self.avgtemp-self.dT/2.0) break except IOError: print 'IOError: communication failure' #end while self.recenttempA = [] self.recenttempAtime=[] self.recenttempB = [] self.recenttempBtime=[] self.stabilityA = '-' self.stabilityB = '-' while True: self.data_measurement() self.write_data_to_file('status') time.sleep(3) if self.abort_ID==1: break if (self.tol == 'OK' and self.stable == 'OK'): for n in range(self.measurement_number): # start measurement print "\n****\nseebeck measurement" print 'measurement number: ', n self.data_measurement() self.write_data_to_file('status') self.write_data_to_file('data') if self.abort_ID==1: break #end for if self.abort_ID==1: break self.tol = 'NO' self.stable = 'NO' break #end if # end while if self.abort_ID==1: break #end for break #end if # end while self.process_data() if self.abort_ID==1: break #end for except KeyboardInterrupt: print '\n****\nprogram interrupted\nsaving files at current location\n****\n' self.save_files() print "set sample A temp to %f" %(20) while True: try: self.sampleApid.set_setpoint(20) break except IOError: print 'IOError: communication failure' #end while print "set sample B temp to %f" %(20) while True: try: self.sampleBpid.set_setpoint(20) break except IOError: print 'IOError: communication failure' #end while self.sampleApid.stop() self.sampleBpid.stop() print "Huzzah! Your program finished! You are awesome, sir or maam!" 
#end def #-------------------------------------------------------------------------- def Setup(self): """ Prepare the Keithley to take data on the specified channels: """ # Define Keithley instrument port: self.k2700 = Keithley_2700('GPIB0::1::INSTR') # Define the ports for the PID self.sampleApid = PID('/dev/cu.usbserial', 1) # Top heater self.sampleBpid = PID('/dev/cu.usbserial', 2) # Bottom heater self.blockApid = PID('/dev/cu.usbserial', 3) # Top block self.blockBpid = PID('/dev/cu.usbserial', 4) # Bottom block """ Prepare the Keithley for operation: """ self.k2700.openAllChannels() # Define the type of measurement for the channels we are looking at: self.k2700.ctrl.write(":SENSe1:FUNCtion 'VOLTage:DC', (@ 107,108)") self.k2700.ctrl.write(":TRIGger:SEQuence1:DELay 0") self.k2700.ctrl.write(":TRIGger:SEQuence1:COUNt 1") # Set the count rate # Sets the acquisition rate of the measurements self.k2700.ctrl.write(":SENSe1:VOLTage:DC:NPLCycles 4, (@ 107,108)") # Sets integration period based on frequency """ Prepare the PID for operation: """ # Set the control method to PID self.sampleApid.write_register(PID.control, PID.pIDcontrol) self.sampleBpid.write_register(PID.control, PID.pIDcontrol) # Set the PID to auto parameter self.sampleApid.write_register(PID.pIDparam, PID.pIDparam_Auto) self.sampleBpid.write_register(PID.pIDparam, PID.pIDparam_Auto) # Set the thermocouple type self.sampleApid.write_register(PID.tCouple, PID.tCouple_K) self.sampleBpid.write_register(PID.tCouple, PID.tCouple_K) self.blockApid.write_register(PID.tCouple, PID.tCouple_K) self.blockBpid.write_register(PID.tCouple, PID.tCouple_K) # Set the control to heating only self.sampleApid.write_register(PID.heatingCoolingControl, PID.heating) self.sampleBpid.write_register(PID.heatingCoolingControl, PID.heating) # Run the controllers self.sampleApid.run() self.sampleBpid.run() #end def #-------------------------------------------------------------------------- def Get_User_Input(self): print "Get Input From User" self.oscillation = input("Please enter PID oscillation in deg C (example: 6 or 8): ") self.oscillation = float(self.oscillation) self.dTlist = [self.oscillation*i/2 for i in range(0,-3,-1)+range(-1,3)+range(1,-1,-1)] #self.oscillation = 4 #self.dTlist = [-4,0,4] self.tolerance = input("Please enter PID tolerance in deg C (example: 1): ") self.tolerance = float(self.tolerance) self.stability_threshold = input("Please enter stability threshold in deg C per min (example: .25): ") self.stability_threshold = float(self.stability_threshold) / 60 self.measurement_number = input("Please enter measurement number at each delta temp (example: 3): ") self.measurement_number = int(self.measurement_number) self.measureList = input("Please enter the temperatures to measure as a list (example: [50, 75, ...]): ") self.measureList = [min(temp, 600) for temp in self.measureList] # cap setpoints at the 600 C safety limit print "Your data folder will be saved to Desktop automatically" self.folder_name = raw_input("Please enter name for folder: ") self.folder_name = str(self.folder_name) if self.folder_name == '': date = str(datetime.now()) self.folder_name = 'Seebeck_Data %s.%s.%s' % (date[0:13], date[14:16], date[17:19]) #end if self.make_new_folder(self.folder_name) #end def #-------------------------------------------------------------------------- def make_new_folder(self, folder_name): self.filePath = "/Users/tobererlab1/Desktop/" + folder_name found = False if not os.path.exists(self.filePath): os.makedirs(self.filePath) os.chdir(self.filePath) #end
if else: n = 1 while found == False: path = self.filePath + ' - ' + str(n) if os.path.exists(path): n = n + 1 #end if else: os.makedirs(path) os.chdir(path) n = 1 found = True #end else #end while #end else if found == True: self.filePath = path #end if #end def #-------------------------------------------------------------------------- def open_files(self): self.datafile = open('Data.csv', 'w') # opens file for writing/overwriting self.statusfile = open('Status.csv','w') self.seebeckfile = open('Seebeck.csv','w') begin = datetime.now() # Current date and time self.datafile.write('Start Time: ' + str(begin) + '\n') self.statusfile.write('Start Time: ' + str(begin) + '\n') self.seebeckfile.write('Start Time: ' + str(begin) + '\n') dataheaders = 'time (s), tempA (C), tempB (C), avgtemp (C), deltatemp (C), Vchromel (uV), Valumel (uV)\n' self.datafile.write(dataheaders) statusheaders1 = 'time (s), sampletempA (C), samplesetpointA (C), blocktempA (C), stabilityA (C/min), sampletempB (C), samplesetpointB (C), blocktempB (C), stabilityB (C/min),' statusheaders2 = 'chromelvoltageraw (uV), chromelvoltagecalc (uV), alumelvoltageraw(C), alumelvoltagecalc (uV), tolerance, stability\n' self.statusfile.write(statusheaders1 + statusheaders2) seebeckheaders = 'time(s),temperature (C),seebeck_chromel (uV/K),offset_chromel (uV),R^2_chromel,seebeck_alumel (uV/K),offset_alumel (uV),R^2_alumel\n' self.seebeckfile.write(seebeckheaders) #end def #-------------------------------------------------------------------------- def data_measurement(self): # Takes and writes to file the data on the Keithley # The only change between blocks like this one is the specific # channel on the Keithley that is being measured. self.sampletempA = float(self.sampleApid.get_pv()) self.samplesetpointA = float(self.sampleApid.get_setpoint()) self.blocktempA = float(self.blockApid.get_pv()) self.time_sampletempA = time.time() - self.start print "time: %.2f s\ttempA: %.2f C\tsetpointA: %.2f C" % (self.time_sampletempA, self.sampletempA,self.samplesetpointA) time.sleep(self.delay) self.sampletempB = float(self.sampleBpid.get_pv()) self.samplesetpointB = float(self.sampleBpid.get_setpoint()) self.blocktempB = float(self.blockBpid.get_pv()) self.time_sampletempB = time.time() - self.start print "time: %.2f s\ttempB: %.2f C\tsetpointB: %.2f C" % (self.time_sampletempB, self.sampletempB, self.samplesetpointB) time.sleep(self.delay) self.Vchromelraw = float(self.k2700.fetch('107'))*10**6 self.Vchromelcalc = self.voltage_Correction(self.Vchromelraw,self.sampletempA,self.sampletempB, 'chromel') self.time_Vchromel = time.time() - self.start print "time: %.2f s\tvoltage (Ch): %f uV" % (self.time_Vchromel, self.Vchromelcalc) time.sleep(self.delay) self.Valumelraw = float(self.k2700.fetch('108'))*10**6 self.Valumelcalc = self.voltage_Correction(self.Valumelraw,self.sampletempA,self.sampletempB, 'alumel') self.time_Valumel = time.time() - self.start print "time: %.2f s\tvoltage (Al): %f uV" % (self.time_Valumel, self.Valumelcalc) time.sleep(self.delay) self.Valumelraw2 = float(self.k2700.fetch('108'))*10**6 self.Valumelcalc2 = self.voltage_Correction(self.Valumelraw2,self.sampletempA,self.sampletempB, 'alumel') self.time_Valumel2 = time.time() - self.start print "time: %.2f s\tvoltage (Al): %f uV" % (self.time_Valumel2, self.Valumelcalc2) time.sleep(self.delay) self.Vchromelraw2 = float(self.k2700.fetch('107'))*10**6 self.Vchromelcalc2 = self.voltage_Correction(self.Vchromelraw2,self.sampletempA,self.sampletempB, 'chromel') self.time_Vchromel2 = 
time.time() - self.start print "time: %.2f s\tvoltage (Ch): %f uV" % (self.time_Vchromel2, self.Vchromelcalc2) time.sleep(self.delay) self.sampletempB2 = float(self.sampleBpid.get_pv()) self.samplesetpointB = float(self.sampleBpid.get_setpoint()) self.blocktempB = float(self.blockBpid.get_pv()) self.time_sampletempB2 = time.time() - self.start print "time: %.2f s\ttempB: %.2f C\tsetpointB: %.2f C" % (self.time_sampletempB2, self.sampletempB2,self.samplesetpointB) time.sleep(self.delay) self.sampletempA2 = float(self.sampleApid.get_pv()) self.samplesetpointA = float(self.sampleApid.get_setpoint()) self.blocktempA = float(self.blockApid.get_pv()) self.time_sampletempA2 = time.time() - self.start print "time: %.2f s\ttempA: %.2f C\tsetpointA: %.2f C" % (self.time_sampletempA2, self.sampletempA2,self.samplesetpointA) self.time = ( self.time_sampletempA + self.time_sampletempB + self.time_Vchromel + self.time_Valumel + self.time_Valumel2 + self.time_Vchromel2 + self.time_sampletempB2 + self.time_sampletempA2 ) / 8 #check stability of PID if (len(self.recenttempA)<3): self.recenttempA.append(self.sampletempA) self.recenttempAtime.append(self.time_sampletempA) self.recenttempA.append(self.sampletempA2) self.recenttempAtime.append(self.time_sampletempA2) #end if else: self.recenttempA.pop(0) self.recenttempAtime.pop(0) self.recenttempA.pop(0) self.recenttempAtime.pop(0) self.recenttempA.append(self.sampletempA) self.recenttempAtime.append(self.time_sampletempA) self.recenttempA.append(self.sampletempA2) self.recenttempAtime.append(self.time_sampletempA2) self.stabilityA = self.getStability(self.recenttempA,self.recenttempAtime) print "stability A: %.4f C/min" % (self.stabilityA*60) #end else if (len(self.recenttempB)<3): self.recenttempB.append(self.sampletempB) self.recenttempBtime.append(self.time_sampletempB) self.recenttempB.append(self.sampletempB2) self.recenttempBtime.append(self.time_sampletempB2) #end if else: self.recenttempB.pop(0) self.recenttempBtime.pop(0) self.recenttempB.pop(0) self.recenttempBtime.pop(0) self.recenttempB.append(self.sampletempB) self.recenttempBtime.append(self.time_sampletempB) self.recenttempB.append(self.sampletempB2) self.recenttempBtime.append(self.time_sampletempB2) self.stabilityB = self.getStability(self.recenttempB,self.recenttempBtime) print "stability B: %.4f C/min" % (self.stabilityB*60) #end else self.safety_check() self.check_status() #end def #-------------------------------------------------------------------------- def voltage_Correction(self, raw_voltage, tempA, tempB, side): ''' raw_data must be in uV ''' # Kelvin conversion for polynomial correction.
dT = tempA - tempB avgT = (tempA + tempB)/2 + 273.15 # Correction for effect from Thermocouple Seebeck out = self.alpha(avgT, side)*dT - raw_voltage return out #end def #-------------------------------------------------------------------------- def alpha(self, x, side): ''' x = avgT alpha in uV/K ''' ### If Chromel, taken from Chromel_Seebeck.txt if side == 'chromel': if ( x >= 270 and x < 700): alpha = -2467.61114613*x**0 + 55.6028987953*x**1 + \ -0.552110359087*x**2 + 0.00320554346691*x**3 + \ -1.20477254034e-05*x**4 + 3.06344710205e-08*x**5 + \ -5.33914758601e-11*x**6 + 6.30044607727e-14*x**7 + \ -4.8197269477e-17*x**8 + 2.15928374212e-20*x**9 + \ -4.30421084091e-24*x**10 #end if elif ( x >= 700 and x < 1599): alpha = 1165.13254764*x**0 + -9.49622421414*x**1 + \ 0.0346344390853*x**2 + -7.27785048931e-05*x**3 + \ 9.73981855547e-08*x**4 + -8.64369652227e-11*x**5 + \ 5.10080771762e-14*x**6 + -1.93318725171e-17*x**7 + \ 4.27299905603e-21*x**8 + -4.19761748937e-25*x**9 #end if else: print "Error in voltage correction, out of range." #end if (Chromel) ### If Alumel, taken from Alumel_Seebeck.txt elif side == 'alumel': if ( x >= 270 and x < 570): alpha = -3465.28789643*x**0 + 97.4007289124*x**1 + \ -1.17546754681*x**2 + 0.00801252041119*x**3 + \ -3.41263237031e-05*x**4 + 9.4391002358e-08*x**5 + \ -1.69831949233e-10*x**6 + 1.91977765586e-13*x**7 + \ -1.2391854625e-16*x**8 + 3.48576207577e-20*x**9 #end if elif ( x >= 570 and x < 1599): alpha = 254.644633774*x**0 + -2.17639940109*x**1 + \ 0.00747127856327*x**2 + -1.41920634198e-05*x**3 + \ 1.61971537881e-08*x**4 + -1.14428153299e-11*x**5 + \ 4.969263632e-15*x**6 + -1.27526741699e-18*x**7 + \ 1.80403838088e-22*x**8 + -1.23699936952e-26*x**9 #end if else: print "Error in voltage correction, out of range." #end if (Alumel) else: print "Error in voltage correction." 
#end else return alpha #end def #-------------------------------------------------------------------------- def getStability(self, temps, times): coeffs = np.polyfit(times, temps, 1) # Polynomial Coefficients results = coeffs.tolist() return results[0] #end def #-------------------------------------------------------------------------- def safety_check(self): print 'safety check' if self.sampletempA > 600 or self.sampletempA2 > 600: self.abort_ID = 1 print 'Safety Failure: Sample Temp A greater than 600' #end if if self.sampletempB > 600 or self.sampletempB2 > 600: self.abort_ID = 1 print 'Safety Failure: Sample Temp B greater than Max Limit' #end if if self.blocktempA > 600: self.abort_ID = 1 print 'Safety Failure: Block Temp A greater than Max Limit' #end if if self.blocktempB > 600: self.abort_ID = 1 print 'Safety Failure: Block Temp B greater than Max Limit' #end if if self.blocktempA > self.sampletempA + 100 or self.blocktempA > self.sampletempA2 + 100: self.abort_ID = 1 print 'Safety Failure: Block Temp A 100 C greater than Sample Temp A' #end if if self.blocktempB > self.sampletempB + 100 or self.blocktempB > self.sampletempB2 + 100: self.abort_ID = 1 print 'Safety Failure: Block Temp B 100 C greater than Sample Temp B' #end if if self.sampletempA > self.blocktempA + 100 or self.sampletempA2 > self.blocktempA + 100: self.abort_ID = 1 print 'Safety Failure: Sample Temp A 100 C greater than Block Temp A' #end if if self.sampletempB > self.blocktempB + 100 or self.sampletempB2 > self.blocktempB + 100: self.abort_ID = 1 print 'Safety Failure: Sample Temp B 100 C greater than Block Temp B' #end if #end def #-------------------------------------------------------------------------- def check_status(self): print 'check tolerance' tempA = (self.sampletempA + self.sampletempA2)/2 tempB = (self.sampletempB + self.sampletempB2)/2 self.tolA = (np.abs(tempA-(self.avgtemp+self.dT/2.0)) < self.tolerance) self.tolB = (np.abs(tempB-(self.avgtemp-self.dT/2.0)) < self.tolerance) print 'tolerance A: ',self.tolA print 'tolerance B:', self.tolB if (self.tolA and self.tolB): self.tol = 'OK' #end if else: self.tol = 'NO' #end else print 'check stability' if (self.stabilityA != '-'): self.stableA = (np.abs(self.stabilityA) < self.stability_threshold) print 'stable A: ',self.stableA #end if else: self.stableA = False print 'stable A: ',self.stableA #end else if (self.stabilityB != '-'): self.stableB = (np.abs(self.stabilityB) < self.stability_threshold) print 'stable B: ',self.stableB #end if else: self.stableB = False print 'stable B: ',self.stableB #end else if (self.stableA and self.stableB): self.stable = 'OK' #end if else: self.stable = 'NO' #end else print "\ntolerance: %s\nstable: %s\n" % (self.tol, self.stable) #end def #-------------------------------------------------------------------------- def process_data(self): print '\n***\n' print 'process data to get seebeck coefficient' time = np.average(self.timecalclist) avgT = np.average(self.avgTcalclist) dTchromellist = self.dTcalclist dTalumellist = self.dTcalclist results_chromel = {} results_alumel = {} coeffs_chromel = np.polyfit(dTchromellist, self.Vchromelcalclist, 1) coeffs_alumel = np.polyfit(dTalumellist,self.Valumelcalclist,1) # Polynomial Coefficients polynomial_chromel = coeffs_chromel.tolist() polynomial_alumel = coeffs_alumel.tolist() seebeck_chromel = polynomial_chromel[0] offset_chromel = polynomial_chromel[1] seebeck_alumel = polynomial_alumel[0] offset_alumel = polynomial_alumel[1] print 'seebeck (chromel): %.3f
uV/K'%(seebeck_chromel) print 'seebeck (alumel): %.3f uV/K'%(seebeck_alumel) print '\n***\n' # Calculate coefficient of determination (r-squared): p_chromel = np.poly1d(coeffs_chromel) p_alumel = np.poly1d(coeffs_alumel) # fitted values: yhat_chromel = p_chromel(dTchromellist) yhat_alumel = p_alumel(dTalumellist) # mean of values: ybar_chromel = np.sum(self.Vchromelcalclist)/len(self.Vchromelcalclist) ybar_alumel = np.sum(self.Valumelcalclist)/len(self.Valumelcalclist) # regression sum of squares: ssreg_chromel = np.sum((yhat_chromel-ybar_chromel)**2) # or sum([ (yihat - ybar)**2 for yihat in yhat]) ssreg_alumel = np.sum((yhat_alumel-ybar_alumel)**2) # total sum of squares: sstot_chromel = np.sum((self.Vchromelcalclist - ybar_chromel)**2) sstot_alumel = np.sum((self.Valumelcalclist - ybar_alumel)**2) # or sum([ (yi - ybar)**2 for yi in y]) rsquared_chromel = ssreg_chromel / sstot_chromel rsquared_alumel = ssreg_alumel / sstot_alumel self.seebeckfile.write('%.3f,%.5f,%.5f,%.5f,%.5f,%.5f,%.5f,%.5f\n'%(time,avgT,seebeck_chromel,offset_chromel,rsquared_chromel,seebeck_alumel,offset_alumel,rsquared_alumel)) fitchromel = {} fitalumel = {} fitchromel['polynomial'] = polynomial_chromel fitalumel['polynomial'] = polynomial_alumel fitchromel['r-squared'] = rsquared_chromel fitalumel['r-squared'] = rsquared_alumel celsius = u"\u2103" self.create_backup_file(str(self.plotnumber)+'_'+str(avgT)+ 'C_backupfile.csv',self.timecalclist,self.avgTcalclist,self.dTcalclist,self.Vchromelcalclist,self.Valumelcalclist) self.create_plot(dTalumellist,dTchromellist,self.Valumelcalclist,self.Vchromelcalclist,fitalumel,fitchromel,str(self.plotnumber)+'_'+str(avgT)+ 'C') #end def #-------------------------------------------------------------------------- def create_backup_file(self, title,tlist,avgTlist,dTlist,Vchlist,Vallist): backup_folder = self.filePath + '/Seebeck Backup Files/' if not os.path.exists(backup_folder): os.makedirs(backup_folder) #end if tempfile = open('Seebeck Backup Files/' + title,'w') tempfile.write(title + '\n') tempfile.write('time,avgT,dT,Vch,Val\n') for i in range(len(tlist)): tempfile.write('%.3f,%.4f,%.4f,%.6f,%.6f\n'%(tlist[i],avgTlist[i],dTlist[i],Vchlist[i],Vallist[i])) #end for tempfile.close() #end def #-------------------------------------------------------------------------- def create_plot(self, xalumel, xchromel, yalumel, ychromel, fitalumel, fitchromel, title): print 'create seebeck plot' dpi = 400 plt.ioff() # Create Plot: fig = plt.figure(self.plotnumber, dpi=dpi) ax = fig.add_subplot(111) ax.grid() ax.set_title(title) ax.set_xlabel("dT (K)") ax.set_ylabel("dV (uV)") # Plot data points: ax.scatter(xalumel, yalumel, color='r', marker='.', label="alumel Voltage") ax.scatter(xchromel, ychromel, color='b', marker='.', label="chromel Voltage") # Overlay linear fits: coeffsalumel = fitalumel['polynomial'] coeffschromel = fitchromel['polynomial'] p_alumel = np.poly1d(coeffsalumel) p_chromel = np.poly1d(coeffschromel) xp = np.linspace(min(xalumel+xchromel), max(xalumel+xchromel), 5000) alumel_eq = 'dV = %.2f*(dT) + %.2f' % (coeffsalumel[0], coeffsalumel[1]) chromel_eq = 'dV = %.2f*(dT) + %.2f' % (coeffschromel[0], coeffschromel[1]) ax.plot(xp, p_alumel(xp), '-', c='#FF9900', label="alumel Voltage Fit\n %s" % alumel_eq) ax.plot(xp, p_chromel(xp), '-', c='g', label="chromel Voltage Fit\n %s" % chromel_eq) ax.legend(loc='upper left', fontsize='10') # Save: plot_folder = self.filePath + '/Seebeck Plots/' if not os.path.exists(plot_folder): os.makedirs(plot_folder) fig.savefig('%s.png' % 
(plot_folder + title) , dpi=dpi) plt.close() #end def #-------------------------------------------------------------------------- def write_data_to_file(self, file): if file == 'status': print('Write status to file\n') self.statusfile.write('%.1f,'%(self.time)) self.statusfile.write('%.2f,%.2f,%.2f,' %(self.sampletempA2,self.samplesetpointA,self.blocktempA)) self.statusfile.write(str(self.stabilityA)+',') self.statusfile.write('%.2f,%.2f,%.2f,' %(self.sampletempB2,self.samplesetpointB,self.blocktempB)) self.statusfile.write(str(self.stabilityB)+',') self.statusfile.write('%.3f,%.3f,%.3f,%.3f,'%(self.Vchromelraw2, self.Vchromelcalc2,self.Valumelraw2, self.Valumelcalc2)) self.statusfile.write(str(self.tol)+','+str(self.stable)+'\n') #end if elif file == 'data': print('Write data to file\n') ta = (self.sampletempA + self.sampletempA2)/2 tb = (self.sampletempB + self.sampletempB2)/2 avgt = (ta + tb)/2 dt = ta-tb vchromel = (self.Vchromelcalc + self.Vchromelcalc2)/2 valumel = (self.Valumelcalc + self.Valumelcalc2)/2 self.datafile.write('%.3f,' %(self.time)) self.datafile.write('%.4f,%.4f,%.4f,%.4f,' % (ta, tb, avgt, dt) ) self.datafile.write('%.6f,%.6f\n' % (vchromel,valumel)) self.timecalclist.append(self.time) self.Vchromelcalclist.append(vchromel) self.Valumelcalclist.append(valumel) self.dTcalclist.append(dt) self.avgTcalclist.append(avgt) #end elif #end def #-------------------------------------------------------------------------- def save_files(self): print('\nSave Files\n') self.datafile.close() self.statusfile.close() self.seebeckfile.close() #end def #end class ############################################################################### #============================================================================== if __name__=='__main__': runprogram = Main() #end if
gpl-3.0
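process_data above extracts the Seebeck coefficient as the slope of a straight-line fit of corrected thermoelectric voltage against temperature difference, with R^2 as the fit diagnostic. A minimal sketch of that calculation; the numbers are illustrative only, and 1 - SS_res/SS_tot used here equals the script's SS_reg/SS_tot for a least-squares line:

import numpy as np

# Slope of dV vs. dT is the Seebeck coefficient, the intercept is the
# voltage offset, and R^2 measures fit quality. Values are made up.
dT = np.array([-4.0, -2.0, 0.0, 2.0, 4.0])        # K
dV = np.array([78.1, 39.3, 0.4, -38.8, -78.5])    # uV

slope, offset = np.polyfit(dT, dV, 1)             # uV/K, uV
yhat = np.polyval([slope, offset], dT)
ss_res = np.sum((dV - yhat) ** 2)
ss_tot = np.sum((dV - dV.mean()) ** 2)
r_squared = 1.0 - ss_res / ss_tot

print("seebeck: %.2f uV/K, offset: %.2f uV, R^2: %.4f"
      % (slope, offset, r_squared))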
jstoxrocky/statsmodels
statsmodels/tools/_testing.py
29
4809
"""Testing helper functions Warning: current status experimental, mostly copy paste Warning: these functions will be changed without warning as the need during refactoring arises. The first group of functions provide consistency checks """ import numpy as np from numpy.testing import assert_allclose, assert_ from nose import SkipTest # the following are copied from # statsmodels.base.tests.test_generic_methods.CheckGenericMixin # and only adjusted to work as standalone functions def check_ttest_tvalues(results): # test that t_test has same results a params, bse, tvalues, ... res = results mat = np.eye(len(res.params)) tt = res.t_test(mat) assert_allclose(tt.effect, res.params, rtol=1e-12) # TODO: tt.sd and tt.tvalue are 2d also for single regressor, squeeze assert_allclose(np.squeeze(tt.sd), res.bse, rtol=1e-10) assert_allclose(np.squeeze(tt.tvalue), res.tvalues, rtol=1e-12) assert_allclose(tt.pvalue, res.pvalues, rtol=5e-10) assert_allclose(tt.conf_int(), res.conf_int(), rtol=1e-10) # test params table frame returned by t_test table_res = np.column_stack((res.params, res.bse, res.tvalues, res.pvalues, res.conf_int())) table1 = np.column_stack((tt.effect, tt.sd, tt.tvalue, tt.pvalue, tt.conf_int())) table2 = tt.summary_frame().values assert_allclose(table2, table_res, rtol=1e-12) # move this to test_attributes ? assert_(hasattr(res, 'use_t')) tt = res.t_test(mat[0]) tt.summary() # smoke test for #1323 assert_allclose(tt.pvalue, res.pvalues[0], rtol=5e-10) def check_ftest_pvalues(results): res = results use_t = res.use_t k_vars = len(res.params) # check default use_t pvals = [res.wald_test(np.eye(k_vars)[k], use_f=use_t).pvalue for k in range(k_vars)] assert_allclose(pvals, res.pvalues, rtol=5e-10, atol=1e-25) # sutomatic use_f based on results class use_t pvals = [res.wald_test(np.eye(k_vars)[k]).pvalue for k in range(k_vars)] assert_allclose(pvals, res.pvalues, rtol=5e-10, atol=1e-25) # label for pvalues in summary string_use_t = 'P>|z|' if use_t is False else 'P>|t|' summ = str(res.summary()) assert_(string_use_t in summ) # try except for models that don't have summary2 try: summ2 = str(res.summary2()) except AttributeError: summ2 = None if summ2 is not None: assert_(string_use_t in summ2) # TODO The following is not (yet) guaranteed across models #@knownfailureif(True) def check_fitted(results): # ignore wrapper for isinstance check from statsmodels.genmod.generalized_linear_model import GLMResults from statsmodels.discrete.discrete_model import DiscreteResults # FIXME: work around GEE has no wrapper if hasattr(results, '_results'): results = results._results else: results = results if (isinstance(results, GLMResults) or isinstance(results, DiscreteResults)): raise SkipTest res = results fitted = res.fittedvalues assert_allclose(res.model.endog - fitted, res.resid, rtol=1e-12) assert_allclose(fitted, res.predict(), rtol=1e-12) def check_predict_types(results): res = results # squeeze to make 1d for single regressor test case p_exog = np.squeeze(np.asarray(res.model.exog[:2])) # ignore wrapper for isinstance check from statsmodels.genmod.generalized_linear_model import GLMResults from statsmodels.discrete.discrete_model import DiscreteResults # FIXME: work around GEE has no wrapper if hasattr(results, '_results'): results = results._results else: results = results if (isinstance(results, GLMResults) or isinstance(results, DiscreteResults)): # SMOKE test only TODO res.predict(p_exog) res.predict(p_exog.tolist()) res.predict(p_exog[0].tolist()) else: fitted = res.fittedvalues[:2] 
assert_allclose(fitted, res.predict(p_exog), rtol=1e-12) # this needs reshape to column-vector: assert_allclose(fitted, res.predict(np.squeeze(p_exog).tolist()), rtol=1e-12) # only one prediction: assert_allclose(fitted[:1], res.predict(p_exog[0].tolist()), rtol=1e-12) assert_allclose(fitted[:1], res.predict(p_exog[0]), rtol=1e-12) # predict doesn't preserve DataFrame, e.g. dot converts to ndarray #import pandas #predicted = res.predict(pandas.DataFrame(p_exog)) #assert_(isinstance(predicted, pandas.DataFrame)) #assert_allclose(predicted, fitted, rtol=1e-12)
bsd-3-clause
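check_ttest_tvalues above relies on the property that t_test with an identity contrast matrix reproduces a results object's params table. A small self-contained demonstration with OLS; the seeded synthetic data is for illustration and is not part of the helper module:

import numpy as np
import statsmodels.api as sm
from numpy.testing import assert_allclose

# t_test with an identity contrast should reproduce params/bse/tvalues.
rng = np.random.RandomState(0)
X = sm.add_constant(rng.randn(100, 2))
y = X.dot(np.array([1.0, 2.0, -0.5])) + rng.randn(100)

res = sm.OLS(y, X).fit()
tt = res.t_test(np.eye(len(res.params)))

assert_allclose(tt.effect, res.params, rtol=1e-12)
assert_allclose(np.squeeze(tt.sd), res.bse, rtol=1e-10)
assert_allclose(np.squeeze(tt.tvalue), res.tvalues, rtol=1e-12)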
SheffieldML/GPy
GPy/plotting/matplot_dep/maps.py
15
5721
# Copyright (c) 2012, GPy authors (see AUTHORS.txt). # Licensed under the BSD 3-clause license (see LICENSE.txt) import numpy as np try: from matplotlib import pyplot as pb from matplotlib.patches import Polygon from matplotlib.collections import PatchCollection #from matplotlib import cm try: __IPYTHON__ pb.ion() except NameError: pass except: pass import re def plot(shape_records,facecolor='w',edgecolor='k',linewidths=.5, ax=None,xlims=None,ylims=None): """ Plot the geometry of a shapefile :param shape_records: geometry and attributes list :type shape_records: ShapeRecord object (output of a shapeRecords() method) :param facecolor: color to be used to fill in polygons :param edgecolor: color to be used for lines :param ax: axes to plot on. :type ax: axes handle """ #Axes handle if ax is None: fig = pb.figure() ax = fig.add_subplot(111) #Iterate over shape_records for srec in shape_records: points = np.vstack(srec.shape.points) sparts = srec.shape.parts par = list(sparts) + [points.shape[0]] polygs = [] for pj in range(len(sparts)): polygs.append(Polygon(points[par[pj]:par[pj+1]])) ax.add_collection(PatchCollection(polygs,facecolor=facecolor,edgecolor=edgecolor, linewidths=linewidths)) #Plot limits _box = np.vstack([srec.shape.bbox for srec in shape_records]) minx,miny = np.min(_box[:,:2],0) maxx,maxy = np.max(_box[:,2:],0) if xlims is not None: minx,maxx = xlims if ylims is not None: miny,maxy = ylims ax.set_xlim(minx,maxx) ax.set_ylim(miny,maxy) def string_match(sf,regex,field=2): """ Return the geometry and attributes of a shapefile whose fields match a regular expression given :param sf: shapefile :type sf: shapefile object :regex: regular expression to match :type regex: string :field: field number to be matched with the regex :type field: integer """ index = [] shape_records = [] for rec in enumerate(sf.shapeRecords()): m = re.search(regex,rec[1].record[field]) if m is not None: index.append(rec[0]) shape_records.append(rec[1]) return index,shape_records def bbox_match(sf,bbox,inside_only=True): """ Return the geometry and attributes of a shapefile that lie within (or intersect) a bounding box :param sf: shapefile :type sf: shapefile object :param bbox: bounding box :type bbox: list of floats [x_min,y_min,x_max,y_max] :inside_only: True if the objects returned are those that lie within the bbox and False if the objects returned are any that intersect the bbox :type inside_only: Boolean """ A,B,C,D = bbox index = [] shape_records = [] for rec in enumerate(sf.shapeRecords()): a,b,c,d = rec[1].shape.bbox if inside_only: if A <= a and B <= b and C >= c and D >= d: index.append(rec[0]) shape_records.append(rec[1]) else: cond1 = A <= a and B <= b and C >= a and D >= b cond2 = A <= c and B <= d and C >= c and D >= d cond3 = A <= a and D >= d and C >= a and B <= d cond4 = A <= c and D >= b and C >= c and B <= b cond5 = a <= C and b <= B and d >= D cond6 = c <= A and b <= B and d >= D cond7 = d <= B and a <= A and c >= C cond8 = b <= D and a <= A and c >= C if cond1 or cond2 or cond3 or cond4 or cond5 or cond6 or cond7 or cond8: index.append(rec[0]) shape_records.append(rec[1]) return index,shape_records def plot_bbox(sf,bbox,inside_only=True): """ Plot the geometry of a shapefile within a bbox :param sf: shapefile :type sf: shapefile object :param bbox: bounding box :type bbox: list of floats [x_min,y_min,x_max,y_max] :inside_only: True if the objects returned are those that lie within the bbox and False if the objects returned are any that intersect the bbox :type inside_only: Boolean """ 
index,shape_records = bbox_match(sf,bbox,inside_only) A,B,C,D = bbox plot(shape_records,xlims=[bbox[0],bbox[2]],ylims=[bbox[1],bbox[3]]) def plot_string_match(sf,regex,field,**kwargs): """ Plot the geometry of a shapefile whose fields match a regular expression given :param sf: shapefile :type sf: shapefile object :regex: regular expression to match :type regex: string :field: field number to be matched with the regex :type field: integer """ index,shape_records = string_match(sf,regex,field) plot(shape_records,**kwargs) def new_shape_string(sf,name,regex,field=2,type=None): import shapefile if type is None: type = shapefile.POINT newshp = shapefile.Writer(shapeType = sf.shapeType) newshp.autoBalance = 1 index,shape_records = string_match(sf,regex,field) _fi = [sf.fields[j] for j in index] for f in _fi: newshp.field(name=f[0],fieldType=f[1],size=f[2],decimal=f[3]) _shre = shape_records for sr in _shre: _points = [] _parts = [] for point in sr.shape.points: _points.append(point) _parts.append(_points) newshp.line(parts=_parts) newshp.records.append(sr.record) print(len(sr.record)) newshp.save(name) print(index) def apply_bbox(sf,ax): """ Use bbox as xlim and ylim in ax """ limits = sf.bbox xlim = limits[0],limits[2] ylim = limits[1],limits[3] ax.set_xlim(xlim) ax.set_ylim(ylim)
bsd-3-clause
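bbox_match above spells out the intersection case with eight explicit corner and edge conditions. For axis-aligned boxes, a common and simpler equivalent is the interval-overlap test sketched below; the function name is hypothetical, and the boxes use the same [x_min, y_min, x_max, y_max] shapefile convention:

def bboxes_intersect(query, other):
    """Axis-aligned bounding-box overlap test.

    Two boxes overlap iff their x-intervals and y-intervals both
    overlap; touching edges count, matching the >=/<= comparisons above.
    """
    A, B, C, D = query   # query box
    a, b, c, d = other   # candidate box
    return not (c < A or a > C or d < B or b > D)

print(bboxes_intersect([0, 0, 2, 2], [2, 1, 3, 3]))  # True (shared edge)
print(bboxes_intersect([0, 0, 2, 2], [3, 3, 4, 4]))  # False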
idlead/scikit-learn
sklearn/cluster/tests/test_affinity_propagation.py
341
2620
""" Testing for Clustering methods """ import numpy as np from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.cluster.affinity_propagation_ import AffinityPropagation from sklearn.cluster.affinity_propagation_ import affinity_propagation from sklearn.datasets.samples_generator import make_blobs from sklearn.metrics import euclidean_distances n_clusters = 3 centers = np.array([[1, 1], [-1, -1], [1, -1]]) + 10 X, _ = make_blobs(n_samples=60, n_features=2, centers=centers, cluster_std=0.4, shuffle=True, random_state=0) def test_affinity_propagation(): # Affinity Propagation algorithm # Compute similarities S = -euclidean_distances(X, squared=True) preference = np.median(S) * 10 # Compute Affinity Propagation cluster_centers_indices, labels = affinity_propagation( S, preference=preference) n_clusters_ = len(cluster_centers_indices) assert_equal(n_clusters, n_clusters_) af = AffinityPropagation(preference=preference, affinity="precomputed") labels_precomputed = af.fit(S).labels_ af = AffinityPropagation(preference=preference, verbose=True) labels = af.fit(X).labels_ assert_array_equal(labels, labels_precomputed) cluster_centers_indices = af.cluster_centers_indices_ n_clusters_ = len(cluster_centers_indices) assert_equal(np.unique(labels).size, n_clusters_) assert_equal(n_clusters, n_clusters_) # Test also with no copy _, labels_no_copy = affinity_propagation(S, preference=preference, copy=False) assert_array_equal(labels, labels_no_copy) # Test input validation assert_raises(ValueError, affinity_propagation, S[:, :-1]) assert_raises(ValueError, affinity_propagation, S, damping=0) af = AffinityPropagation(affinity="unknown") assert_raises(ValueError, af.fit, X) def test_affinity_propagation_predict(): # Test AffinityPropagation.predict af = AffinityPropagation(affinity="euclidean") labels = af.fit_predict(X) labels2 = af.predict(X) assert_array_equal(labels, labels2) def test_affinity_propagation_predict_error(): # Test exception in AffinityPropagation.predict # Not fitted. af = AffinityPropagation(affinity="euclidean") assert_raises(ValueError, af.predict, X) # Predict not supported when affinity="precomputed". S = np.dot(X, X.T) af = AffinityPropagation(affinity="precomputed") af.fit(S) assert_raises(ValueError, af.predict, X)
bsd-3-clause
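A minimal usage demo mirroring test_affinity_propagation above: with the median-based preference, the three blobs should come back as three exemplars, which is exactly what the test asserts:

import numpy as np
from sklearn.cluster import AffinityPropagation
from sklearn.datasets.samples_generator import make_blobs
from sklearn.metrics import euclidean_distances

# Same setup as the test: three well-separated blobs.
centers = np.array([[1, 1], [-1, -1], [1, -1]]) + 10
X, _ = make_blobs(n_samples=60, n_features=2, centers=centers,
                  cluster_std=0.4, shuffle=True, random_state=0)

S = -euclidean_distances(X, squared=True)
af = AffinityPropagation(preference=np.median(S) * 10).fit(X)
print(len(af.cluster_centers_indices_))  # 3, per the test's assertion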
elijah513/scikit-learn
sklearn/decomposition/tests/test_pca.py
199
10949
import numpy as np from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_raises from sklearn import datasets from sklearn.decomposition import PCA from sklearn.decomposition import RandomizedPCA from sklearn.decomposition.pca import _assess_dimension_ from sklearn.decomposition.pca import _infer_dimension_ iris = datasets.load_iris() def test_pca(): # PCA on dense arrays pca = PCA(n_components=2) X = iris.data X_r = pca.fit(X).transform(X) np.testing.assert_equal(X_r.shape[1], 2) X_r2 = pca.fit_transform(X) assert_array_almost_equal(X_r, X_r2) pca = PCA() pca.fit(X) assert_almost_equal(pca.explained_variance_ratio_.sum(), 1.0, 3) X_r = pca.transform(X) X_r2 = pca.fit_transform(X) assert_array_almost_equal(X_r, X_r2) # Test get_covariance and get_precision with n_components == n_features # with n_components < n_features and with n_components == 0 for n_components in [0, 2, X.shape[1]]: pca.n_components = n_components pca.fit(X) cov = pca.get_covariance() precision = pca.get_precision() assert_array_almost_equal(np.dot(cov, precision), np.eye(X.shape[1]), 12) def test_whitening(): # Check that PCA output has unit-variance rng = np.random.RandomState(0) n_samples = 100 n_features = 80 n_components = 30 rank = 50 # some low rank data with correlated features X = np.dot(rng.randn(n_samples, rank), np.dot(np.diag(np.linspace(10.0, 1.0, rank)), rng.randn(rank, n_features))) # the component-wise variance of the first 50 features is 3 times the # mean component-wise variance of the remaingin 30 features X[:, :50] *= 3 assert_equal(X.shape, (n_samples, n_features)) # the component-wise variance is thus highly varying: assert_almost_equal(X.std(axis=0).std(), 43.9, 1) for this_PCA, copy in [(x, y) for x in (PCA, RandomizedPCA) for y in (True, False)]: # whiten the data while projecting to the lower dim subspace X_ = X.copy() # make sure we keep an original across iterations. pca = this_PCA(n_components=n_components, whiten=True, copy=copy) # test fit_transform X_whitened = pca.fit_transform(X_.copy()) assert_equal(X_whitened.shape, (n_samples, n_components)) X_whitened2 = pca.transform(X_) assert_array_almost_equal(X_whitened, X_whitened2) assert_almost_equal(X_whitened.std(axis=0), np.ones(n_components)) assert_almost_equal(X_whitened.mean(axis=0), np.zeros(n_components)) X_ = X.copy() pca = this_PCA(n_components=n_components, whiten=False, copy=copy).fit(X_) X_unwhitened = pca.transform(X_) assert_equal(X_unwhitened.shape, (n_samples, n_components)) # in that case the output components still have varying variances assert_almost_equal(X_unwhitened.std(axis=0).std(), 74.1, 1) # we always center, so no test for non-centering. 
def test_explained_variance(): # Check that PCA output has unit-variance rng = np.random.RandomState(0) n_samples = 100 n_features = 80 X = rng.randn(n_samples, n_features) pca = PCA(n_components=2).fit(X) rpca = RandomizedPCA(n_components=2, random_state=42).fit(X) assert_array_almost_equal(pca.explained_variance_, rpca.explained_variance_, 1) assert_array_almost_equal(pca.explained_variance_ratio_, rpca.explained_variance_ratio_, 3) # compare to empirical variances X_pca = pca.transform(X) assert_array_almost_equal(pca.explained_variance_, np.var(X_pca, axis=0)) X_rpca = rpca.transform(X) assert_array_almost_equal(rpca.explained_variance_, np.var(X_rpca, axis=0)) def test_pca_check_projection(): # Test that the projection of data is correct rng = np.random.RandomState(0) n, p = 100, 3 X = rng.randn(n, p) * .1 X[:10] += np.array([3, 4, 5]) Xt = 0.1 * rng.randn(1, p) + np.array([3, 4, 5]) Yt = PCA(n_components=2).fit(X).transform(Xt) Yt /= np.sqrt((Yt ** 2).sum()) assert_almost_equal(np.abs(Yt[0][0]), 1., 1) def test_pca_inverse(): # Test that the projection of data can be inverted rng = np.random.RandomState(0) n, p = 50, 3 X = rng.randn(n, p) # spherical data X[:, 1] *= .00001 # make middle component relatively small X += [5, 4, 3] # make a large mean # same check that we can find the original data from the transformed # signal (since the data is almost of rank n_components) pca = PCA(n_components=2).fit(X) Y = pca.transform(X) Y_inverse = pca.inverse_transform(Y) assert_almost_equal(X, Y_inverse, decimal=3) # same as above with whitening (approximate reconstruction) pca = PCA(n_components=2, whiten=True) pca.fit(X) Y = pca.transform(X) Y_inverse = pca.inverse_transform(Y) assert_almost_equal(X, Y_inverse, decimal=3) def test_pca_validation(): X = [[0, 1], [1, 0]] for n_components in [-1, 3]: assert_raises(ValueError, PCA(n_components).fit, X) def test_randomized_pca_check_projection(): # Test that the projection by RandomizedPCA on dense data is correct rng = np.random.RandomState(0) n, p = 100, 3 X = rng.randn(n, p) * .1 X[:10] += np.array([3, 4, 5]) Xt = 0.1 * rng.randn(1, p) + np.array([3, 4, 5]) Yt = RandomizedPCA(n_components=2, random_state=0).fit(X).transform(Xt) Yt /= np.sqrt((Yt ** 2).sum()) assert_almost_equal(np.abs(Yt[0][0]), 1., 1) def test_randomized_pca_check_list(): # Test that the projection by RandomizedPCA on list data is correct X = [[1.0, 0.0], [0.0, 1.0]] X_transformed = RandomizedPCA(n_components=1, random_state=0).fit(X).transform(X) assert_equal(X_transformed.shape, (2, 1)) assert_almost_equal(X_transformed.mean(), 0.00, 2) assert_almost_equal(X_transformed.std(), 0.71, 2) def test_randomized_pca_inverse(): # Test that RandomizedPCA is inversible on dense data rng = np.random.RandomState(0) n, p = 50, 3 X = rng.randn(n, p) # spherical data X[:, 1] *= .00001 # make middle component relatively small X += [5, 4, 3] # make a large mean # same check that we can find the original data from the transformed signal # (since the data is almost of rank n_components) pca = RandomizedPCA(n_components=2, random_state=0).fit(X) Y = pca.transform(X) Y_inverse = pca.inverse_transform(Y) assert_almost_equal(X, Y_inverse, decimal=2) # same as above with whitening (approximate reconstruction) pca = RandomizedPCA(n_components=2, whiten=True, random_state=0).fit(X) Y = pca.transform(X) Y_inverse = pca.inverse_transform(Y) relative_max_delta = (np.abs(X - Y_inverse) / np.abs(X).mean()).max() assert_almost_equal(relative_max_delta, 0.11, decimal=2) def test_pca_dim(): # Check automated 
dimensionality setting rng = np.random.RandomState(0) n, p = 100, 5 X = rng.randn(n, p) * .1 X[:10] += np.array([3, 4, 5, 1, 2]) pca = PCA(n_components='mle').fit(X) assert_equal(pca.n_components, 'mle') assert_equal(pca.n_components_, 1) def test_infer_dim_1(): # TODO: explain what this is testing # Or at least use explicit variable names... n, p = 1000, 5 rng = np.random.RandomState(0) X = (rng.randn(n, p) * .1 + rng.randn(n, 1) * np.array([3, 4, 5, 1, 2]) + np.array([1, 0, 7, 4, 6])) pca = PCA(n_components=p) pca.fit(X) spect = pca.explained_variance_ ll = [] for k in range(p): ll.append(_assess_dimension_(spect, k, n, p)) ll = np.array(ll) assert_greater(ll[1], ll.max() - .01 * n) def test_infer_dim_2(): # TODO: explain what this is testing # Or at least use explicit variable names... n, p = 1000, 5 rng = np.random.RandomState(0) X = rng.randn(n, p) * .1 X[:10] += np.array([3, 4, 5, 1, 2]) X[10:20] += np.array([6, 0, 7, 2, -1]) pca = PCA(n_components=p) pca.fit(X) spect = pca.explained_variance_ assert_greater(_infer_dimension_(spect, n, p), 1) def test_infer_dim_3(): n, p = 100, 5 rng = np.random.RandomState(0) X = rng.randn(n, p) * .1 X[:10] += np.array([3, 4, 5, 1, 2]) X[10:20] += np.array([6, 0, 7, 2, -1]) X[30:40] += 2 * np.array([-1, 1, -1, 1, -1]) pca = PCA(n_components=p) pca.fit(X) spect = pca.explained_variance_ assert_greater(_infer_dimension_(spect, n, p), 2) def test_infer_dim_by_explained_variance(): X = iris.data pca = PCA(n_components=0.95) pca.fit(X) assert_equal(pca.n_components, 0.95) assert_equal(pca.n_components_, 2) pca = PCA(n_components=0.01) pca.fit(X) assert_equal(pca.n_components, 0.01) assert_equal(pca.n_components_, 1) rng = np.random.RandomState(0) # more features than samples X = rng.rand(5, 20) pca = PCA(n_components=.5).fit(X) assert_equal(pca.n_components, 0.5) assert_equal(pca.n_components_, 2) def test_pca_score(): # Test that probabilistic PCA scoring yields a reasonable score n, p = 1000, 3 rng = np.random.RandomState(0) X = rng.randn(n, p) * .1 + np.array([3, 4, 5]) pca = PCA(n_components=2) pca.fit(X) ll1 = pca.score(X) h = -0.5 * np.log(2 * np.pi * np.exp(1) * 0.1 ** 2) * p np.testing.assert_almost_equal(ll1 / h, 1, 0) def test_pca_score2(): # Test that probabilistic PCA correctly separated different datasets n, p = 100, 3 rng = np.random.RandomState(0) X = rng.randn(n, p) * .1 + np.array([3, 4, 5]) pca = PCA(n_components=2) pca.fit(X) ll1 = pca.score(X) ll2 = pca.score(rng.randn(n, p) * .2 + np.array([3, 4, 5])) assert_greater(ll1, ll2) # Test that it gives the same scores if whiten=True pca = PCA(n_components=2, whiten=True) pca.fit(X) ll2 = pca.score(X) assert_almost_equal(ll1, ll2) def test_pca_score3(): # Check that probabilistic PCA selects the right model n, p = 200, 3 rng = np.random.RandomState(0) Xl = (rng.randn(n, p) + rng.randn(n, 1) * np.array([3, 4, 5]) + np.array([1, 0, 7])) Xt = (rng.randn(n, p) + rng.randn(n, 1) * np.array([3, 4, 5]) + np.array([1, 0, 7])) ll = np.zeros(p) for k in range(p): pca = PCA(n_components=k) pca.fit(Xl) ll[k] = pca.score(Xt) assert_true(ll.argmax() == 1)
bsd-3-clause
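test_whitening above asserts that PCA(whiten=True) returns components with zero mean and approximately unit standard deviation. A compact demonstration of that property on seeded synthetic data:

import numpy as np
from sklearn.decomposition import PCA

# Whitening rescales each principal component to (approximately) unit
# variance, which is what test_whitening checks above.
rng = np.random.RandomState(0)
X = rng.randn(200, 5) * np.array([10.0, 5.0, 2.0, 1.0, 0.5])

Xw = PCA(n_components=3, whiten=True).fit_transform(X)
print(Xw.mean(axis=0).round(6))  # ~[0. 0. 0.]
print(Xw.std(axis=0).round(6))   # ~[1. 1. 1.]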
AgainstWind/python-demos
mathematics/function_visual.py
1
1714
#! /usr/bin/python # -*- coding: UTF-8 -*- import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D def printTwoDimension(): plt.figure(1) ax = plt.subplot(111) x = np.linspace(0, np.pi * 2, 200) # 200 evenly spaced points between 0 and 2*pi # r = 2cosθ r = 2 * np.cos(x) # radius ax.plot(r * np.cos(x), r * np.sin(x)) # r = 1 r = 1 ax.plot(r * np.cos(x), r * np.sin(x)) plt.show() def printThreeDimension(): fig = plt.figure(1) ax = fig.add_subplot(1, 1, 1, projection='3d') # use a 3D projection for the axes t = np.linspace(0, 4, 200) # 200 evenly spaced points between 0 and 4 theta = t * 2 * np.pi # angle # r(t)=(sint,cost,t) z = t x = np.sin(theta) y = np.cos(theta) ax.plot(x, y, z, label='r(t)') # r'(t) z = 1 x = np.cos(theta) y = -np.sin(theta) ax.plot(x, y, z, label='r\'(t)') ax.legend() plt.show() def printCurve(): fig = plt.figure() ax = Axes3D(fig) X = np.arange(-2, 2, 0.1) Y = np.arange(-2, 2, 0.1) X, Y = np.meshgrid(X, Y) Z = X ** 2 + Y ** 2 ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='rainbow') plt.show() #printThreeDimension() #printCurve() def fun(): x = np.arange(-50,50,0.1) y = np.arange(-50,50,0.1) z = np.power(x,2)+np.sin(y) return (x,y,z) def functionShow(): fig = plt.figure() ax = Axes3D(fig) X = np.arange(0, 3, 0.03) Y = np.arange(0, 3, 0.03) X, Y = np.meshgrid(X, Y) #Z = np.cos(X) + np.sin(Y)* 2 Z = np.sin(X**X+Y**Y)/(X**X+Y**Y+1) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='rainbow') plt.show() functionShow()
apache-2.0
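printCurve and functionShow above both rely on np.meshgrid to expand two 1-D axes into matching 2-D coordinate grids before evaluating Z elementwise. A tiny demo of the shapes involved:

import numpy as np

# meshgrid turns two 1-D axes into matching 2-D coordinate grids, so
# Z = f(X, Y) can be evaluated elementwise over the whole surface.
x = np.array([0.0, 1.0, 2.0])
y = np.array([10.0, 20.0])
X, Y = np.meshgrid(x, y)

print(X.shape, Y.shape)  # (2, 3) (2, 3)
print(X)                 # each row repeats x
print(Y)                 # each column repeats y
Z = X ** 2 + Y ** 2      # same pattern as printCurve above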
ammarkhann/FinalSeniorCode
lib/python2.7/site-packages/pandas/core/resample.py
3
44151
from datetime import timedelta import numpy as np import warnings import copy from textwrap import dedent import pandas as pd from pandas.core.base import AbstractMethodError, GroupByMixin from pandas.core.groupby import (BinGrouper, Grouper, _GroupBy, GroupBy, SeriesGroupBy, groupby, PanelGroupBy) from pandas.tseries.frequencies import to_offset, is_subperiod, is_superperiod from pandas.core.indexes.datetimes import DatetimeIndex, date_range from pandas.core.indexes.timedeltas import TimedeltaIndex from pandas.tseries.offsets import DateOffset, Tick, Day, _delta_to_nanoseconds from pandas.core.indexes.period import PeriodIndex, period_range import pandas.core.common as com import pandas.core.algorithms as algos import pandas.compat as compat from pandas.compat.numpy import function as nv from pandas._libs import lib, tslib from pandas._libs.lib import Timestamp from pandas._libs.period import IncompatibleFrequency from pandas.util._decorators import Appender from pandas.core.generic import _shared_docs _shared_docs_kwargs = dict() class Resampler(_GroupBy): """ Class for resampling datetimelike data, a groupby-like operation. See aggregate, transform, and apply functions on this object. It's easiest to use obj.resample(...) to use Resampler. Parameters ---------- obj : pandas object groupby : a TimeGrouper object axis : int, default 0 kind : str or None 'period', 'timestamp' to override default index treatement Notes ----- After resampling, see aggregate, apply, and transform functions. Returns ------- a Resampler of the appropriate type """ # to the groupby descriptor _attributes = ['freq', 'axis', 'closed', 'label', 'convention', 'loffset', 'base', 'kind'] # API compat of allowed attributes _deprecated_valids = _attributes + ['__doc__', '_cache', '_attributes', 'binner', 'grouper', 'groupby', 'sort', 'kind', 'squeeze', 'keys', 'group_keys', 'as_index', 'exclusions', '_groupby'] # don't raise deprecation warning on attributes starting with these # patterns - prevents warnings caused by IPython introspection _deprecated_valid_patterns = ['_ipython', '_repr'] # API compat of disallowed attributes _deprecated_invalids = ['iloc', 'loc', 'ix', 'iat', 'at'] def __init__(self, obj, groupby=None, axis=0, kind=None, **kwargs): self.groupby = groupby self.keys = None self.sort = True self.axis = axis self.kind = kind self.squeeze = False self.group_keys = True self.as_index = True self.exclusions = set() self.binner = None self.grouper = None if self.groupby is not None: self.groupby._set_grouper(self._convert_obj(obj), sort=True) def __unicode__(self): """ provide a nice str repr of our rolling object """ attrs = ["{k}={v}".format(k=k, v=getattr(self.groupby, k)) for k in self._attributes if getattr(self.groupby, k, None) is not None] return "{klass} [{attrs}]".format(klass=self.__class__.__name__, attrs=', '.join(attrs)) @property def obj(self): return self.groupby.obj @property def ax(self): return self.groupby.ax @property def _typ(self): """ masquerade for compat as a Series or a DataFrame """ if isinstance(self._selected_obj, pd.Series): return 'series' return 'dataframe' @property def _from_selection(self): """ is the resampling from a DataFrame column or MultiIndex level """ # upsampling and PeriodIndex resampling do not work # with selection, this state used to catch and raise an error return (self.groupby is not None and (self.groupby.key is not None or self.groupby.level is not None)) def _deprecated(self, op): warnings.warn(("\n.resample() is now a deferred operation\n" "You called 
{op}(...) on this deferred object " "which materialized it into a {klass}\nby implicitly " "taking the mean. Use .resample(...).mean() " "instead").format(op=op, klass=self._typ), FutureWarning, stacklevel=3) return self.mean() def _make_deprecated_binop(op): # op is a string def _evaluate_numeric_binop(self, other): result = self._deprecated(op) return getattr(result, op)(other) return _evaluate_numeric_binop def _make_deprecated_unary(op, name): # op is a callable def _evaluate_numeric_unary(self): result = self._deprecated(name) return op(result) return _evaluate_numeric_unary def __array__(self): return self._deprecated('__array__').__array__() __gt__ = _make_deprecated_binop('__gt__') __ge__ = _make_deprecated_binop('__ge__') __lt__ = _make_deprecated_binop('__lt__') __le__ = _make_deprecated_binop('__le__') __eq__ = _make_deprecated_binop('__eq__') __ne__ = _make_deprecated_binop('__ne__') __add__ = __radd__ = _make_deprecated_binop('__add__') __sub__ = __rsub__ = _make_deprecated_binop('__sub__') __mul__ = __rmul__ = _make_deprecated_binop('__mul__') __floordiv__ = __rfloordiv__ = _make_deprecated_binop('__floordiv__') __truediv__ = __rtruediv__ = _make_deprecated_binop('__truediv__') if not compat.PY3: __div__ = __rdiv__ = _make_deprecated_binop('__div__') __neg__ = _make_deprecated_unary(lambda x: -x, '__neg__') __pos__ = _make_deprecated_unary(lambda x: x, '__pos__') __abs__ = _make_deprecated_unary(lambda x: np.abs(x), '__abs__') __inv__ = _make_deprecated_unary(lambda x: -x, '__inv__') def __getattr__(self, attr): if attr in self._internal_names_set: return object.__getattribute__(self, attr) if attr in self._attributes: return getattr(self.groupby, attr) if attr in self.obj: return self[attr] if attr in self._deprecated_invalids: raise ValueError(".resample() is now a deferred operation\n" "\tuse .resample(...).mean() instead of " ".resample(...)") matches_pattern = any(attr.startswith(x) for x in self._deprecated_valid_patterns) if not matches_pattern and attr not in self._deprecated_valids: self = self._deprecated(attr) return object.__getattribute__(self, attr) def __setattr__(self, attr, value): if attr not in self._deprecated_valids: raise ValueError("cannot set values on {0}".format( self.__class__.__name__)) object.__setattr__(self, attr, value) def __getitem__(self, key): try: return super(Resampler, self).__getitem__(key) except (KeyError, com.AbstractMethodError): # compat for deprecated if isinstance(self.obj, com.ABCSeries): return self._deprecated('__getitem__')[key] raise def __setitem__(self, attr, value): raise ValueError("cannot set items on {0}".format( self.__class__.__name__)) def _convert_obj(self, obj): """ provide any conversions for the object in order to correctly handle Parameters ---------- obj : the object to be resampled Returns ------- obj : converted object """ obj = obj._consolidate() return obj def _get_binner_for_time(self): raise AbstractMethodError(self) def _set_binner(self): """ setup our binners cache these as we are an immutable object """ if self.binner is None: self.binner, self.grouper = self._get_binner() def _get_binner(self): """ create the BinGrouper, assume that self.set_grouper(obj) has already been called """ binner, bins, binlabels = self._get_binner_for_time() bin_grouper = BinGrouper(bins, binlabels) return binner, bin_grouper def _assure_grouper(self): """ make sure that we are creating our binner & grouper """ self._set_binner() def plot(self, *args, **kwargs): # for compat with prior versions, we want to # have the 
warnings shown here and just have this work return self._deprecated('plot').plot(*args, **kwargs) _agg_doc = dedent(""" Examples -------- >>> s = Series([1,2,3,4,5], index=pd.date_range('20130101', periods=5,freq='s')) 2013-01-01 00:00:00 1 2013-01-01 00:00:01 2 2013-01-01 00:00:02 3 2013-01-01 00:00:03 4 2013-01-01 00:00:04 5 Freq: S, dtype: int64 >>> r = s.resample('2s') DatetimeIndexResampler [freq=<2 * Seconds>, axis=0, closed=left, label=left, convention=start, base=0] >>> r.agg(np.sum) 2013-01-01 00:00:00 3 2013-01-01 00:00:02 7 2013-01-01 00:00:04 5 Freq: 2S, dtype: int64 >>> r.agg(['sum','mean','max']) sum mean max 2013-01-01 00:00:00 3 1.5 2 2013-01-01 00:00:02 7 3.5 4 2013-01-01 00:00:04 5 5.0 5 >>> r.agg({'result' : lambda x: x.mean() / x.std(), 'total' : np.sum}) total result 2013-01-01 00:00:00 3 2.121320 2013-01-01 00:00:02 7 4.949747 2013-01-01 00:00:04 5 NaN See also -------- pandas.DataFrame.groupby.aggregate pandas.DataFrame.resample.transform pandas.DataFrame.aggregate """) @Appender(_agg_doc) @Appender(_shared_docs['aggregate'] % dict( klass='DataFrame', versionadded='')) def aggregate(self, arg, *args, **kwargs): self._set_binner() result, how = self._aggregate(arg, *args, **kwargs) if result is None: result = self._groupby_and_aggregate(arg, *args, **kwargs) result = self._apply_loffset(result) return result agg = aggregate apply = aggregate def transform(self, arg, *args, **kwargs): """ Call function producing a like-indexed Series on each group and return a Series with the transformed values Parameters ---------- func : function To apply to each group. Should return a Series with the same index Examples -------- >>> resampled.transform(lambda x: (x - x.mean()) / x.std()) Returns ------- transformed : Series """ return self._selected_obj.groupby(self.groupby).transform( arg, *args, **kwargs) def _downsample(self, f): raise AbstractMethodError(self) def _upsample(self, f, limit=None, fill_value=None): raise AbstractMethodError(self) def _gotitem(self, key, ndim, subset=None): """ sub-classes to define return a sliced object Parameters ---------- key : string / list of selections ndim : 1,2 requested ndim of result subset : object, default None subset to act on """ self._set_binner() grouper = self.grouper if subset is None: subset = self.obj grouped = groupby(subset, by=None, grouper=grouper, axis=self.axis) # try the key selection try: return grouped[key] except KeyError: return grouped def _groupby_and_aggregate(self, how, grouper=None, *args, **kwargs): """ re-evaluate the obj with a groupby aggregation """ if grouper is None: self._set_binner() grouper = self.grouper obj = self._selected_obj try: grouped = groupby(obj, by=None, grouper=grouper, axis=self.axis) except TypeError: # panel grouper grouped = PanelGroupBy(obj, grouper=grouper, axis=self.axis) try: result = grouped.aggregate(how, *args, **kwargs) except Exception: # we have a non-reducing function # try to evaluate result = grouped.apply(how, *args, **kwargs) result = self._apply_loffset(result) return self._wrap_result(result) def _apply_loffset(self, result): """ if loffset is set, offset the result index This is NOT an idempotent routine, it will be applied exactly once to the result. 
Parameters ---------- result : Series or DataFrame the result of resample """ needs_offset = ( isinstance(self.loffset, (DateOffset, timedelta)) and isinstance(result.index, DatetimeIndex) and len(result.index) > 0 ) if needs_offset: result.index = result.index + self.loffset self.loffset = None return result def _get_resampler_for_grouping(self, groupby, **kwargs): """ return the correct class for resampling with groupby """ return self._resampler_for_grouping(self, groupby=groupby, **kwargs) def _wrap_result(self, result): """ potentially wrap any results """ if isinstance(result, com.ABCSeries) and self._selection is not None: result.name = self._selection return result def pad(self, limit=None): """ Forward fill the values Parameters ---------- limit : integer, optional limit of how many values to fill See Also -------- Series.fillna DataFrame.fillna """ return self._upsample('pad', limit=limit) ffill = pad def backfill(self, limit=None): """ Backward fill the values Parameters ---------- limit : integer, optional limit of how many values to fill See Also -------- Series.fillna DataFrame.fillna """ return self._upsample('backfill', limit=limit) bfill = backfill def fillna(self, method, limit=None): """ Fill missing values Parameters ---------- method : str, method of resampling ('ffill', 'bfill') limit : integer, optional limit of how many values to fill See Also -------- Series.fillna DataFrame.fillna """ return self._upsample(method, limit=limit) @Appender(_shared_docs['interpolate'] % _shared_docs_kwargs) def interpolate(self, method='linear', axis=0, limit=None, inplace=False, limit_direction='forward', downcast=None, **kwargs): """ Interpolate values according to different methods. .. versionadded:: 0.18.1 """ result = self._upsample(None) return result.interpolate(method=method, axis=axis, limit=limit, inplace=inplace, limit_direction=limit_direction, downcast=downcast, **kwargs) def asfreq(self, fill_value=None): """ return the values at the new freq, essentially a reindex Parameters ---------- fill_value: scalar, optional Value to use for missing values, applied during upsampling (note this does not fill NaNs that already were present). .. 
versionadded:: 0.20.0 See Also -------- Series.asfreq DataFrame.asfreq """ return self._upsample('asfreq', fill_value=fill_value) def std(self, ddof=1, *args, **kwargs): """ Compute standard deviation of groups, excluding missing values Parameters ---------- ddof : integer, default 1 degrees of freedom """ nv.validate_resampler_func('std', args, kwargs) return self._downsample('std', ddof=ddof) def var(self, ddof=1, *args, **kwargs): """ Compute variance of groups, excluding missing values Parameters ---------- ddof : integer, default 1 degrees of freedom """ nv.validate_resampler_func('var', args, kwargs) return self._downsample('var', ddof=ddof) Resampler._deprecated_valids += dir(Resampler) # downsample methods for method in ['min', 'max', 'first', 'last', 'sum', 'mean', 'sem', 'median', 'prod', 'ohlc']: def f(self, _method=method, *args, **kwargs): nv.validate_resampler_func(_method, args, kwargs) return self._downsample(_method) f.__doc__ = getattr(GroupBy, method).__doc__ setattr(Resampler, method, f) # groupby & aggregate methods for method in ['count', 'size']: def f(self, _method=method): return self._downsample(_method) f.__doc__ = getattr(GroupBy, method).__doc__ setattr(Resampler, method, f) # series only methods for method in ['nunique']: def f(self, _method=method): return self._downsample(_method) f.__doc__ = getattr(SeriesGroupBy, method).__doc__ setattr(Resampler, method, f) def _maybe_process_deprecations(r, how=None, fill_method=None, limit=None): """ potentially we might have a deprecation warning, show it but call the appropriate methods anyhow """ if how is not None: # .resample(..., how='sum') if isinstance(how, compat.string_types): method = "{0}()".format(how) # .resample(..., how=lambda x: ....) else: method = ".apply(<func>)" # if we have both a how and fill_method, then show # the following warning if fill_method is None: warnings.warn("how in .resample() is deprecated\n" "the new syntax is " ".resample(...).{method}".format( method=method), FutureWarning, stacklevel=3) r = r.aggregate(how) if fill_method is not None: # show the prior function call method = '.' 
+ method if how is not None else '' args = "limit={0}".format(limit) if limit is not None else "" warnings.warn("fill_method is deprecated to .resample()\n" "the new syntax is .resample(...){method}" ".{fill_method}({args})".format( method=method, fill_method=fill_method, args=args), FutureWarning, stacklevel=3) if how is not None: r = getattr(r, fill_method)(limit=limit) else: r = r.aggregate(fill_method, limit=limit) return r class _GroupByMixin(GroupByMixin): """ provide the groupby facilities """ def __init__(self, obj, *args, **kwargs): parent = kwargs.pop('parent', None) groupby = kwargs.pop('groupby', None) if parent is None: parent = obj # initialize our GroupByMixin object with # the resampler attributes for attr in self._attributes: setattr(self, attr, kwargs.get(attr, getattr(parent, attr))) super(_GroupByMixin, self).__init__(None) self._groupby = groupby self._groupby.mutated = True self._groupby.grouper.mutated = True self.groupby = copy.copy(parent.groupby) def _apply(self, f, **kwargs): """ dispatch to _upsample; we are stripping all of the _upsample kwargs and performing the original function call on the grouped object """ def func(x): x = self._shallow_copy(x, groupby=self.groupby) if isinstance(f, compat.string_types): return getattr(x, f)(**kwargs) return x.apply(f, **kwargs) result = self._groupby.apply(func) return self._wrap_result(result) _upsample = _apply _downsample = _apply _groupby_and_aggregate = _apply class DatetimeIndexResampler(Resampler): @property def _resampler_for_grouping(self): return DatetimeIndexResamplerGroupby def _get_binner_for_time(self): # this is how we are actually creating the bins if self.kind == 'period': return self.groupby._get_time_period_bins(self.ax) return self.groupby._get_time_bins(self.ax) def _downsample(self, how, **kwargs): """ Downsample the cython defined function Parameters ---------- how : string / cython mapped function **kwargs : kw args passed to how function """ self._set_binner() how = self._is_cython_func(how) or how ax = self.ax obj = self._selected_obj if not len(ax): # reset to the new freq obj = obj.copy() obj.index.freq = self.freq return obj # do we have a regular frequency if ax.freq is not None or ax.inferred_freq is not None: if len(self.grouper.binlabels) > len(ax) and how is None: # let's do an asfreq return self.asfreq() # we are downsampling # we want to call the actual grouper method here result = obj.groupby( self.grouper, axis=self.axis).aggregate(how, **kwargs) result = self._apply_loffset(result) return self._wrap_result(result) def _adjust_binner_for_upsample(self, binner): """ adjust our binner when upsampling """ if self.closed == 'right': binner = binner[1:] else: binner = binner[:-1] return binner def _upsample(self, method, limit=None, fill_value=None): """ method : string {'backfill', 'bfill', 'pad', 'ffill', 'asfreq'} method for upsampling limit : int, default None Maximum size gap to fill when reindexing fill_value : scalar, default None Value to use for missing values See also -------- .fillna """ self._set_binner() if self.axis: raise AssertionError('axis must be 0') if self._from_selection: raise ValueError("Upsampling from level= or on= selection" " is not supported, use .set_index(...)" " to explicitly set index to" " datetime-like") ax = self.ax obj = self._selected_obj binner = self.binner res_index = self._adjust_binner_for_upsample(binner) # if we have the same frequency as our axis, then we are equal sampling if limit is None and to_offset(ax.inferred_freq) == self.freq: result = 
obj.copy() result.index = res_index else: result = obj.reindex(res_index, method=method, limit=limit, fill_value=fill_value) return self._wrap_result(result) def _wrap_result(self, result): result = super(DatetimeIndexResampler, self)._wrap_result(result) # we may have a different kind that we were asked originally # convert if needed if self.kind == 'period' and not isinstance(result.index, PeriodIndex): result.index = result.index.to_period(self.freq) return result class DatetimeIndexResamplerGroupby(_GroupByMixin, DatetimeIndexResampler): """ Provides a resample of a groupby implementation .. versionadded:: 0.18.1 """ @property def _constructor(self): return DatetimeIndexResampler class PeriodIndexResampler(DatetimeIndexResampler): @property def _resampler_for_grouping(self): return PeriodIndexResamplerGroupby def _convert_obj(self, obj): obj = super(PeriodIndexResampler, self)._convert_obj(obj) offset = to_offset(self.freq) if offset.n > 1: if self.kind == 'period': # pragma: no cover print('Warning: multiple of frequency -> timestamps') # Cannot have multiple of periods, convert to timestamp self.kind = 'timestamp' # convert to timestamp if not (self.kind is None or self.kind == 'period'): if self._from_selection: # see GH 14008, GH 12871 msg = ("Resampling from level= or on= selection" " with a PeriodIndex is not currently supported," " use .set_index(...) to explicitly set index") raise NotImplementedError(msg) else: obj = obj.to_timestamp(how=self.convention) return obj def aggregate(self, arg, *args, **kwargs): result, how = self._aggregate(arg, *args, **kwargs) if result is None: result = self._downsample(arg, *args, **kwargs) result = self._apply_loffset(result) return result agg = aggregate def _get_new_index(self): """ return our new index """ ax = self.ax if len(ax) == 0: values = [] else: start = ax[0].asfreq(self.freq, how=self.convention) end = ax[-1].asfreq(self.freq, how='end') values = period_range(start, end, freq=self.freq).asi8 return ax._shallow_copy(values, freq=self.freq) def _downsample(self, how, **kwargs): """ Downsample the cython defined function Parameters ---------- how : string / cython mapped function **kwargs : kw args passed to how function """ # we may need to actually resample as if we are timestamps if self.kind == 'timestamp': return super(PeriodIndexResampler, self)._downsample(how, **kwargs) how = self._is_cython_func(how) or how ax = self.ax new_index = self._get_new_index() # Start vs. 
end of period memb = ax.asfreq(self.freq, how=self.convention) if is_subperiod(ax.freq, self.freq): # Downsampling if len(new_index) == 0: bins = [] else: i8 = memb.asi8 rng = np.arange(i8[0], i8[-1] + 1) bins = memb.searchsorted(rng, side='right') grouper = BinGrouper(bins, new_index) return self._groupby_and_aggregate(how, grouper=grouper) elif is_superperiod(ax.freq, self.freq): return self.asfreq() elif ax.freq == self.freq: return self.asfreq() raise IncompatibleFrequency( 'Frequency {} cannot be resampled to {}, as they are not ' 'sub or super periods'.format(ax.freq, self.freq)) def _upsample(self, method, limit=None, fill_value=None): """ method : string {'backfill', 'bfill', 'pad', 'ffill'} method for upsampling limit : int, default None Maximum size gap to fill when reindexing fill_value : scalar, default None Value to use for missing values See also -------- .fillna """ if self._from_selection: raise ValueError("Upsampling from level= or on= selection" " is not supported, use .set_index(...)" " to explicitly set index to" " datetime-like") # we may need to actually resample as if we are timestamps if self.kind == 'timestamp': return super(PeriodIndexResampler, self)._upsample( method, limit=limit, fill_value=fill_value) ax = self.ax obj = self.obj new_index = self._get_new_index() # Start vs. end of period memb = ax.asfreq(self.freq, how=self.convention) # Get the fill indexer indexer = memb.get_indexer(new_index, method=method, limit=limit) return self._wrap_result(_take_new_index( obj, indexer, new_index, axis=self.axis)) class PeriodIndexResamplerGroupby(_GroupByMixin, PeriodIndexResampler): """ Provides a resample of a groupby implementation .. versionadded:: 0.18.1 """ @property def _constructor(self): return PeriodIndexResampler class TimedeltaIndexResampler(DatetimeIndexResampler): @property def _resampler_for_grouping(self): return TimedeltaIndexResamplerGroupby def _get_binner_for_time(self): return self.groupby._get_time_delta_bins(self.ax) def _adjust_binner_for_upsample(self, binner): """ adjust our binner when upsampling """ ax = self.ax if is_subperiod(ax.freq, self.freq): # We are actually downsampling # but are in the asfreq path # GH 12926 if self.closed == 'right': binner = binner[1:] else: binner = binner[:-1] return binner class TimedeltaIndexResamplerGroupby(_GroupByMixin, TimedeltaIndexResampler): """ Provides a resample of a groupby implementation .. 
versionadded:: 0.18.1 """ @property def _constructor(self): return TimedeltaIndexResampler def resample(obj, kind=None, **kwds): """ create a TimeGrouper and return our resampler """ tg = TimeGrouper(**kwds) return tg._get_resampler(obj, kind=kind) resample.__doc__ = Resampler.__doc__ def get_resampler_for_grouping(groupby, rule, how=None, fill_method=None, limit=None, kind=None, **kwargs): """ return our appropriate resampler when grouping as well """ # .resample uses 'on' similar to how .groupby uses 'key' kwargs['key'] = kwargs.pop('on', None) tg = TimeGrouper(freq=rule, **kwargs) resampler = tg._get_resampler(groupby.obj, kind=kind) r = resampler._get_resampler_for_grouping(groupby=groupby) return _maybe_process_deprecations(r, how=how, fill_method=fill_method, limit=limit) class TimeGrouper(Grouper): """ Custom groupby class for time-interval grouping Parameters ---------- freq : pandas date offset or offset alias for identifying bin edges closed : closed end of interval; left or right label : interval boundary to use for labeling; left or right nperiods : optional, integer convention : {'start', 'end', 'e', 's'} If axis is PeriodIndex Notes ----- Use begin, end, nperiods to generate intervals that cannot be derived directly from the associated object """ def __init__(self, freq='Min', closed=None, label=None, how='mean', nperiods=None, axis=0, fill_method=None, limit=None, loffset=None, kind=None, convention=None, base=0, **kwargs): freq = to_offset(freq) end_types = set(['M', 'A', 'Q', 'BM', 'BA', 'BQ', 'W']) rule = freq.rule_code if (rule in end_types or ('-' in rule and rule[:rule.find('-')] in end_types)): if closed is None: closed = 'right' if label is None: label = 'right' else: if closed is None: closed = 'left' if label is None: label = 'left' self.closed = closed self.label = label self.nperiods = nperiods self.kind = kind self.convention = convention or 'E' self.convention = self.convention.lower() if isinstance(loffset, compat.string_types): loffset = to_offset(loffset) self.loffset = loffset self.how = how self.fill_method = fill_method self.limit = limit self.base = base # always sort time groupers kwargs['sort'] = True super(TimeGrouper, self).__init__(freq=freq, axis=axis, **kwargs) def _get_resampler(self, obj, kind=None): """ return my resampler or raise if we have an invalid axis Parameters ---------- obj : input object kind : string, optional 'period','timestamp','timedelta' are valid Returns ------- a Resampler Raises ------ TypeError if incompatible axis """ self._set_grouper(obj) ax = self.ax if isinstance(ax, DatetimeIndex): return DatetimeIndexResampler(obj, groupby=self, kind=kind, axis=self.axis) elif isinstance(ax, PeriodIndex) or kind == 'period': return PeriodIndexResampler(obj, groupby=self, kind=kind, axis=self.axis) elif isinstance(ax, TimedeltaIndex): return TimedeltaIndexResampler(obj, groupby=self, axis=self.axis) raise TypeError("Only valid with DatetimeIndex, " "TimedeltaIndex or PeriodIndex, " "but got an instance of %r" % type(ax).__name__) def _get_grouper(self, obj): # create the resampler and return our binner r = self._get_resampler(obj) r._set_binner() return r.binner, r.grouper, r.obj def _get_binner_for_grouping(self, obj): # return an ordering of the transformed group labels, # suitable for multi-grouping, e.g the labels for # the resampled intervals binner, grouper, obj = self._get_grouper(obj) l = [] for key, group in grouper.get_iterator(self.ax): l.extend([key] * len(group)) if isinstance(self.ax, PeriodIndex): grouper = 
binner.__class__(l, freq=binner.freq, name=binner.name) else: # resampling causes duplicated values, specifying freq is invalid grouper = binner.__class__(l, name=binner.name) # since we may have had to sort # may need to reorder groups here if self.indexer is not None: indexer = self.indexer.argsort(kind='quicksort') grouper = grouper.take(indexer) return grouper def _get_time_bins(self, ax): if not isinstance(ax, DatetimeIndex): raise TypeError('axis must be a DatetimeIndex, but got ' 'an instance of %r' % type(ax).__name__) if len(ax) == 0: binner = labels = DatetimeIndex( data=[], freq=self.freq, name=ax.name) return binner, [], labels first, last = ax.min(), ax.max() first, last = _get_range_edges(first, last, self.freq, closed=self.closed, base=self.base) tz = ax.tz # GH #12037 # use first/last directly instead of call replace() on them # because replace() will swallow the nanosecond part # thus last bin maybe slightly before the end if the end contains # nanosecond part and lead to `Values falls after last bin` error binner = labels = DatetimeIndex(freq=self.freq, start=first, end=last, tz=tz, name=ax.name) # a little hack trimmed = False if (len(binner) > 2 and binner[-2] == last and self.closed == 'right'): binner = binner[:-1] trimmed = True ax_values = ax.asi8 binner, bin_edges = self._adjust_bin_edges(binner, ax_values) # general version, knowing nothing about relative frequencies bins = lib.generate_bins_dt64( ax_values, bin_edges, self.closed, hasnans=ax.hasnans) if self.closed == 'right': labels = binner if self.label == 'right': labels = labels[1:] elif not trimmed: labels = labels[:-1] else: if self.label == 'right': labels = labels[1:] elif not trimmed: labels = labels[:-1] if ax.hasnans: binner = binner.insert(0, tslib.NaT) labels = labels.insert(0, tslib.NaT) # if we end up with more labels than bins # adjust the labels # GH4076 if len(bins) < len(labels): labels = labels[:len(bins)] return binner, bins, labels def _adjust_bin_edges(self, binner, ax_values): # Some hacks for > daily data, see #1471, #1458, #1483 bin_edges = binner.asi8 if self.freq != 'D' and is_superperiod(self.freq, 'D'): day_nanos = _delta_to_nanoseconds(timedelta(1)) if self.closed == 'right': bin_edges = bin_edges + day_nanos - 1 # intraday values on last day if bin_edges[-2] > ax_values.max(): bin_edges = bin_edges[:-1] binner = binner[:-1] return binner, bin_edges def _get_time_delta_bins(self, ax): if not isinstance(ax, TimedeltaIndex): raise TypeError('axis must be a TimedeltaIndex, but got ' 'an instance of %r' % type(ax).__name__) if not len(ax): binner = labels = TimedeltaIndex( data=[], freq=self.freq, name=ax.name) return binner, [], labels start = ax[0] end = ax[-1] labels = binner = TimedeltaIndex(start=start, end=end, freq=self.freq, name=ax.name) end_stamps = labels + 1 bins = ax.searchsorted(end_stamps, side='left') # Addresses GH #10530 if self.base > 0: labels += type(self.freq)(self.base) return binner, bins, labels def _get_time_period_bins(self, ax): if not isinstance(ax, DatetimeIndex): raise TypeError('axis must be a DatetimeIndex, but got ' 'an instance of %r' % type(ax).__name__) if not len(ax): binner = labels = PeriodIndex( data=[], freq=self.freq, name=ax.name) return binner, [], labels labels = binner = PeriodIndex(start=ax[0], end=ax[-1], freq=self.freq, name=ax.name) end_stamps = (labels + 1).asfreq(self.freq, 's').to_timestamp() if ax.tzinfo: end_stamps = end_stamps.tz_localize(ax.tzinfo) bins = ax.searchsorted(end_stamps, side='left') return binner, bins, labels def 
_take_new_index(obj, indexer, new_index, axis=0): from pandas.core.api import Series, DataFrame if isinstance(obj, Series): new_values = algos.take_1d(obj.values, indexer) return Series(new_values, index=new_index, name=obj.name) elif isinstance(obj, DataFrame): if axis == 1: raise NotImplementedError("axis 1 is not supported") return DataFrame(obj._data.reindex_indexer( new_axis=new_index, indexer=indexer, axis=1)) else: raise ValueError("'obj' should be either a Series or a DataFrame") def _get_range_edges(first, last, offset, closed='left', base=0): if isinstance(offset, compat.string_types): offset = to_offset(offset) if isinstance(offset, Tick): is_day = isinstance(offset, Day) day_nanos = _delta_to_nanoseconds(timedelta(1)) # #1165 if (is_day and day_nanos % offset.nanos == 0) or not is_day: return _adjust_dates_anchored(first, last, offset, closed=closed, base=base) if not isinstance(offset, Tick): # and first.time() != last.time(): # hack! first = first.normalize() last = last.normalize() if closed == 'left': first = Timestamp(offset.rollback(first)) else: first = Timestamp(first - offset) last = Timestamp(last + offset) return first, last def _adjust_dates_anchored(first, last, offset, closed='right', base=0): # First and last offsets should be calculated from the start day to fix an # error cause by resampling across multiple days when a one day period is # not a multiple of the frequency. # # See https://github.com/pandas-dev/pandas/issues/8683 # 14682 - Since we need to drop the TZ information to perform # the adjustment in the presence of a DST change, # save TZ Info and the DST state of the first and last parameters # so that we can accurately rebuild them at the end. first_tzinfo = first.tzinfo last_tzinfo = last.tzinfo first_dst = bool(first.dst()) last_dst = bool(last.dst()) first = first.tz_localize(None) last = last.tz_localize(None) start_day_nanos = first.normalize().value base_nanos = (base % offset.n) * offset.nanos // offset.n start_day_nanos += base_nanos foffset = (first.value - start_day_nanos) % offset.nanos loffset = (last.value - start_day_nanos) % offset.nanos if closed == 'right': if foffset > 0: # roll back fresult = first.value - foffset else: fresult = first.value - offset.nanos if loffset > 0: # roll forward lresult = last.value + (offset.nanos - loffset) else: # already the end of the road lresult = last.value else: # closed == 'left' if foffset > 0: fresult = first.value - foffset else: # start of the road fresult = first.value if loffset > 0: # roll forward lresult = last.value + (offset.nanos - loffset) else: lresult = last.value + offset.nanos return (Timestamp(fresult).tz_localize(first_tzinfo, ambiguous=first_dst), Timestamp(lresult).tz_localize(last_tzinfo, ambiguous=last_dst)) def asfreq(obj, freq, method=None, how=None, normalize=False, fill_value=None): """ Utility frequency conversion method for Series/DataFrame """ if isinstance(obj.index, PeriodIndex): if method is not None: raise NotImplementedError("'method' argument is not supported") if how is None: how = 'E' new_obj = obj.copy() new_obj.index = obj.index.asfreq(freq, how=how) elif len(obj.index) == 0: new_obj = obj.copy() new_obj.index = obj.index._shallow_copy(freq=to_offset(freq)) else: dti = date_range(obj.index[0], obj.index[-1], freq=freq) dti.name = obj.index.name new_obj = obj.reindex(dti, method=method, fill_value=fill_value) if normalize: new_obj.index = new_obj.index.normalize() return new_obj
mit
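A minimal sketch of the resampling API implemented in the file above, matching the usage shown in its class docstrings (assumes a pandas new enough for the deferred .resample(...) syntax; fill_value on asfreq needs 0.20+ per the versionadded note):

import pandas as pd

s = pd.Series([1, 2, 3, 4, 5],
              index=pd.date_range('20130101', periods=5, freq='s'))
r = s.resample('2s')                        # a DatetimeIndexResampler
print(r.mean())                             # downsample via a generated GroupBy method
print(r.asfreq(fill_value=0))               # reindex at the new frequency
print(s.resample('500ms').ffill(limit=1))   # upsample with forward fill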
wzbozon/scikit-learn
sklearn/feature_selection/tests/test_from_model.py
244
1593
import numpy as np
import scipy.sparse as sp
from nose.tools import assert_raises, assert_true

from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_greater
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import SGDClassifier
from sklearn.svm import LinearSVC

iris = load_iris()


def test_transform_linear_model():
    for clf in (LogisticRegression(C=0.1),
                LinearSVC(C=0.01, dual=False),
                SGDClassifier(alpha=0.001, n_iter=50, shuffle=True,
                              random_state=0)):
        for thresh in (None, ".09*mean", "1e-5 * median"):
            for func in (np.array, sp.csr_matrix):
                X = func(iris.data)
                clf.set_params(penalty="l1")
                clf.fit(X, iris.target)
                X_new = clf.transform(X, thresh)
                if isinstance(clf, SGDClassifier):
                    assert_true(X_new.shape[1] <= X.shape[1])
                else:
                    assert_less(X_new.shape[1], X.shape[1])
                clf.set_params(penalty="l2")
                clf.fit(X_new, iris.target)
                pred = clf.predict(X_new)
                assert_greater(np.mean(pred == iris.target), 0.7)


def test_invalid_input():
    clf = SGDClassifier(alpha=0.1, n_iter=10, shuffle=True, random_state=None)
    clf.fit(iris.data, iris.target)
    assert_raises(ValueError, clf.transform, iris.data, "gobbledigook")
    assert_raises(ValueError, clf.transform, iris.data, ".5 * gobbledigook")
bsd-3-clause
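The test above exercises the since-deprecated estimator.transform(X, threshold) feature-selection shortcut; a rough modern equivalent, assuming a newer scikit-learn where SelectFromModel exists, would be:

from sklearn.datasets import load_iris
from sklearn.feature_selection import SelectFromModel
from sklearn.svm import LinearSVC

iris = load_iris()
# The L1 penalty drives some coefficients to zero, so features get dropped.
clf = LinearSVC(C=0.01, penalty="l1", dual=False).fit(iris.data, iris.target)
selector = SelectFromModel(clf, prefit=True)
X_new = selector.transform(iris.data)
assert X_new.shape[1] < iris.data.shape[1]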
orion-42/numerics-physics-stuff
solar_system.py
1
2094
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.constants import gravitational_constant as G


# Newtonian dynamics of an n-body system
def rhs(y, t, *masses):
    num_planets = len(masses)
    y = y.reshape((num_planets, 2, 3))
    ans = y.copy()
    for i in range(num_planets):
        ans[i, 0, :] = y[i, 1, :]  # dx/dt = v
        accel = np.zeros(3)
        for j in range(num_planets):
            if i != j:
                between = y[j, 0, :] - y[i, 0, :]
                dist = np.linalg.norm(between)
                accel += G*masses[j]/dist**3*between
        ans[i, 1, :] = accel  # dv/dt = a
    return ans.reshape(ans.size)


# Parameters
#         earth       mars       sun
masses = (5.97237e24, 6.4171e23, 1.988544e30)  # kg

# Initial conditions (km, km/s), ephemeris data from some day
earth_pos = [-1.012268338703987E+08, -1.111875886682171E+08, -1.939665193599463E+04]
earth_vel = [2.152795356301499E+01, -2.018669837471565E+01, 1.000460883457954E-03]
mars_pos = [-1.345930796446981E+08, -1.863155816469951E+08, -6.188645620241463E+05]
mars_vel = [2.053477661794103E+01, -1.212126142710785E+01, -7.582624591585443E-01]
sun_pos = [5.626768185365887E+05, 3.432765388815567E+05, -2.436414149240617E+04]
sun_vel = [-7.612831360793502E-04, 1.210783982822092E-02, -1.274982680357986E-06]
y0 = np.array([
    [earth_pos, earth_vel],
    [mars_pos, mars_vel],
    [sun_pos, sun_vel],
])  # [km, km/s]
num_planets = y0.shape[0]
y0 *= 1000.0  # km -> m
y0 = y0.reshape(y0.size)

# Timeframe
T = 2*365*24*60*60.0  # 2 y -> [s]
steps = 10000
ts = np.linspace(0, T, steps)

# Integration of the equations
ans = odeint(rhs, y0, ts, args=masses)
ans = ans.reshape((steps, num_planets, 2, 3))

# Plot the result
planet_names = ["Earth", "Mars", "Sun"]
colors = ["blue", "red", "yellow"]
for i in range(num_planets):
    xs = ans[:, i, 0, 0]/1000.0
    ys = ans[:, i, 0, 1]/1000.0
    plt.plot(xs, ys, label=planet_names[i], color=colors[i])
plt.legend(loc=3)
plt.title("Solar System Simulation")
plt.xlabel("x/km")
plt.ylabel("y/km")
plt.grid()
plt.show()
mit
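One way to sanity-check the integration above is conservation of total energy. This helper is not part of the original script (the tolerance below is an assumption) and reuses its ans array, masses tuple, and G:

import numpy as np
from scipy.constants import gravitational_constant as G

def total_energy(state, masses):
    # state: (n, 2, 3) array of positions [m] and velocities [m/s]
    kinetic = sum(0.5 * m * np.dot(v, v) for m, v in zip(masses, state[:, 1]))
    potential = 0.0
    for i in range(len(masses)):
        for j in range(i + 1, len(masses)):
            r = np.linalg.norm(state[j, 0] - state[i, 0])
            potential -= G * masses[i] * masses[j] / r
    return kinetic + potential

# drift = total_energy(ans[-1], masses) / total_energy(ans[0], masses) - 1
# abs(drift) should stay small (e.g. < 1e-6) for a healthy integration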
hainm/scikit-learn
sklearn/utils/multiclass.py
83
12343
# Author: Arnaud Joly, Joel Nothman, Hamzeh Alsalhi # # License: BSD 3 clause """ Multi-class / multi-label utility function ========================================== """ from __future__ import division from collections import Sequence from itertools import chain from scipy.sparse import issparse from scipy.sparse.base import spmatrix from scipy.sparse import dok_matrix from scipy.sparse import lil_matrix import numpy as np from ..externals.six import string_types from .validation import check_array from ..utils.fixes import bincount def _unique_multiclass(y): if hasattr(y, '__array__'): return np.unique(np.asarray(y)) else: return set(y) def _unique_indicator(y): return np.arange(check_array(y, ['csr', 'csc', 'coo']).shape[1]) _FN_UNIQUE_LABELS = { 'binary': _unique_multiclass, 'multiclass': _unique_multiclass, 'multilabel-indicator': _unique_indicator, } def unique_labels(*ys): """Extract an ordered array of unique labels We don't allow: - mix of multilabel and multiclass (single label) targets - mix of label indicator matrix and anything else, because there are no explicit labels) - mix of label indicator matrices of different sizes - mix of string and integer labels At the moment, we also don't allow "multiclass-multioutput" input type. Parameters ---------- *ys : array-likes, Returns ------- out : numpy array of shape [n_unique_labels] An ordered array of unique labels. Examples -------- >>> from sklearn.utils.multiclass import unique_labels >>> unique_labels([3, 5, 5, 5, 7, 7]) array([3, 5, 7]) >>> unique_labels([1, 2, 3, 4], [2, 2, 3, 4]) array([1, 2, 3, 4]) >>> unique_labels([1, 2, 10], [5, 11]) array([ 1, 2, 5, 10, 11]) """ if not ys: raise ValueError('No argument has been passed.') # Check that we don't mix label format ys_types = set(type_of_target(x) for x in ys) if ys_types == set(["binary", "multiclass"]): ys_types = set(["multiclass"]) if len(ys_types) > 1: raise ValueError("Mix type of y not allowed, got types %s" % ys_types) label_type = ys_types.pop() # Check consistency for the indicator format if (label_type == "multilabel-indicator" and len(set(check_array(y, ['csr', 'csc', 'coo']).shape[1] for y in ys)) > 1): raise ValueError("Multi-label binary indicator input with " "different numbers of labels") # Get the unique set of labels _unique_labels = _FN_UNIQUE_LABELS.get(label_type, None) if not _unique_labels: raise ValueError("Unknown label type: %s" % repr(ys)) ys_labels = set(chain.from_iterable(_unique_labels(y) for y in ys)) # Check that we don't mix string type with number type if (len(set(isinstance(label, string_types) for label in ys_labels)) > 1): raise ValueError("Mix of label input types (string and number)") return np.array(sorted(ys_labels)) def _is_integral_float(y): return y.dtype.kind == 'f' and np.all(y.astype(int) == y) def is_multilabel(y): """ Check if ``y`` is in a multilabel format. Parameters ---------- y : numpy array of shape [n_samples] Target values. Returns ------- out : bool, Return ``True``, if ``y`` is in a multilabel format, else ```False``. 
Examples -------- >>> import numpy as np >>> from sklearn.utils.multiclass import is_multilabel >>> is_multilabel([0, 1, 0, 1]) False >>> is_multilabel([[1], [0, 2], []]) False >>> is_multilabel(np.array([[1, 0], [0, 0]])) True >>> is_multilabel(np.array([[1], [0], [0]])) False >>> is_multilabel(np.array([[1, 0, 0]])) True """ if hasattr(y, '__array__'): y = np.asarray(y) if not (hasattr(y, "shape") and y.ndim == 2 and y.shape[1] > 1): return False if issparse(y): if isinstance(y, (dok_matrix, lil_matrix)): y = y.tocsr() return (len(y.data) == 0 or np.ptp(y.data) == 0 and (y.dtype.kind in 'biu' or # bool, int, uint _is_integral_float(np.unique(y.data)))) else: labels = np.unique(y) return len(labels) < 3 and (y.dtype.kind in 'biu' or # bool, int, uint _is_integral_float(labels)) def type_of_target(y): """Determine the type of data indicated by target `y` Parameters ---------- y : array-like Returns ------- target_type : string One of: * 'continuous': `y` is an array-like of floats that are not all integers, and is 1d or a column vector. * 'continuous-multioutput': `y` is a 2d array of floats that are not all integers, and both dimensions are of size > 1. * 'binary': `y` contains <= 2 discrete values and is 1d or a column vector. * 'multiclass': `y` contains more than two discrete values, is not a sequence of sequences, and is 1d or a column vector. * 'multiclass-multioutput': `y` is a 2d array that contains more than two discrete values, is not a sequence of sequences, and both dimensions are of size > 1. * 'multilabel-indicator': `y` is a label indicator matrix, an array of two dimensions with at least two columns, and at most 2 unique values. * 'unknown': `y` is array-like but none of the above, such as a 3d array, sequence of sequences, or an array of non-sequence objects. Examples -------- >>> import numpy as np >>> type_of_target([0.1, 0.6]) 'continuous' >>> type_of_target([1, -1, -1, 1]) 'binary' >>> type_of_target(['a', 'b', 'a']) 'binary' >>> type_of_target([1.0, 2.0]) 'binary' >>> type_of_target([1, 0, 2]) 'multiclass' >>> type_of_target([1.0, 0.0, 3.0]) 'multiclass' >>> type_of_target(['a', 'b', 'c']) 'multiclass' >>> type_of_target(np.array([[1, 2], [3, 1]])) 'multiclass-multioutput' >>> type_of_target([[1, 2]]) 'multiclass-multioutput' >>> type_of_target(np.array([[1.5, 2.0], [3.0, 1.6]])) 'continuous-multioutput' >>> type_of_target(np.array([[0, 1], [1, 1]])) 'multilabel-indicator' """ valid = ((isinstance(y, (Sequence, spmatrix)) or hasattr(y, '__array__')) and not isinstance(y, string_types)) if not valid: raise ValueError('Expected array-like (array or non-string sequence), ' 'got %r' % y) if is_multilabel(y): return 'multilabel-indicator' try: y = np.asarray(y) except ValueError: # Known to fail in numpy 1.3 for array of arrays return 'unknown' # The old sequence of sequences format try: if (not hasattr(y[0], '__array__') and isinstance(y[0], Sequence) and not isinstance(y[0], string_types)): raise ValueError('You appear to be using a legacy multi-label data' ' representation. 
Sequence of sequences are no' ' longer supported; use a binary array or sparse' ' matrix instead.') except IndexError: pass # Invalid inputs if y.ndim > 2 or (y.dtype == object and len(y) and not isinstance(y.flat[0], string_types)): return 'unknown' # [[[1, 2]]] or [obj_1] and not ["label_1"] if y.ndim == 2 and y.shape[1] == 0: return 'unknown' # [[]] if y.ndim == 2 and y.shape[1] > 1: suffix = "-multioutput" # [[1, 2], [1, 2]] else: suffix = "" # [1, 2, 3] or [[1], [2], [3]] # check float and contains non-integer float values if y.dtype.kind == 'f' and np.any(y != y.astype(int)): # [.1, .2, 3] or [[.1, .2, 3]] or [[1., .2]] and not [1., 2., 3.] return 'continuous' + suffix if (len(np.unique(y)) > 2) or (y.ndim >= 2 and len(y[0]) > 1): return 'multiclass' + suffix # [1, 2, 3] or [[1., 2., 3]] or [[1, 2]] else: return 'binary' # [1, 2] or [["a"], ["b"]] def _check_partial_fit_first_call(clf, classes=None): """Private helper function for factorizing common classes param logic Estimators that implement the ``partial_fit`` API need to be provided with the list of possible classes at the first call to partial_fit. Subsequent calls to partial_fit should check that ``classes`` is still consistent with a previous value of ``clf.classes_`` when provided. This function returns True if it detects that this was the first call to ``partial_fit`` on ``clf``. In that case the ``classes_`` attribute is also set on ``clf``. """ if getattr(clf, 'classes_', None) is None and classes is None: raise ValueError("classes must be passed on the first call " "to partial_fit.") elif classes is not None: if getattr(clf, 'classes_', None) is not None: if not np.all(clf.classes_ == unique_labels(classes)): raise ValueError( "`classes=%r` is not the same as on last call " "to partial_fit, was: %r" % (classes, clf.classes_)) else: # This is the first call to partial_fit clf.classes_ = unique_labels(classes) return True # classes is None and clf.classes_ has already previously been set: # nothing to do return False def class_distribution(y, sample_weight=None): """Compute class priors from multioutput-multiclass target data Parameters ---------- y : array-like or sparse matrix of size (n_samples, n_outputs) The labels for each example. sample_weight : array-like of shape = (n_samples,), optional Sample weights. Returns ------- classes : list of size n_outputs of arrays of size (n_classes,) List of classes for each column. n_classes : list of integers of size n_outputs Number of classes in each column. class_prior : list of size n_outputs of arrays of size (n_classes,) Class distribution of each column.
""" classes = [] n_classes = [] class_prior = [] n_samples, n_outputs = y.shape if issparse(y): y = y.tocsc() y_nnz = np.diff(y.indptr) for k in range(n_outputs): col_nonzero = y.indices[y.indptr[k]:y.indptr[k + 1]] # separate sample weights for zero and non-zero elements if sample_weight is not None: nz_samp_weight = np.asarray(sample_weight)[col_nonzero] zeros_samp_weight_sum = (np.sum(sample_weight) - np.sum(nz_samp_weight)) else: nz_samp_weight = None zeros_samp_weight_sum = y.shape[0] - y_nnz[k] classes_k, y_k = np.unique(y.data[y.indptr[k]:y.indptr[k + 1]], return_inverse=True) class_prior_k = bincount(y_k, weights=nz_samp_weight) # An explicit zero was found, combine its wieght with the wieght # of the implicit zeros if 0 in classes_k: class_prior_k[classes_k == 0] += zeros_samp_weight_sum # If an there is an implict zero and it is not in classes and # class_prior, make an entry for it if 0 not in classes_k and y_nnz[k] < y.shape[0]: classes_k = np.insert(classes_k, 0, 0) class_prior_k = np.insert(class_prior_k, 0, zeros_samp_weight_sum) classes.append(classes_k) n_classes.append(classes_k.shape[0]) class_prior.append(class_prior_k / class_prior_k.sum()) else: for k in range(n_outputs): classes_k, y_k = np.unique(y[:, k], return_inverse=True) classes.append(classes_k) n_classes.append(classes_k.shape[0]) class_prior_k = bincount(y_k, weights=sample_weight) class_prior.append(class_prior_k / class_prior_k.sum()) return (classes, n_classes, class_prior)
bsd-3-clause
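A short sketch exercising the target-type utilities defined in the module above; the expected outputs are taken directly from its doctests:

import numpy as np
from sklearn.utils.multiclass import type_of_target, unique_labels, is_multilabel

print(type_of_target([1, -1, -1, 1]))              # 'binary'
print(type_of_target(np.array([[0, 1], [1, 1]])))  # 'multilabel-indicator'
print(unique_labels([1, 2, 10], [5, 11]))          # array([ 1,  2,  5, 10, 11])
print(is_multilabel(np.array([[1, 0], [0, 0]])))   # True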
kagayakidan/scikit-learn
sklearn/cluster/tests/test_mean_shift.py
150
3651
""" Testing for mean shift clustering methods """ import numpy as np import warnings from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_false from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raise_message from sklearn.cluster import MeanShift from sklearn.cluster import mean_shift from sklearn.cluster import estimate_bandwidth from sklearn.cluster import get_bin_seeds from sklearn.datasets.samples_generator import make_blobs n_clusters = 3 centers = np.array([[1, 1], [-1, -1], [1, -1]]) + 10 X, _ = make_blobs(n_samples=300, n_features=2, centers=centers, cluster_std=0.4, shuffle=True, random_state=11) def test_estimate_bandwidth(): # Test estimate_bandwidth bandwidth = estimate_bandwidth(X, n_samples=200) assert_true(0.9 <= bandwidth <= 1.5) def test_mean_shift(): # Test MeanShift algorithm bandwidth = 1.2 ms = MeanShift(bandwidth=bandwidth) labels = ms.fit(X).labels_ labels_unique = np.unique(labels) n_clusters_ = len(labels_unique) assert_equal(n_clusters_, n_clusters) cluster_centers, labels = mean_shift(X, bandwidth=bandwidth) labels_unique = np.unique(labels) n_clusters_ = len(labels_unique) assert_equal(n_clusters_, n_clusters) def test_parallel(): ms1 = MeanShift(n_jobs=2) ms1.fit(X) ms2 = MeanShift() ms2.fit(X) assert_array_equal(ms1.cluster_centers_,ms2.cluster_centers_) assert_array_equal(ms1.labels_,ms2.labels_) def test_meanshift_predict(): # Test MeanShift.predict ms = MeanShift(bandwidth=1.2) labels = ms.fit_predict(X) labels2 = ms.predict(X) assert_array_equal(labels, labels2) def test_meanshift_all_orphans(): # init away from the data, crash with a sensible warning ms = MeanShift(bandwidth=0.1, seeds=[[-9, -9], [-10, -10]]) msg = "No point was within bandwidth=0.1" assert_raise_message(ValueError, msg, ms.fit, X,) def test_unfitted(): # Non-regression: before fit, there should be not fitted attributes. ms = MeanShift() assert_false(hasattr(ms, "cluster_centers_")) assert_false(hasattr(ms, "labels_")) def test_bin_seeds(): # Test the bin seeding technique which can be used in the mean shift # algorithm # Data is just 6 points in the plane X = np.array([[1., 1.], [1.4, 1.4], [1.8, 1.2], [2., 1.], [2.1, 1.1], [0., 0.]]) # With a bin coarseness of 1.0 and min_bin_freq of 1, 3 bins should be # found ground_truth = set([(1., 1.), (2., 1.), (0., 0.)]) test_bins = get_bin_seeds(X, 1, 1) test_result = set([tuple(p) for p in test_bins]) assert_true(len(ground_truth.symmetric_difference(test_result)) == 0) # With a bin coarseness of 1.0 and min_bin_freq of 2, 2 bins should be # found ground_truth = set([(1., 1.), (2., 1.)]) test_bins = get_bin_seeds(X, 1, 2) test_result = set([tuple(p) for p in test_bins]) assert_true(len(ground_truth.symmetric_difference(test_result)) == 0) # With a bin size of 0.01 and min_bin_freq of 1, 6 bins should be found # we bail and use the whole data here. with warnings.catch_warnings(record=True): test_bins = get_bin_seeds(X, 0.01, 1) assert_array_equal(test_bins, X) # tight clusters around [0, 0] and [1, 1], only get two bins X, _ = make_blobs(n_samples=100, n_features=2, centers=[[0, 0], [1, 1]], cluster_std=0.1, random_state=0) test_bins = get_bin_seeds(X, 1) assert_array_equal(test_bins, [[0, 0], [1, 1]])
bsd-3-clause
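For reference, the happy path covered by the tests above, condensed into a sketch (the data generation and the 3-cluster expectation come straight from the test module):

import numpy as np
from sklearn.cluster import MeanShift, estimate_bandwidth
from sklearn.datasets.samples_generator import make_blobs

centers = np.array([[1, 1], [-1, -1], [1, -1]]) + 10
X, _ = make_blobs(n_samples=300, centers=centers, cluster_std=0.4,
                  random_state=11)
bandwidth = estimate_bandwidth(X, n_samples=200)   # expected in [0.9, 1.5]
ms = MeanShift(bandwidth=bandwidth).fit(X)
print(len(np.unique(ms.labels_)))                  # expected: 3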
juanprietob/ExtractMSLesion
py/sgd_fit_ms_wm.py
1
5045
from __future__ import print_function import matplotlib.pyplot as plt import numpy as np import os import sys import tarfile from IPython.display import display, Image from scipy import ndimage from sklearn import neighbors, linear_model from six.moves.urllib.request import urlretrieve from six.moves import cPickle as pickle from sklearn.decomposition import PCA from sklearn.cluster import KMeans import argparse parser = argparse.ArgumentParser() parser.add_argument('--pickle', help='Pickle file, check the script readImages to generate this file.', required=True) args = parser.parse_args() pickle_file = args.pickle f = open(pickle_file, 'rb') data = pickle.load(f) train_dataset = data["train_dataset"] train_labels = data["train_labels"] valid_dataset = data["valid_dataset"] valid_labels = data["valid_labels"] test_dataset = data["test_dataset"] test_labels = data["test_labels"] img_head = data["img_head"] img_size = img_head["sizes"] h = .02 # Reformat into a TensorFlow-friendly shape: # - convolutions need the image data formatted as a cube (width by height by #channels) # - labels as float 1-hot encodings. num_labels = 3 in_depth = img_size[3] #zdim in_height = img_size[2] #ydim in_width = img_size[1] #xdim num_channels = img_size[0] #num channels print('Training set', train_dataset.shape, train_labels.shape) print('Validation set', valid_dataset.shape, valid_labels.shape) print('Test set', test_dataset.shape, test_labels.shape) (samples, depth, height, width, num_channels) = train_dataset.shape train_dataset = np.reshape(train_dataset,(samples,depth*height*width*num_channels))[0:samples] (samples, depth, height, width, num_channels) = valid_dataset.shape valid_dataset = np.reshape(valid_dataset,(samples,depth*height*width*num_channels))[0:samples] (samples, depth, height, width, num_channels) = test_dataset.shape test_dataset = np.reshape(test_dataset,(samples,depth*height*width*num_channels))[0:samples] print('Training set reshaped', train_dataset.shape, train_labels.shape) print('Validation set reshaped', valid_dataset.shape, valid_labels.shape) print('Validation set reshaped', test_dataset.shape, test_labels.shape) basedir = os.path.dirname(pickle_file); filename = os.path.splitext(os.path.basename(pickle_file))[0] pickle_file = os.path.join(basedir, filename + "sgdfit.pickle") try: f = open(pickle_file, 'rb') data = pickle.load(f) sgdc = data['sgdc'] except Exception as e: print(e) sgdc = linear_model.SGDClassifier(n_iter=100, n_jobs=-1, loss='log') sgdc.fit(train_dataset, train_labels) try: f = open(pickle_file, 'wb') save = { 'sgdc': sgdc } pickle.dump(save, f, pickle.HIGHEST_PROTOCOL) f.close() except Exception as e: print('Unable to save data to', pickle_file, ':', e) raise print('SGDC test score: %f' % sgdc.score(test_dataset, test_labels)) print('SGDC validation score: %f' % sgdc.score(valid_dataset, valid_labels)) pickle_file = os.path.join(basedir, filename + 'pca.pickle') try: f = open(pickle_file, 'rb') data = pickle.load(f) reduced_data = data['pca'] except Exception as e: print('Decomposing data:') reduced_data = PCA(n_components=2).fit_transform(train_dataset) try: print('Saving data:') f = open(pickle_file, 'wb') save = { 'pca': reduced_data } pickle.dump(save, f, pickle.HIGHEST_PROTOCOL) f.close() except Exception as e: print('Unable to save data to', pickle_file, ':', e) raise pickle_file = os.path.join(basedir, filename + '_kmeans.pickle') try: f = open(pickle_file, 'rb') data = pickle.load(f) kmeans = data['kmeans'] except Exception as e: print('Clustering data:') 
kmeans = KMeans(init='k-means++', n_clusters=num_labels, n_init=10) kmeans.fit(reduced_data) try: f = open(pickle_file, 'wb') save = { 'kmeans': kmeans } pickle.dump(save, f, pickle.HIGHEST_PROTOCOL) f.close() except Exception as e: print('Unable to save data to', pickle_file, ':', e) raise # Plot the decision boundary. For that, we will assign a color to each x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1 y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1 plt.figure(1) plt.clf() # plt.imshow(reduced_data, interpolation='nearest', # extent=(x_min, x_max, y_min, y_max), # cmap=plt.cm.Paired, # aspect='auto', origin='lower') def getColor(l): if l == 0: return 'magenta' elif l == 1: return 'yellow' return 'cyan' colors = [getColor(l) for l in train_labels] plt.scatter(reduced_data[:, 0], reduced_data[:, 1], c=colors) #Plot the centroids as a white X centroids = kmeans.cluster_centers_ plt.scatter(centroids[:, 0], centroids[:, 1], marker='x', s=169, linewidths=3, c=['r','b'], zorder=10) #plt.title('K-means clustering on the digits dataset (PCA-reduced data) Centroids are marked with white cross') plt.xlim(x_min, x_max) plt.ylim(y_min, y_max) plt.xticks(()) plt.yticks(()) plt.show()
apache-2.0
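The script above repeats one caching idiom three times (SGD fit, PCA, k-means): try to unpickle a saved result, recompute and pickle on failure. A distilled sketch of that pattern; the helper name and file path are hypothetical:

import os
import pickle

def cached(path, compute):
    # Return a previously pickled result, or compute and pickle it.
    if os.path.exists(path):
        with open(path, 'rb') as f:
            return pickle.load(f)
    result = compute()
    with open(path, 'wb') as f:
        pickle.dump(result, f, pickle.HIGHEST_PROTOCOL)
    return result

# e.g. reduced = cached('pca.pickle',
#                       lambda: PCA(n_components=2).fit_transform(train_dataset))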
NWine/trading-with-python
lib/cboe.py
76
4433
# -*- coding: utf-8 -*-
"""
toolset working with cboe data

@author: Jev Kuznetsov
Licence: BSD
"""
from datetime import datetime, date
import urllib2
from pandas import DataFrame, Index
from pandas.core import datetools
import numpy as np
import pandas as pd


def monthCode(month):
    """
    perform month -> code and back conversion

    Input: either month nr (int) or month code (str)
    Returns: code or month nr
    """
    codes = ('F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z')
    if isinstance(month, int):
        return codes[month - 1]
    elif isinstance(month, str):
        return codes.index(month) + 1
    else:
        raise ValueError('Function accepts int or str')


def vixExpiration(year, month):
    """ expiration date of a VX future """
    t = datetime(year, month, 1) + datetools.relativedelta(months=1)

    offset = datetools.Week(weekday=4)
    if t.weekday() <> 4:
        t_new = t + 3 * offset
    else:
        t_new = t + 2 * offset

    t_exp = t_new - datetools.relativedelta(days=30)
    return t_exp


def getPutCallRatio():
    """ download current Put/Call ratio"""
    urlStr = 'http://www.cboe.com/publish/ScheduledTask/MktData/datahouse/totalpc.csv'

    try:
        lines = urllib2.urlopen(urlStr).readlines()
    except Exception, e:
        s = "Failed to download:\n{0}".format(e)
        print s

    headerLine = 2
    header = lines[headerLine].strip().split(',')

    data = [[] for i in range(len(header))]
    for line in lines[(headerLine + 1):]:
        fields = line.rstrip().split(',')
        data[0].append(datetime.strptime(fields[0], '%m/%d/%Y'))
        for i, field in enumerate(fields[1:]):
            data[i + 1].append(float(field))

    return DataFrame(dict(zip(header[1:], data[1:])), index=Index(data[0]))


def getHistoricData(symbols=['VIX', 'VXV', 'VXMT', 'VVIX']):
    ''' get historic data from CBOE
        return dataframe
    '''
    if not isinstance(symbols, list):
        symbols = [symbols]

    urls = {'VIX': 'http://www.cboe.com/publish/ScheduledTask/MktData/datahouse/vixcurrent.csv',
            'VXV': 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/vxvdailyprices.csv',
            'VXMT': 'http://www.cboe.com/publish/ScheduledTask/MktData/datahouse/vxmtdailyprices.csv',
            'VVIX': 'http://www.cboe.com/publish/scheduledtask/mktdata/datahouse/VVIXtimeseries.csv'}

    startLines = {'VIX': 1, 'VXV': 2, 'VXMT': 2, 'VVIX': 1}
    cols = {'VIX': 'VIX Close', 'VXV': 'CLOSE', 'VXMT': 'Close', 'VVIX': 'VVIX'}

    data = {}

    for symbol in symbols:
        urlStr = urls[symbol]
        print 'Downloading %s from %s' % (symbol, urlStr)
        data[symbol] = pd.read_csv(urllib2.urlopen(urlStr),
                                   header=startLines[symbol],
                                   index_col=0, parse_dates=True)[cols[symbol]]

    return pd.DataFrame(data)


# ---------------------classes--------------------------------------------

class VixFuture(object):
    """
    Class for easy handling of futures data.
    """
    def __init__(self, year, month):
        self.year = year
        self.month = month

    def expirationDate(self):
        return vixExpiration(self.year, self.month)

    def daysLeft(self, date):
        """ business days to expiration date """
        # this will cause a problem with pandas 0.14 and higher...
        # Method is deprecated and replaced by DatetimeIndex
        from pandas import DateRange
        r = DateRange(date, self.expirationDate())
        return len(r)

    def __repr__(self):
        return 'VX future [%i-%i %s] Expires: %s' % (self.year, self.month,
                                                     monthCode(self.month),
                                                     self.expirationDate())


# -------------------test functions---------------------------------------

def testDownload():
    vix = getHistoricData('VIX')
    vxv = getHistoricData('VXV')
    vix.plot()
    vxv.plot()


def testExpiration():
    for month in xrange(1, 13):
        d = vixExpiration(2011, month)
        print d.strftime("%B, %d %Y (%A)")


if __name__ == '__main__':
    # testExpiration()
    v = VixFuture(2011, 11)
    print v
    print v.daysLeft(datetime(2011, 11, 10))
bsd-3-clause
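A quick sketch of the helpers defined in the module above, in its own Python 2 register (the cboe import path is an assumption):

from cboe import monthCode, vixExpiration, VixFuture  # assumed import path

print monthCode(11)            # 'X'
print monthCode('X')           # 11
print vixExpiration(2011, 11)  # expiration date of the Nov 2011 VX future
print VixFuture(2011, 11)      # repr includes the expiration date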
karstenw/nodebox-pyobjc
examples/Extended Application/matplotlib/examples/api/quad_bezier.py
1
1362
""" ============ Bezier Curve ============ This example showcases the PathPatch object to create a Bezier polycurve path patch. """ import matplotlib.path as mpath import matplotlib.patches as mpatches import matplotlib.pyplot as plt # nodebox section if __name__ == '__builtin__': # were in nodebox import os import tempfile W = 800 inset = 20 size(W, 600) plt.cla() plt.clf() plt.close('all') def tempimage(): fob = tempfile.NamedTemporaryFile(mode='w+b', suffix='.png', delete=False) fname = fob.name fob.close() return fname imgx = 20 imgy = 0 def pltshow(plt, dpi=150): global imgx, imgy temppath = tempimage() plt.savefig(temppath, dpi=dpi) dx,dy = imagesize(temppath) w = min(W,dx) image(temppath,imgx,imgy,width=w) imgy = imgy + dy + 20 os.remove(temppath) size(W, HEIGHT+dy+40) else: def pltshow(mplpyplot): mplpyplot.show() # nodebox section end Path = mpath.Path fig, ax = plt.subplots() pp1 = mpatches.PathPatch( Path([(0, 0), (1, 0), (1, 1), (0, 0)], [Path.MOVETO, Path.CURVE3, Path.CURVE3, Path.CLOSEPOLY]), fc="none", transform=ax.transData) ax.add_patch(pp1) ax.plot([0.75], [0.25], "ro") ax.set_title('The red point should be on the path') pltshow(plt)
mit
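Why the red point sits on the path: evaluating the quadratic Bezier segment above at t = 0.5, a small check that is not part of the original example:

import numpy as np

P0, P1, P2 = np.array([0., 0.]), np.array([1., 0.]), np.array([1., 1.])
t = 0.5
# B(t) = (1-t)^2 P0 + 2t(1-t) P1 + t^2 P2
B = (1 - t)**2 * P0 + 2*t*(1 - t)*P1 + t**2 * P2
print(B)  # [0.75 0.25], exactly the plotted red point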
shenzebang/scikit-learn
sklearn/feature_extraction/image.py
263
17600
""" The :mod:`sklearn.feature_extraction.image` submodule gathers utilities to extract features from images. """ # Authors: Emmanuelle Gouillart <[email protected]> # Gael Varoquaux <[email protected]> # Olivier Grisel # Vlad Niculae # License: BSD 3 clause from itertools import product import numbers import numpy as np from scipy import sparse from numpy.lib.stride_tricks import as_strided from ..utils import check_array, check_random_state from ..utils.fixes import astype from ..base import BaseEstimator __all__ = ['PatchExtractor', 'extract_patches_2d', 'grid_to_graph', 'img_to_graph', 'reconstruct_from_patches_2d'] ############################################################################### # From an image to a graph def _make_edges_3d(n_x, n_y, n_z=1): """Returns a list of edges for a 3D image. Parameters =========== n_x: integer The size of the grid in the x direction. n_y: integer The size of the grid in the y direction. n_z: integer, optional The size of the grid in the z direction, defaults to 1 """ vertices = np.arange(n_x * n_y * n_z).reshape((n_x, n_y, n_z)) edges_deep = np.vstack((vertices[:, :, :-1].ravel(), vertices[:, :, 1:].ravel())) edges_right = np.vstack((vertices[:, :-1].ravel(), vertices[:, 1:].ravel())) edges_down = np.vstack((vertices[:-1].ravel(), vertices[1:].ravel())) edges = np.hstack((edges_deep, edges_right, edges_down)) return edges def _compute_gradient_3d(edges, img): n_x, n_y, n_z = img.shape gradient = np.abs(img[edges[0] // (n_y * n_z), (edges[0] % (n_y * n_z)) // n_z, (edges[0] % (n_y * n_z)) % n_z] - img[edges[1] // (n_y * n_z), (edges[1] % (n_y * n_z)) // n_z, (edges[1] % (n_y * n_z)) % n_z]) return gradient # XXX: Why mask the image after computing the weights? def _mask_edges_weights(mask, edges, weights=None): """Apply a mask to edges (weighted or not)""" inds = np.arange(mask.size) inds = inds[mask.ravel()] ind_mask = np.logical_and(np.in1d(edges[0], inds), np.in1d(edges[1], inds)) edges = edges[:, ind_mask] if weights is not None: weights = weights[ind_mask] if len(edges.ravel()): maxval = edges.max() else: maxval = 0 order = np.searchsorted(np.unique(edges.ravel()), np.arange(maxval + 1)) edges = order[edges] if weights is None: return edges else: return edges, weights def _to_graph(n_x, n_y, n_z, mask=None, img=None, return_as=sparse.coo_matrix, dtype=None): """Auxiliary function for img_to_graph and grid_to_graph """ edges = _make_edges_3d(n_x, n_y, n_z) if dtype is None: if img is None: dtype = np.int else: dtype = img.dtype if img is not None: img = np.atleast_3d(img) weights = _compute_gradient_3d(edges, img) if mask is not None: edges, weights = _mask_edges_weights(mask, edges, weights) diag = img.squeeze()[mask] else: diag = img.ravel() n_voxels = diag.size else: if mask is not None: mask = astype(mask, dtype=np.bool, copy=False) mask = np.asarray(mask, dtype=np.bool) edges = _mask_edges_weights(mask, edges) n_voxels = np.sum(mask) else: n_voxels = n_x * n_y * n_z weights = np.ones(edges.shape[1], dtype=dtype) diag = np.ones(n_voxels, dtype=dtype) diag_idx = np.arange(n_voxels) i_idx = np.hstack((edges[0], edges[1])) j_idx = np.hstack((edges[1], edges[0])) graph = sparse.coo_matrix((np.hstack((weights, weights, diag)), (np.hstack((i_idx, diag_idx)), np.hstack((j_idx, diag_idx)))), (n_voxels, n_voxels), dtype=dtype) if return_as is np.ndarray: return graph.toarray() return return_as(graph) def img_to_graph(img, mask=None, return_as=sparse.coo_matrix, dtype=None): """Graph of the pixel-to-pixel gradient connections Edges are weighted 
def img_to_graph(img, mask=None, return_as=sparse.coo_matrix, dtype=None):
    """Graph of the pixel-to-pixel gradient connections

    Edges are weighted with the gradient values.

    Read more in the :ref:`User Guide <image_feature_extraction>`.

    Parameters
    ----------
    img : ndarray, 2D or 3D
        2D or 3D image
    mask : ndarray of booleans, optional
        An optional mask of the image, to consider only part of the
        pixels.
    return_as : np.ndarray or a sparse matrix class, optional
        The class to use to build the returned adjacency matrix.
    dtype : None or dtype, optional
        The data of the returned sparse matrix. By default it is the
        dtype of img

    Notes
    -----
    For sklearn versions 0.14.1 and prior, return_as=np.ndarray was handled
    by returning a dense np.matrix instance.  Going forward, np.ndarray
    returns an np.ndarray, as expected.

    For compatibility, user code relying on this method should wrap its
    calls in ``np.asarray`` to avoid type issues.
    """
    img = np.atleast_3d(img)
    n_x, n_y, n_z = img.shape
    return _to_graph(n_x, n_y, n_z, mask, img, return_as, dtype)


def grid_to_graph(n_x, n_y, n_z=1, mask=None, return_as=sparse.coo_matrix,
                  dtype=np.int):
    """Graph of the pixel-to-pixel connections

    Edges exist if 2 voxels are connected.

    Parameters
    ----------
    n_x : int
        Dimension in x axis
    n_y : int
        Dimension in y axis
    n_z : int, optional, default 1
        Dimension in z axis
    mask : ndarray of booleans, optional
        An optional mask of the image, to consider only part of the
        pixels.
    return_as : np.ndarray or a sparse matrix class, optional
        The class to use to build the returned adjacency matrix.
    dtype : dtype, optional, default int
        The data of the returned sparse matrix. By default it is int

    Notes
    -----
    For sklearn versions 0.14.1 and prior, return_as=np.ndarray was handled
    by returning a dense np.matrix instance.  Going forward, np.ndarray
    returns an np.ndarray, as expected.

    For compatibility, user code relying on this method should wrap its
    calls in ``np.asarray`` to avoid type issues.
    """
    return _to_graph(n_x, n_y, n_z, mask=mask, return_as=return_as,
                     dtype=dtype)
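# A minimal usage sketch: the adjacency matrices built above are typically
# passed as the ``connectivity`` argument of structured estimators such as
# agglomerative clustering:
#
#     >>> graph = grid_to_graph(4, 4)
#     >>> graph.shape
#     (16, 16)
#
# img_to_graph produces the same grid topology but weights each edge with the
# local gradient of ``img``, so downstream algorithms can take image contrast
# into account.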
###############################################################################
# From an image to a set of small image patches


def _compute_n_patches(i_h, i_w, p_h, p_w, max_patches=None):
    """Compute the number of patches that will be extracted in an image.

    Read more in the :ref:`User Guide <image_feature_extraction>`.

    Parameters
    ----------
    i_h : int
        The image height
    i_w : int
        The image width
    p_h : int
        The height of a patch
    p_w : int
        The width of a patch
    max_patches : integer or float, optional default is None
        The maximum number of patches to extract. If max_patches is a float
        between 0 and 1, it is taken to be a proportion of the total number
        of patches.
    """
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    all_patches = n_h * n_w

    if max_patches:
        if (isinstance(max_patches, (numbers.Integral))
                and max_patches < all_patches):
            return max_patches
        elif (isinstance(max_patches, (numbers.Real))
                and 0 < max_patches < 1):
            return int(max_patches * all_patches)
        else:
            raise ValueError("Invalid value for max_patches: %r" % max_patches)
    else:
        return all_patches


def extract_patches(arr, patch_shape=8, extraction_step=1):
    """Extracts patches of any n-dimensional array in place using strides.

    Given an n-dimensional array it will return a 2n-dimensional array with
    the first n dimensions indexing patch position and the last n indexing
    the patch content. This operation is immediate (O(1)). A reshape
    performed on the first n dimensions will cause numpy to copy data,
    leading to a list of extracted patches.

    Read more in the :ref:`User Guide <image_feature_extraction>`.

    Parameters
    ----------
    arr : ndarray
        n-dimensional array of which patches are to be extracted

    patch_shape : integer or tuple of length arr.ndim
        Indicates the shape of the patches to be extracted. If an
        integer is given, the shape will be a hypercube of
        sidelength given by its value.

    extraction_step : integer or tuple of length arr.ndim
        Indicates step size at which extraction shall be performed.
        If integer is given, then the step is uniform in all dimensions.

    Returns
    -------
    patches : strided ndarray
        2n-dimensional array indexing patches on first n dimensions and
        containing patches on the last n dimensions. These dimensions
        are fake, but this way no data is copied. A simple reshape invokes
        a copying operation to obtain a list of patches:
        result.reshape([-1] + list(patch_shape))
    """
    arr_ndim = arr.ndim

    if isinstance(patch_shape, numbers.Number):
        patch_shape = tuple([patch_shape] * arr_ndim)
    if isinstance(extraction_step, numbers.Number):
        extraction_step = tuple([extraction_step] * arr_ndim)

    patch_strides = arr.strides

    slices = [slice(None, None, st) for st in extraction_step]
    indexing_strides = arr[slices].strides

    patch_indices_shape = ((np.array(arr.shape) - np.array(patch_shape)) //
                           np.array(extraction_step)) + 1

    shape = tuple(list(patch_indices_shape) + list(patch_shape))
    strides = tuple(list(indexing_strides) + list(patch_strides))

    patches = as_strided(arr, shape=shape, strides=strides)
    return patches
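# A shape sketch for the strided view returned above, assuming a 2D float
# array: a (10, 10) input with patch_shape=3 and the default
# extraction_step=1 yields an (8, 8, 3, 3) view, i.e. 8 * 8 patch positions,
# each a 3x3 window sharing memory with ``arr``:
#
#     >>> patches = extract_patches(np.zeros((10, 10)), patch_shape=3)
#     >>> patches.shape
#     (8, 8, 3, 3)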
def extract_patches_2d(image, patch_size, max_patches=None, random_state=None):
    """Reshape a 2D image into a collection of patches

    The resulting patches are allocated in a dedicated array.

    Read more in the :ref:`User Guide <image_feature_extraction>`.

    Parameters
    ----------
    image : array, shape = (image_height, image_width) or
        (image_height, image_width, n_channels)
        The original image data. For color images, the last dimension
        specifies the channel: an RGB image would have `n_channels=3`.

    patch_size : tuple of ints (patch_height, patch_width)
        the dimensions of one patch

    max_patches : integer or float, optional default is None
        The maximum number of patches to extract. If max_patches is a float
        between 0 and 1, it is taken to be a proportion of the total number
        of patches.

    random_state : int or RandomState
        Pseudo-random number generator state used for random sampling to use
        if `max_patches` is not None.

    Returns
    -------
    patches : array, shape = (n_patches, patch_height, patch_width) or
        (n_patches, patch_height, patch_width, n_channels)
        The collection of patches extracted from the image, where
        `n_patches` is either `max_patches` or the total number of patches
        that can be extracted.

    Examples
    --------
    >>> from sklearn.feature_extraction import image
    >>> one_image = np.arange(16).reshape((4, 4))
    >>> one_image
    array([[ 0,  1,  2,  3],
           [ 4,  5,  6,  7],
           [ 8,  9, 10, 11],
           [12, 13, 14, 15]])
    >>> patches = image.extract_patches_2d(one_image, (2, 2))
    >>> print(patches.shape)
    (9, 2, 2)
    >>> patches[0]
    array([[0, 1],
           [4, 5]])
    >>> patches[1]
    array([[1, 2],
           [5, 6]])
    >>> patches[8]
    array([[10, 11],
           [14, 15]])
    """
    i_h, i_w = image.shape[:2]
    p_h, p_w = patch_size

    if p_h > i_h:
        raise ValueError("Height of the patch should be less than or equal"
                         " to the height of the image.")
    if p_w > i_w:
        raise ValueError("Width of the patch should be less than or equal"
                         " to the width of the image.")

    image = check_array(image, allow_nd=True)
    image = image.reshape((i_h, i_w, -1))
    n_colors = image.shape[-1]

    extracted_patches = extract_patches(image,
                                        patch_shape=(p_h, p_w, n_colors),
                                        extraction_step=1)

    n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, max_patches)
    if max_patches:
        rng = check_random_state(random_state)
        i_s = rng.randint(i_h - p_h + 1, size=n_patches)
        j_s = rng.randint(i_w - p_w + 1, size=n_patches)
        patches = extracted_patches[i_s, j_s, 0]
    else:
        patches = extracted_patches

    patches = patches.reshape(-1, p_h, p_w, n_colors)
    # remove the color dimension if useless
    if patches.shape[-1] == 1:
        return patches.reshape((n_patches, p_h, p_w))
    else:
        return patches


def reconstruct_from_patches_2d(patches, image_size):
    """Reconstruct the image from all of its patches.

    Patches are assumed to overlap and the image is constructed by filling in
    the patches from left to right, top to bottom, averaging the overlapping
    regions.

    Read more in the :ref:`User Guide <image_feature_extraction>`.

    Parameters
    ----------
    patches : array, shape = (n_patches, patch_height, patch_width) or
        (n_patches, patch_height, patch_width, n_channels)
        The complete set of patches. If the patches contain colour
        information, channels are indexed along the last dimension: RGB
        patches would have `n_channels=3`.

    image_size : tuple of ints (image_height, image_width) or
        (image_height, image_width, n_channels)
        the size of the image that will be reconstructed

    Returns
    -------
    image : array, shape = image_size
        the reconstructed image
    """
    i_h, i_w = image_size[:2]
    p_h, p_w = patches.shape[1:3]
    img = np.zeros(image_size)
    # compute the dimensions of the patches array
    n_h = i_h - p_h + 1
    n_w = i_w - p_w + 1
    for p, (i, j) in zip(patches, product(range(n_h), range(n_w))):
        img[i:i + p_h, j:j + p_w] += p

    for i in range(i_h):
        for j in range(i_w):
            # divide by the amount of overlap
            # XXX: is this the most efficient way? memory-wise yes, cpu wise?
            img[i, j] /= float(min(i + 1, p_h, i_h - i) *
                               min(j + 1, p_w, i_w - j))
    return img
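# A round-trip sketch: because overlapping regions are averaged, extracting
# every patch and reconstructing recovers the original image up to float
# precision:
#
#     >>> image = np.arange(16, dtype=np.float64).reshape((4, 4))
#     >>> patches = extract_patches_2d(image, (2, 2))
#     >>> np.allclose(reconstruct_from_patches_2d(patches, (4, 4)), image)
#     True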
""" def __init__(self, patch_size=None, max_patches=None, random_state=None): self.patch_size = patch_size self.max_patches = max_patches self.random_state = random_state def fit(self, X, y=None): """Do nothing and return the estimator unchanged This method is just there to implement the usual API and hence work in pipelines. """ return self def transform(self, X): """Transforms the image samples in X into a matrix of patch data. Parameters ---------- X : array, shape = (n_samples, image_height, image_width) or (n_samples, image_height, image_width, n_channels) Array of images from which to extract patches. For color images, the last dimension specifies the channel: a RGB image would have `n_channels=3`. Returns ------- patches: array, shape = (n_patches, patch_height, patch_width) or (n_patches, patch_height, patch_width, n_channels) The collection of patches extracted from the images, where `n_patches` is either `n_samples * max_patches` or the total number of patches that can be extracted. """ self.random_state = check_random_state(self.random_state) n_images, i_h, i_w = X.shape[:3] X = np.reshape(X, (n_images, i_h, i_w, -1)) n_channels = X.shape[-1] if self.patch_size is None: patch_size = i_h // 10, i_w // 10 else: patch_size = self.patch_size # compute the dimensions of the patches array p_h, p_w = patch_size n_patches = _compute_n_patches(i_h, i_w, p_h, p_w, self.max_patches) patches_shape = (n_images * n_patches,) + patch_size if n_channels > 1: patches_shape += (n_channels,) # extract the patches patches = np.empty(patches_shape) for ii, image in enumerate(X): patches[ii * n_patches:(ii + 1) * n_patches] = extract_patches_2d( image, patch_size, self.max_patches, self.random_state) return patches
bsd-3-clause
blockstack/pybitcoin
pybitcoin/passphrases/english_words.py
2
753213
# -*- coding: utf-8 -*-
"""
    pybitcoin
    ~~~~~

    :copyright: (c) 2014 by Halfmoon Labs
    :license: MIT, see LICENSE for more details.
"""

# 2048 words
english_words_bip39 = 'abandon,ability,able,about,above,absent,absorb,abstract,absurd,abuse,access,accident,account,accuse,achieve,acid,acoustic,acquire,across,act,action,actor,actress,actual,adapt,add,addict,address,adjust,admit,adult,advance,advice,aerobic,affair,afford,afraid,again,age,agent,agree,ahead,aim,air,airport,aisle,alarm,album,alcohol,alert,alien,all,alley,allow,almost,alone,alpha,already,also,alter,always,amateur,amazing,among,amount,amused,analyst,anchor,ancient,anger,angle,angry,animal,ankle,announce,annual,another,answer,antenna,antique,anxiety,any,apart,apology,appear,apple,approve,april,arch,arctic,area,arena,argue,arm,armed,armor,army,around,arrange,arrest,arrive,arrow,art,artefact,artist,artwork,ask,aspect,assault,asset,assist,assume,asthma,athlete,atom,attack,attend,attitude,attract,auction,audit,august,aunt,author,auto,autumn,average,avocado,avoid,awake,aware,away,awesome,awful,awkward,axis,baby,bachelor,bacon,badge,bag,balance,balcony,ball,bamboo,banana,banner,bar,barely,bargain,barrel,base,basic,basket,battle,beach,bean,beauty,because,become,beef,before,begin,behave,behind,believe,below,belt,bench,benefit,best,betray,better,between,beyond,bicycle,bid,bike,bind,biology,bird,birth,bitter,black,blade,blame,blanket,blast,bleak,bless,blind,blood,blossom,blouse,blue,blur,blush,board,boat,body,boil,bomb,bone,bonus,book,boost,border,boring,borrow,boss,bottom,bounce,box,boy,bracket,brain,brand,brass,brave,bread,breeze,brick,bridge,brief,bright,bring,brisk,broccoli,broken,bronze,broom,brother,brown,brush,bubble,buddy,budget,buffalo,build,bulb,bulk,bullet,bundle,bunker,burden,burger,burst,bus,business,busy,butter,buyer,buzz,cabbage,cabin,cable,cactus,cage,cake,call,calm,camera,camp,can,canal,cancel,candy,cannon,canoe,canvas,canyon,capable,capital,captain,car,carbon,card,cargo,carpet,carry,cart,case,cash,casino,castle,casual,cat,catalog,catch,category,cattle,caught,cause,caution,cave,ceiling,celery,cement,census,century,cereal,certain,chair,chalk,champion,change,chaos,chapter,charge,chase,chat,cheap,check,cheese,chef,cherry,chest,chicken,chief,child,chimney,choice,choose,chronic,chuckle,chunk,churn,cigar,cinnamon,circle,citizen,city,civil,claim,clap,clarify,claw,clay,clean,clerk,clever,click,client,cliff,climb,clinic,clip,clock,clog,close,cloth,cloud,clown,club,clump,cluster,clutch,coach,coast,coconut,code,coffee,coil,coin,collect,color,column,combine,come,comfort,comic,common,company,concert,conduct,confirm,congress,connect,consider,control,convince,cook,cool,copper,copy,coral,core,corn,correct,cost,cotton,couch,country,couple,course,cousin,cover,coyote,crack,cradle,craft,cram,crane,crash,crater,crawl,crazy,cream,credit,creek,crew,cricket,crime,crisp,critic,crop,cross,crouch,crowd,crucial,cruel,cruise,crumble,crunch,crush,cry,crystal,cube,culture,cup,cupboard,curious,current,curtain,curve,cushion,custom,cute,cycle,dad,damage,damp,dance,danger,daring,dash,daughter,dawn,day,deal,debate,debris,decade,december,decide,decline,decorate,decrease,deer,defense,define,defy,degree,delay,deliver,demand,demise,denial,dentist,deny,depart,depend,deposit,depth,deputy,derive,describe,desert,design,desk,despair,destroy,detail,detect,develop,device,devote,diagram,dial,diamond,diary,dice,diesel,diet,differ,digital,dignity,dilemma,dinner,dinosaur,direct,dirt,disagree,discover,disease,dish,dismiss,disorder,display,distance,divert,divide,divorce,dizzy,doctor
,document,dog,doll,dolphin,domain,donate,donkey,donor,door,dose,double,dove,draft,dragon,drama,drastic,draw,dream,dress,drift,drill,drink,drip,drive,drop,drum,dry,duck,dumb,dune,during,dust,dutch,duty,dwarf,dynamic,eager,eagle,early,earn,earth,easily,east,easy,echo,ecology,economy,edge,edit,educate,effort,egg,eight,either,elbow,elder,electric,elegant,element,elephant,elevator,elite,else,embark,embody,embrace,emerge,emotion,employ,empower,empty,enable,enact,end,endless,endorse,enemy,energy,enforce,engage,engine,enhance,enjoy,enlist,enough,enrich,enroll,ensure,enter,entire,entry,envelope,episode,equal,equip,era,erase,erode,erosion,error,erupt,escape,essay,essence,estate,eternal,ethics,evidence,evil,evoke,evolve,exact,example,excess,exchange,excite,exclude,excuse,execute,exercise,exhaust,exhibit,exile,exist,exit,exotic,expand,expect,expire,explain,expose,express,extend,extra,eye,eyebrow,fabric,face,faculty,fade,faint,faith,fall,false,fame,family,famous,fan,fancy,fantasy,farm,fashion,fat,fatal,father,fatigue,fault,favorite,feature,february,federal,fee,feed,feel,female,fence,festival,fetch,fever,few,fiber,fiction,field,figure,file,film,filter,final,find,fine,finger,finish,fire,firm,first,fiscal,fish,fit,fitness,fix,flag,flame,flash,flat,flavor,flee,flight,flip,float,flock,floor,flower,fluid,flush,fly,foam,focus,fog,foil,fold,follow,food,foot,force,forest,forget,fork,fortune,forum,forward,fossil,foster,found,fox,fragile,frame,frequent,fresh,friend,fringe,frog,front,frost,frown,frozen,fruit,fuel,fun,funny,furnace,fury,future,gadget,gain,galaxy,gallery,game,gap,garage,garbage,garden,garlic,garment,gas,gasp,gate,gather,gauge,gaze,general,genius,genre,gentle,genuine,gesture,ghost,giant,gift,giggle,ginger,giraffe,girl,give,glad,glance,glare,glass,glide,glimpse,globe,gloom,glory,glove,glow,glue,goat,goddess,gold,good,goose,gorilla,gospel,gossip,govern,gown,grab,grace,grain,grant,grape,grass,gravity,great,green,grid,grief,grit,grocery,group,grow,grunt,guard,guess,guide,guilt,guitar,gun,gym,habit,hair,half,hammer,hamster,hand,happy,harbor,hard,harsh,harvest,hat,have,hawk,hazard,head,health,heart,heavy,hedgehog,height,hello,helmet,help,hen,hero,hidden,high,hill,hint,hip,hire,history,hobby,hockey,hold,hole,holiday,hollow,home,honey,hood,hope,horn,horror,horse,hospital,host,hotel,hour,hover,hub,huge,human,humble,humor,hundred,hungry,hunt,hurdle,hurry,hurt,husband,hybrid,ice,icon,idea,identify,idle,ignore,ill,illegal,illness,image,imitate,immense,immune,impact,impose,improve,impulse,inch,include,income,increase,index,indicate,indoor,industry,infant,inflict,inform,inhale,inherit,initial,inject,injury,inmate,inner,innocent,input,inquiry,insane,insect,inside,inspire,install,intact,interest,into,invest,invite,involve,iron,island,isolate,issue,item,ivory,jacket,jaguar,jar,jazz,jealous,jeans,jelly,jewel,job,join,joke,journey,joy,judge,juice,jump,jungle,junior,junk,just,kangaroo,keen,keep,ketchup,key,kick,kid,kidney,kind,kingdom,kiss,kit,kitchen,kite,kitten,kiwi,knee,knife,knock,know,lab,label,labor,ladder,lady,lake,lamp,language,laptop,large,later,latin,laugh,laundry,lava,law,lawn,lawsuit,layer,lazy,leader,leaf,learn,leave,lecture,left,leg,legal,legend,leisure,lemon,lend,length,lens,leopard,lesson,letter,level,liar,liberty,library,license,life,lift,light,like,limb,limit,link,lion,liquid,list,little,live,lizard,load,loan,lobster,local,lock,logic,lonely,long,loop,lottery,loud,lounge,love,loyal,lucky,luggage,lumber,lunar,lunch,luxury,lyrics,machine,mad,magic,magnet,maid,mail,main,major,make,mammal,man,manage,mandate,
mango,mansion,manual,maple,marble,march,margin,marine,market,marriage,mask,mass,master,match,material,math,matrix,matter,maximum,maze,meadow,mean,measure,meat,mechanic,medal,media,melody,melt,member,memory,mention,menu,mercy,merge,merit,merry,mesh,message,metal,method,middle,midnight,milk,million,mimic,mind,minimum,minor,minute,miracle,mirror,misery,miss,mistake,mix,mixed,mixture,mobile,model,modify,mom,moment,monitor,monkey,monster,month,moon,moral,more,morning,mosquito,mother,motion,motor,mountain,mouse,move,movie,much,muffin,mule,multiply,muscle,museum,mushroom,music,must,mutual,myself,mystery,myth,naive,name,napkin,narrow,nasty,nation,nature,near,neck,need,negative,neglect,neither,nephew,nerve,nest,net,network,neutral,never,news,next,nice,night,noble,noise,nominee,noodle,normal,north,nose,notable,note,nothing,notice,novel,now,nuclear,number,nurse,nut,oak,obey,object,oblige,obscure,observe,obtain,obvious,occur,ocean,october,odor,off,offer,office,often,oil,okay,old,olive,olympic,omit,once,one,onion,online,only,open,opera,opinion,oppose,option,orange,orbit,orchard,order,ordinary,organ,orient,original,orphan,ostrich,other,outdoor,outer,output,outside,oval,oven,over,own,owner,oxygen,oyster,ozone,pact,paddle,page,pair,palace,palm,panda,panel,panic,panther,paper,parade,parent,park,parrot,party,pass,patch,path,patient,patrol,pattern,pause,pave,payment,peace,peanut,pear,peasant,pelican,pen,penalty,pencil,people,pepper,perfect,permit,person,pet,phone,photo,phrase,physical,piano,picnic,picture,piece,pig,pigeon,pill,pilot,pink,pioneer,pipe,pistol,pitch,pizza,place,planet,plastic,plate,play,please,pledge,pluck,plug,plunge,poem,poet,point,polar,pole,police,pond,pony,pool,popular,portion,position,possible,post,potato,pottery,poverty,powder,power,practice,praise,predict,prefer,prepare,present,pretty,prevent,price,pride,primary,print,priority,prison,private,prize,problem,process,produce,profit,program,project,promote,proof,property,prosper,protect,proud,provide,public,pudding,pull,pulp,pulse,pumpkin,punch,pupil,puppy,purchase,purity,purpose,purse,push,put,puzzle,pyramid,quality,quantum,quarter,question,quick,quit,quiz,quote,rabbit,raccoon,race,rack,radar,radio,rail,rain,raise,rally,ramp,ranch,random,range,rapid,rare,rate,rather,raven,raw,razor,ready,real,reason,rebel,rebuild,recall,receive,recipe,record,recycle,reduce,reflect,reform,refuse,region,regret,regular,reject,relax,release,relief,rely,remain,remember,remind,remove,render,renew,rent,reopen,repair,repeat,replace,report,require,rescue,resemble,resist,resource,response,result,retire,retreat,return,reunion,reveal,review,reward,rhythm,rib,ribbon,rice,rich,ride,ridge,rifle,right,rigid,ring,riot,ripple,risk,ritual,rival,river,road,roast,robot,robust,rocket,romance,roof,rookie,room,rose,rotate,rough,round,route,royal,rubber,rude,rug,rule,run,runway,rural,sad,saddle,sadness,safe,sail,salad,salmon,salon,salt,salute,same,sample,sand,satisfy,satoshi,sauce,sausage,save,say,scale,scan,scare,scatter,scene,scheme,school,science,scissors,scorpion,scout,scrap,screen,script,scrub,sea,search,season,seat,second,secret,section,security,seed,seek,segment,select,sell,seminar,senior,sense,sentence,series,service,session,settle,setup,seven,shadow,shaft,shallow,share,shed,shell,sheriff,shield,shift,shine,ship,shiver,shock,shoe,shoot,shop,short,shoulder,shove,shrimp,shrug,shuffle,shy,sibling,sick,side,siege,sight,sign,silent,silk,silly,silver,similar,simple,since,sing,siren,sister,situate,six,size,skate,sketch,ski,skill,skin,skirt,skull,slab,slam,sleep,slender,slice,slide,
slight,slim,slogan,slot,slow,slush,small,smart,smile,smoke,smooth,snack,snake,snap,sniff,snow,soap,soccer,social,sock,soda,soft,solar,soldier,solid,solution,solve,someone,song,soon,sorry,sort,soul,sound,soup,source,south,space,spare,spatial,spawn,speak,special,speed,spell,spend,sphere,spice,spider,spike,spin,spirit,split,spoil,sponsor,spoon,sport,spot,spray,spread,spring,spy,square,squeeze,squirrel,stable,stadium,staff,stage,stairs,stamp,stand,start,state,stay,steak,steel,stem,step,stereo,stick,still,sting,stock,stomach,stone,stool,story,stove,strategy,street,strike,strong,struggle,student,stuff,stumble,style,subject,submit,subway,success,such,sudden,suffer,sugar,suggest,suit,summer,sun,sunny,sunset,super,supply,supreme,sure,surface,surge,surprise,surround,survey,suspect,sustain,swallow,swamp,swap,swarm,swear,sweet,swift,swim,swing,switch,sword,symbol,symptom,syrup,system,table,tackle,tag,tail,talent,talk,tank,tape,target,task,taste,tattoo,taxi,teach,team,tell,ten,tenant,tennis,tent,term,test,text,thank,that,theme,then,theory,there,they,thing,this,thought,three,thrive,throw,thumb,thunder,ticket,tide,tiger,tilt,timber,time,tiny,tip,tired,tissue,title,toast,tobacco,today,toddler,toe,together,toilet,token,tomato,tomorrow,tone,tongue,tonight,tool,tooth,top,topic,topple,torch,tornado,tortoise,toss,total,tourist,toward,tower,town,toy,track,trade,traffic,tragic,train,transfer,trap,trash,travel,tray,treat,tree,trend,trial,tribe,trick,trigger,trim,trip,trophy,trouble,truck,true,truly,trumpet,trust,truth,try,tube,tuition,tumble,tuna,tunnel,turkey,turn,turtle,twelve,twenty,twice,twin,twist,two,type,typical,ugly,umbrella,unable,unaware,uncle,uncover,under,undo,unfair,unfold,unhappy,uniform,unique,unit,universe,unknown,unlock,until,unusual,unveil,update,upgrade,uphold,upon,upper,upset,urban,urge,usage,use,used,useful,useless,usual,utility,vacant,vacuum,vague,valid,valley,valve,van,vanish,vapor,various,vast,vault,vehicle,velvet,vendor,venture,venue,verb,verify,version,very,vessel,veteran,viable,vibrant,vicious,victory,video,view,village,vintage,violin,virtual,virus,visa,visit,visual,vital,vivid,vocal,voice,void,volcano,volume,vote,voyage,wage,wagon,wait,walk,wall,walnut,want,warfare,warm,warrior,wash,wasp,waste,water,wave,way,wealth,weapon,wear,weasel,weather,web,wedding,weekend,weird,welcome,west,wet,whale,what,wheat,wheel,when,where,whip,whisper,wide,width,wife,wild,will,win,window,wine,wing,wink,winner,winter,wire,wisdom,wise,wish,witness,wolf,woman,wonder,wood,wool,word,work,world,worry,worth,wrap,wreck,wrestle,wrist,write,wrong,yard,year,yellow,you,young,youth,zebra,zero,zone,zoo'

# 25202 words
english_words_wiktionary = \
'aardvark,aargh,aback,abacus,abandon,abandoned,abandoning,abandonment,abandons,abba,abbey,abbot,abbots,abdomen,abdominal,abduct,abducted,abducting,abduction,abductions,aberration,abetted,abide,abiding,abigail,abilities,ability,ablaze,able,abnormal,abnormalities,abnormality,abnormally,aboard,abode,abolish,abominable,abomination,aboriginal,abort,abortion,abortions,abound,about,above,aboveboard,abrasive,abreast,abroad,abrupt,abruptly,abs,absconded,absence,absences,absent,absentee,absolute,absolutely,absolutes,absolution,absolved,absorb,absorbed,absorbent,absorbing,absorption,abstain,abstinence,abstract,absurd,absurdity,absurdly,abundance,abundant,abundantly,abuse,abused,abuser,abusing,abusive,abut,abysmal,academia,academic,academics,academy,accelerant,accelerate,accelerated,accelerating,acceleration,accent,accents,accept,acceptable,acceptance,accepted,accepting,accepts,access,accessible,accessing,accessories,accessory,accident,accidental,accidentally,accidently,accidents,acclaim,acclimate,acclimated,accolades,accommodate,accommodating,accommodation,accommodations,accompanied,accompany,accompanying,accomplice,accomplices,accomplish,accomplished,accomplishing,accomplishment,accomplishments,accord,accordance,accorded,according,accordingly,account,accountability,accountable,accountant,accountants,accounted,accounting,accounts,accoutrements,accumulate,accumulated,accumulation,accuracy,accurate,accurately,accursed,accusation,accusations,accuse,accused,accuser,accusers,accuses,accusing,accustomed,ace,aced,aces,acetate,ache,ached,aches,achieve,achieved,achievement,achieving,aching,achingly,acid,acids,acing,acknowledge,acknowledged,acknowledgement,acknowledges,acme,acne,acorn,acoustic,acoustics,acquaint,acquaintance,acquaintances,acquainted,acquire,acquired,acquisition,acquisitions,acquitted,acre,acres,acrobat,across,act,acted,actin,acting,action,actionable,actions,activate,activated,activating,activation,activators,active,actively,activists,activities,activity,actor,actors,actress,actresses,acts,actual,actuality,actualization,actually,actuarial,acupuncture,acupuncturist,acute,ad,adage,adamant,adapt,adaptable,adapted,adapting,add,added,addendum,addict,addicted,addiction,addictive,addicts,adding,addition,additional,additions,additives,addled,address,addressed,addresses,addressing,adds,adenoids,adept,adequate,adequately,adhere,adherence,adhesive,adjacent,adjective,adjectives,adjourn,adjourned,adjust,adjustable,adjusted,adjusting,adjustment,adjustments,adjutant,administer,administered,administering,administration,administrative,administrator,administrators,admirable,admirably,admiral,admiration,admire,admired,admirer,admirers,admires,admiring,admission,admissions,admit,admits,admittance,admitted,admittedly,admitting,admonish,admonished,admonition,ado,adobe,adolescence,adolescent,adolescents,adopt,adopted,adopting,adoption,adoptive,adorable,adoration,adore,adored,adores,adoring,adrenaline,adrenals,ads,adult,adults,advance,advanced,advancement,advancements,advances,advancing,advantage,advantageous,advantages,advent,adventure,adventurer,adventures,adventurous,adversaries,adversary,adverse,adversely,adversity,advert,advertise,advertised,advertisement,advertisers,advertises,advertising,advice,advisable,advise,advised,adviser,advises,advising,advisor,advocacy,advocate,advocating,aerial,aerobic,aerobics,aerodynamics,aeroplane,aerosol,aerospace,aesthetic,aesthetics,afar,affair,affairs,affect,affected,affecting,affection,affectionate,affectionately,affections,affects,affidavit,affidavits,affiliated,affiliates,affi
liation,affinity,affirm,affirmative,affirming,afflicted,afford,afforded,affront,afloat,afoot,aforementioned,aforethought,afraid,aft,after,afterglow,afterlife,aftermath,afternoon,afternoons,afterthought,afterward,afterwards,ag,aga,again,against,age,aged,ageing,ageless,agencies,agency,agenda,agendas,agent,agents,ages,aggravate,aggravated,aggravating,aggravation,aggression,aggressive,aggressively,aggressor,agile,agility,agin,aging,agitate,agitated,agitation,agitators,ago,agonized,agonizing,agony,agree,agreeable,agreed,agreeing,agreement,agreements,agrees,agricultural,aground,ah,aha,ahead,ahem,ahold,ahoy,ai,aid,aide,aided,aides,aiding,aids,ail,ailing,ailment,ailments,ails,aim,aimed,aiming,aimless,aimlessly,aims,ain,air,airborne,aircraft,airfield,airhead,airing,airlift,airlifted,airline,airlines,airman,airmen,airplane,airplanes,airport,airports,airs,airspace,airstrip,airtight,airwaves,airway,airways,aisle,aisles,ajar,al,ala,alabaster,alameda,alamo,alan,alarm,alarmed,alarming,alarmist,alarms,alas,alaska,alastor,albacore,albatross,albino,album,albums,alcazar,alchemist,alchemy,alcohol,alcoholic,alcoholics,alcoholism,alcove,alderman,ale,alec,alert,alerted,alerting,alerts,ales,alexander,alfalfa,algae,algebra,algorithms,alias,aliases,alibi,alibis,alien,alienate,alienated,alienating,alienation,aliens,alight,aligning,alike,alimony,alive,all,allee,allegation,allegations,alleged,allegedly,allegiance,allegiances,alleging,alleluia,allergic,allergies,allergy,alleviate,alley,alleys,alliance,allies,alligator,alligators,alliteration,allotted,allow,allowable,allowance,allowed,allowing,allows,alluding,allure,alluring,ally,alma,almighty,almond,almonds,almost,aloe,aloft,aloha,alone,along,alongside,aloud,alpha,alphabet,alphabetically,alphabetized,alpine,already,alright,als,also,alt,altar,alter,alteration,alterations,altercation,altered,altering,alternate,alternates,alternating,alternative,alternatives,alternator,alters,although,altitude,alto,altogether,altruistic,aluminum,alumni,alumnus,always,am,ama,amah,amaretto,amassed,amateur,amateurs,amaze,amazed,amazes,amazing,amazingly,amazon,ambassador,amber,ambiance,ambience,ambient,ambiguity,ambiguous,ambition,ambitions,ambitious,ambivalence,ambivalent,ambrosia,ambulance,ambulances,ambush,ambushed,amen,amend,amended,amendment,amendments,amends,amenities,ami,amiable,amicable,amigo,amigos,amin,amino,amiss,ammo,ammonia,ammunition,amnesia,amniotic,among,amongst,amoral,amorous,amount,amounted,amounts,amp,amphetamines,ampicillin,ample,amply,ampule,amputate,amputated,amputation,amulet,amulets,amuse,amused,amusement,amuses,amusing,an,ana,anachronism,anaesthetic,anagram,analogy,analyse,analysis,analyst,analysts,analyze,analyzed,analyzing,anaphylactic,anarchist,anarchists,anarchy,anatomically,anatomy,ancestor,ancestors,anchor,anchorage,anchoring,anchors,anchovies,ancient,ancients,and,android,ane,anecdote,anecdotes,anemia,anemic,anesthesia,anesthesiologist,anesthesiology,anesthetic,anesthetics,aneurysm,anew,angel,angels,angelus,anger,angina,angiogram,angioplasty,angle,angles,angling,angora,angrier,angrily,angry,angst,anguish,anguished,ani,animal,animals,animated,animation,animosity,anise,ankle,ankles,anna,annals,annex,annihilate,annihilated,annihilation,anniversary,announce,announced,announcement,announcements,announcer,announces,announcing,annoy,annoyance,annoyances,annoyed,annoying,annoyingly,annoys,annual,annually,annul,annulled,annulment,anoint,anointed,anomalies,anomaly,anon,anonymity,anonymous,anorexia,anorexic,another,answer,answered,answering,answers,ant,antacid,antagonism,an
tagonistic,antagonize,antagonizing,ante,antelope,antenna,antennae,anthem,anthology,anthrax,anthropologist,anthropologists,anthropology,anti,antibiotic,antibiotics,antibodies,antibody,anticipate,anticipated,anticipating,anticipation,antics,antidepressant,antidepressants,antidote,antihistamine,antihistamines,antiquated,antique,antiques,antiquing,antiquities,antiquity,antisocial,antivenin,antler,ants,antsy,anvil,anxiety,anxious,anxiously,any,anybody,anyhow,anymore,anyone,anyplace,anything,anytime,anyway,anyways,anywhere,apache,apart,apartheid,apartment,apartments,apathy,ape,apes,apex,aphrodisiac,apiece,aplastic,apocalypse,apocalyptic,apollo,apologetic,apologies,apologise,apologize,apologized,apologizes,apologizing,apology,apostle,apostles,apostrophe,appalled,appalling,apparatus,apparel,apparent,apparently,apparition,appeal,appealed,appealing,appeals,appear,appearance,appearances,appeared,appearing,appears,appease,appeased,appendage,appendages,appendectomy,appendicitis,appendix,appetite,appetites,appetizer,appetizers,appetizing,applaud,applauded,applauding,applause,apple,applejack,apples,applesauce,appliance,appliances,applicants,application,applications,applied,applies,apply,applying,appoint,appointed,appointing,appointment,appointments,appraisal,appraise,appreciate,appreciated,appreciates,appreciation,appreciative,apprehend,apprehended,apprehension,apprehensive,apprentice,apprised,approach,approached,approaches,approaching,appropriate,appropriately,appropriations,approval,approve,approved,approving,approximate,approximately,approximation,apricot,apron,aprons,apropos,aptitude,aptly,aqua,aquarium,aquatic,ar,arachnid,arachnids,arbitrary,arbitration,arbitrator,arbor,arboretum,arc,arcade,arch,archaic,arched,archenemy,archeological,archeology,archer,arches,architect,architects,architecture,archives,arctic,ardent,are,area,areas,arena,arf,argentine,argon,arguably,argue,argued,argues,arguing,argument,argumentative,arguments,argyle,aria,arid,ariel,aright,arise,arises,aristocratic,arithmetic,ark,arm,armaments,armed,armies,arming,armor,armored,armory,armour,armpits,arms,army,aroma,arose,around,arouse,aroused,arousing,arraigned,arraignment,arrange,arranged,arrangement,arrangements,arranging,array,arrears,arrest,arrested,arresting,arrests,arrhythmia,arrival,arrivals,arrive,arrived,arrives,arriving,arrogance,arrogant,arrow,arrowhead,arrows,arroyo,arse,arsenal,arsenic,arson,arsonist,art,arterial,arteries,artery,artful,arthritis,artichoke,artichokes,article,articles,articulate,artifact,artifacts,artificial,artillery,artist,artiste,artistic,artistry,artists,arts,artsy,artwork,arty,arugula,as,asbestos,ascension,ascot,ash,ashamed,ashes,ashore,ashram,ashtray,ashtrays,aside,asinine,ask,asked,askew,asking,asks,asleep,asparagus,aspect,aspects,aspen,asphalt,asphyxiation,aspirations,aspire,aspirin,aspirins,assailant,assassin,assassinate,assassination,assassins,assault,assaulted,assaulting,assaults,assemble,assembled,assembler,assemblies,assembling,assembly,assertive,assertiveness,asses,assess,assessed,assessing,assessment,asset,assets,assign,assigned,assigning,assignment,assignments,assimilate,assimilated,assist,assistance,assistant,assistants,assisted,assisting,associate,associated,associates,associating,association,associations,assorted,assortment,assume,assumed,assumes,assuming,assumption,assumptions,assurance,assurances,assure,assured,assuredly,assures,assuring,asteroid,asteroids,asthma,astonished,astonishing,astonishment,astound,astounding,astral,astray,astronaut,astronauts,astronomer,astronomical,astronomy,astro
physics,astute,asunder,asylum,at,ate,atheists,athlete,athletes,athletic,atlas,atmosphere,atom,atomic,atonement,atop,atrium,atrocious,atrocities,atrophied,atropine,att,attaboy,attach,attache,attached,attachment,attachments,attack,attacked,attacker,attackers,attacking,attacks,attain,attained,attempt,attempted,attempting,attempts,attend,attendance,attendant,attendants,attended,attending,attends,attention,attentions,attentive,attest,attic,attired,attitude,attitudes,attorney,attorneys,attract,attracted,attracting,attraction,attractive,attracts,attribute,attributed,attributes,attuned,auction,auctioneer,auctioning,audacity,audible,audience,audiences,audio,audiotape,audit,audited,auditing,audition,auditioning,auditions,auditor,auditorium,auditory,auger,aught,augmentation,august,auk,auld,aunt,auntie,aunties,aunts,aura,auras,aurora,auspicious,authentic,authenticate,authenticated,authenticity,author,authoritative,authorities,authority,authorization,authorize,authorized,authorizing,authors,autism,autistic,auto,autobiographical,autograph,autographed,autographs,automated,automatic,automatically,automatics,automaton,automobile,automobiles,automotive,autonomous,autonomy,autopsies,autopsy,autumn,auxiliary,ava,availability,available,avalanche,avatar,avatars,ave,avenge,avenged,avenger,avengers,avenue,average,averages,averse,aversion,aviation,avid,avocado,avoid,avoidance,avoided,avoiding,avoids,aw,await,awaiting,awaits,awake,awaken,awakened,awakening,awakes,award,awarded,awards,aware,awareness,away,awe,awed,awesome,awful,awfully,awhile,awkward,awkwardly,awkwardness,awoke,awol,awry,ax,axe,axel,axis,axle,ay,aye,baba,babble,babbling,babe,babes,babies,baboon,baboons,babu,baby,babysitter,babysitters,bach,bachelor,bachelorette,bachelors,back,backbone,backdoor,backdrop,backed,backer,backfire,backfired,backfires,backfiring,backgammon,background,backhand,backing,backpack,backpacking,backpacks,backroom,backs,backseat,backside,backslide,backstabbing,backstage,backstreet,backstroke,backup,backups,backward,backwards,backyard,bacon,bacteria,bacterial,bad,badder,baddest,badge,badgered,badgering,badges,badly,badminton,badness,baffled,baffles,baffling,bag,bagel,bagels,baggage,bagged,baggies,bagging,baggy,bagman,bagpipes,bags,bah,bail,bailed,bailey,bailiff,bailiffs,bailing,bails,bait,baited,baiting,bake,baked,baker,bakeries,bakers,bakery,bakes,baking,baklava,balance,balanced,balances,balancing,balboa,balconies,balcony,bald,balding,baldness,bale,balk,ball,ballads,ballast,balled,baller,ballerina,ballet,ballgame,ballistic,ballistics,ballon,balloon,balloons,ballot,ballots,ballpark,ballplayer,ballplayers,ballpoint,ballroom,balls,ballsy,balm,balmoral,baloney,bam,bambino,bamboo,bamboozled,ban,banal,banality,banana,bananas,band,bandage,bandages,bandit,bandits,bands,bandwagon,bane,bang,banged,bangers,banging,bangles,bangs,banish,banished,banister,banjo,bank,bankbooks,banker,bankers,banking,bankroll,bankrolled,bankrupt,bankruptcy,bankrupted,banks,banned,banner,banners,banning,bannister,banquet,banshee,banter,banzai,baptism,baptist,baptists,baptize,baptized,bar,barb,barbarian,barbarians,barbaric,barbecue,barbecued,barbecues,barbed,barber,barbers,barbershop,barbs,bard,bare,bared,barefoot,barely,barf,barfed,barfing,bargain,bargained,bargaining,barge,barged,barges,barging,baring,barium,bark,barked,barkeep,barker,barking,barks,barley,barlow,barmaid,barman,barn,barnacle,barnyard,barometer,baron,baroness,baronet,baroque,barracks,barracuda,barrage,barre,barred,barrel,barreling,barrels,barren,barricade,barricaded,barricades,barrier,barring,barrio
,barrister,barroom,barrow,bars,barstool,bartender,bartending,barter,base,baseball,based,baseless,baseman,basement,basements,bases,bash,bashed,bashful,bashing,basic,basically,basics,basil,basin,basis,bask,basket,basketball,basketballs,baskets,basking,bass,bassett,bassinet,bassoon,baste,bastille,bat,batch,bates,bath,bathe,bathed,bathing,bathrobe,bathrobes,bathroom,bathrooms,baths,bathtub,batman,baton,bats,batted,batter,battered,batteries,battering,battery,batting,battle,battlefield,battleground,battles,battleship,battling,batty,bauble,baubles,bawdy,bawl,bawling,bay,bayberry,bayonet,bayou,bays,bazaar,bazooka,be,beach,beaches,beacon,beaded,beads,beagle,beak,beakers,beam,beamed,beaming,beams,bean,beanbag,beanie,beans,bear,bearable,beard,bearded,beards,bearer,bearers,bearing,bearings,bears,beast,beastie,beasts,beat,beaten,beater,beating,beatings,beatnik,beats,beau,beaucoup,beauties,beautiful,beautifully,beauty,beaver,beavers,became,because,beck,beckons,becks,become,becomes,becoming,bed,bedbug,bedbugs,bedlam,bedpan,bedpans,bedridden,bedrock,bedroom,bedrooms,beds,bedside,bedspread,bedtime,bee,beech,beef,beefcake,beefed,beefs,beefy,been,beep,beeped,beeper,beepers,beeps,beer,beers,beery,bees,beeswax,beetle,beetles,beets,befall,befitting,before,beforehand,beg,began,begat,begets,beggar,beggars,begged,begging,begin,beginner,beginning,beginnings,begins,begrudge,begs,begun,behalf,behave,behaved,behaving,behavior,behavioral,behaviour,beheading,behind,behold,behooves,beige,being,beings,bel,belabor,belated,beleaguered,belie,belief,beliefs,believable,believe,believed,believer,believers,believes,believing,belittle,belive,bell,bellboy,belle,bellhops,bellies,belligerent,bellman,bells,belly,bellyaching,bellybutton,belong,belonged,belonging,belongings,belongs,beloved,below,belt,belted,belts,belvedere,ben,bench,benched,benches,benching,bend,bended,bender,bending,bends,bendy,bene,beneath,benedict,benefactor,benefactors,beneficial,benefit,benefited,benefits,benes,benevolence,benevolent,benign,benjamin,benjamins,bennet,benny,bent,benthic,bequest,berate,berating,bereavement,bereft,beret,berg,berlin,berries,berry,berserk,berserker,bertha,beryllium,beseech,beside,besides,besieged,besmirch,best,bested,bestow,bestowed,bestseller,bet,beta,beth,bethesda,betray,betrayal,betrayals,betrayed,betrayer,betraying,betrays,bets,better,betting,between,beverage,beware,bewitched,bey,beyond,bi,bialy,bias,biased,bib,bible,bibles,biblical,bibliography,bicentennial,bicker,bickering,bicuspids,bicycle,bicycles,bid,bidder,bidding,bide,biding,bids,biff,big,bigamist,bigamy,bigfoot,bigger,biggest,biggie,bighorn,bigmouth,bigness,bigot,bigotry,bijou,bike,biker,bikers,bikes,biking,bikini,bikinis,bilateral,bilge,bilingual,bill,billboard,billboards,billiard,billie,billing,billion,billionaire,billionaires,billions,bills,billy,bimbo,bimbos,bin,binary,bind,binder,binding,binds,binge,bingo,binoculars,bins,bio,biochemist,biochemistry,biographical,biographies,biography,biohazard,biological,biologically,biology,bionic,biopsy,bios,biotech,bipartisan,bipolar,birch,bird,birdcage,birdie,birdies,birds,birdseed,birth,birthday,birthdays,birthing,birthmark,birthplace,birthright,births,biscuit,biscuits,bishop,bishops,bison,bisque,bistro,bit,bite,bites,biting,bits,bitsy,bitten,bitter,bitterness,bitty,biz,bizarre,blab,blabbermouth,blabbing,black,blackberry,blackbird,blackboard,blacked,blacking,blackjack,blackmail,blackmailed,blackmailer,blackmailing,blackness,blackout,blackouts,blacks,blacksmith,bladder,bladders,blade,blades,blah,blam,blame,blamed,blameless,blames,blam
ing,bland,blank,blanket,blankets,blankly,blanks,blaring,blarney,blasphemous,blasphemy,blast,blasted,blasters,blasting,blatant,blatantly,blather,blathering,blaze,blazer,blazers,blazes,blazing,bleach,bleached,bleachers,bleaching,bleak,bled,bleed,bleeder,bleeding,bleeds,bleep,blemish,blend,blended,blender,blending,blends,bless,blessed,blessing,blessings,blew,blight,blimey,blimp,blind,blinded,blinders,blindfold,blindfolded,blinding,blindly,blindness,blinds,blindsided,blink,blinked,blinking,blinks,blip,blips,bliss,blissful,blissfully,blisters,blithely,blithering,blitz,blizzard,bloated,blob,bloc,block,blockade,blockage,blockbusters,blocked,blockhead,blocking,blocks,bloke,blokes,blond,blonde,blondes,blonds,blood,blooded,bloodhound,bloodied,bloodless,bloodline,bloods,bloodshed,bloodshot,bloodstream,bloodsucker,bloodsucking,bloody,bloom,blooming,blooms,blossom,blossomed,blossoms,blot,blotchy,blotter,blotto,blouse,blow,blowed,blower,blowfish,blowhard,blowing,blowjob,blowjobs,blown,blowout,blows,blowtorch,blowup,blubber,blubbering,bludgeoned,blue,bluebells,blueberries,blueberry,bluepoint,blueprints,bluer,blues,bluest,bluff,bluffing,bluffs,blume,blunder,blundering,blunders,blunt,blur,blurb,blurred,blurry,blurt,blurted,blurting,blush,blushing,bluster,bo,boa,boar,board,boarded,boarder,boarding,boardinghouse,boardroom,boards,boardwalk,boast,boat,boathouse,boating,boatload,boatman,boats,bob,bobbin,bobbing,bobby,bobcat,bod,bodega,bodies,bodily,body,bodyguard,bodyguards,bogeyman,boggle,boggles,boggling,bogs,bogus,bohemian,boil,boiled,boiler,boilers,boiling,boils,bold,bolder,boldly,bollocks,bologna,bolster,bolt,bolted,bolts,bomb,bombarded,bombarding,bombed,bomber,bombers,bombing,bombings,bombs,bombshell,bonbon,bond,bondage,bonded,bonding,bonds,bondsman,bone,boned,bonehead,boneless,bones,bonfire,bong,bongo,bongos,boning,bonkers,bonnet,bonnie,bonus,bonuses,bony,boo,boob,boobies,boobs,booby,booger,boogey,boogeyman,boogie,book,bookcase,booked,bookends,booker,bookie,booking,bookish,bookkeeper,booklet,booklets,bookman,bookmark,books,bookshelf,bookshelves,bookstore,boom,boombox,boomer,boomerang,boon,boonies,boorish,boost,boosted,booster,boosters,boosts,boot,booted,booth,booths,booties,bootleg,boots,booty,booze,boozer,boozing,bop,bora,bordeaux,bordello,border,bordering,borderline,borders,bore,bored,boredom,boring,born,borrow,borrowed,borrowing,bosom,bosomy,boss,bossed,bosses,bossing,bossy,boston,bot,botanical,botany,botched,both,bother,bothered,bothering,bothers,bottle,bottled,bottles,bottling,bottom,bottomed,bottomless,bottoms,botulism,bought,boulder,boulevard,bounce,bounced,bouncer,bouncing,bouncy,bound,boundaries,boundary,bounds,bounty,bouquet,bouquets,bourbon,bourgeois,bourne,bout,boutique,boutiques,boutonniere,bouts,bow,bowed,bowel,bowels,bowers,bowery,bowing,bowl,bowled,bowler,bowline,bowling,bowls,bowman,bows,box,boxcar,boxed,boxer,boxers,boxes,boxing,boy,boycott,boyfriend,boyfriends,boyhood,boys,boysenberry,bozo,bozos,bra,brace,bracelet,bracelets,braces,bracing,bracken,bracket,brad,brag,braggart,bragged,bragging,brags,braid,braided,braiding,braille,brain,brained,brainiest,brains,brainstorm,brainstorming,brainwash,brainwashed,brainwashing,brake,brakes,bran,branch,branched,branches,branching,brand,brandies,brandy,bras,brash,brass,brassiere,brassieres,brat,brats,bratwurst,brava,bravado,brave,braved,bravely,braver,bravery,bravest,bravo,brawl,brays,brazen,brazil,breach,breached,bread,breadth,break,breakable,breakdown,breakdowns,breaker,breakfast,breaking,breakout,breaks,breakthrough,breakthroughs,breakup,breakups,
breakwater,breast,breasted,breasts,breath,breathable,breathe,breathed,breather,breathes,breathing,breathless,breaths,breathtaking,bred,bree,breech,breed,breeding,breeds,breeze,breezing,breezy,bren,brent,brethren,brew,brewed,brewer,brewery,brewing,brews,briar,bribe,bribed,bribery,bribes,bribing,brick,bricked,bricks,bridal,bride,bridegroom,brides,bridesmaid,bridesmaids,bridge,bridges,brie,brief,briefcase,briefcases,briefed,briefing,briefings,briefly,briefs,brig,brigade,brigadier,bright,brighten,brighter,brightest,brightly,brill,brilliance,brilliant,brilliantly,brim,brin,bring,bringing,brings,brink,brioche,bris,brisket,briskly,bristol,brit,britches,britt,brittle,bro,broad,broadcast,broadcasting,broadcasts,broadening,broader,broads,broccoli,brochure,brochures,brock,broiled,broiler,broke,broken,brokenhearted,broker,brokerage,bronchial,bronco,bronze,bronzed,bronzing,brooch,brood,brooding,broody,brook,brooks,broom,brooms,broomstick,broomsticks,bros,broth,brothel,brother,brotherhood,brotherly,brothers,brought,brouhaha,brow,browbeat,browbeating,brown,brownie,brownies,browning,brownout,browns,brownstone,browse,browsing,brr,bruise,bruised,bruises,bruising,brumby,brunch,brunette,brunettes,brunt,brush,brushed,brushes,brushing,brutal,brutality,brutally,brute,bubbies,bubble,bubbles,bubbly,buck,buckaroo,bucket,buckets,buckeyes,buckle,buckled,buckling,bucko,bucks,buckshot,bud,buddies,budding,buddy,budge,budget,budgeted,budgets,budging,buds,buff,buffalo,buffer,buffet,buffoon,buffoons,buffs,buffy,bug,bugged,bugger,buggered,bugging,buggy,bugle,bugs,build,builder,building,buildings,builds,buildup,built,bulb,bulbous,bulbs,bulge,bulging,bulimic,bulk,bulky,bull,bulldog,bulldoze,bulldozers,bullet,bulletin,bulletins,bulletproof,bullets,bullheaded,bullied,bullies,bullion,bullpen,bulls,bully,bum,bumble,bumbling,bummed,bummer,bummers,bumming,bump,bumped,bumper,bumping,bumpkins,bumps,bumpy,bums,bun,bunch,bunches,bundle,bundles,bungalow,bungalows,bungee,bungled,bunion,bunions,bunk,bunker,bunking,bunks,bunnies,bunny,buns,bunt,bunting,bura,burbs,burden,burdened,burdens,bureau,bureaucrat,bureaucrats,burgeoning,burger,burgers,burgess,burglar,burglaries,burglary,burgundy,burial,buried,buries,burke,burlap,burley,burly,burn,burned,burner,burning,burnout,burns,burnt,burp,burping,burrito,burritos,burro,burrows,burst,bursting,burton,bury,burying,bus,busboy,busboys,buses,bush,bushel,bushes,busier,busiest,business,businesses,businessman,businessmen,businesswoman,businesswomen,busload,bussing,bust,busted,buster,bustier,busting,bustle,bustling,busts,busty,busy,busybody,but,butch,butcher,butchered,butchers,butler,butlers,buts,butt,butted,butter,butterball,buttercup,butterflies,butterfly,buttering,butters,butterscotch,buttery,butting,buttocks,button,buttoned,buttoning,buttons,butts,buy,buyer,buyers,buying,buyout,buys,buzz,buzzard,buzzards,buzzed,buzzer,buzzes,buzzing,bwana,by,bye,byes,bygones,bylaws,byline,bypass,byproduct,bystander,bystanders,cab,caballero,cabaret,cabbage,cabdriver,cabernet,cabin,cabinet,cabinets,cabins,cable,cables,caboose,cabs,cacciatore,cache,cachet,cackle,cackling,cacophony,cactus,cad,cadavers,caddie,caddy,cadet,cadmium,caesar,cafe,cafeteria,caff,caffeinated,caffeine,cage,caged,cages,cagey,cahoots,cain,cake,cakes,cakewalk,calamitous,calamity,calcium,calculate,calculated,calculating,calculation,calculations,calculator,calculators,calculus,calendar,calendars,calender,calf,caliber,calibre,calico,call,called,caller,callers,calling,callous,calls,calm,calmed,calmer,calming,calmly,calms,calorie,calories,calves,calzone,cal
zones,cam,camcorder,came,camel,camels,camera,cameraman,cameras,camp,campaign,campaigned,campaigning,campaigns,camped,camper,campers,campfire,camping,campos,camps,campus,campuses,cams,camshaft,can,canal,canals,canape,canaries,canary,canasta,cancel,canceled,canceling,cancellation,cancellations,cancelled,cancels,cancer,cancers,candid,candidacy,candidate,candidates,candies,candle,candlelight,candlelit,candles,candlestick,candlesticks,candor,candy,cane,canine,canines,canisters,cannabis,canned,cannery,cannibal,cannibals,cannoli,cannon,cannonball,cannons,cannot,canoe,canoes,canopy,cans,cant,canteen,canter,canton,canvas,canvass,canyon,canyons,cap,capabilities,capable,capacity,cape,caper,capital,capitalism,capitalist,capitalists,capitalize,capitals,capitol,capo,capote,capper,capping,cappuccino,caprice,caps,capsize,capsized,capsule,capsules,captain,captains,captioning,captivated,captivating,captive,captives,captivity,capture,captured,capturing,car,caramba,caramel,carat,carats,carb,carbo,carbohydrates,carbon,carbs,carbuncle,carburetor,carcass,carcinogens,card,cardboard,cardiac,cardigan,cardinal,cardinals,cardiogram,cardiologist,cardiology,cardiovascular,cards,care,cared,career,careers,carefree,careful,carefully,caregiver,careless,carelessness,cares,caress,caretaker,cargo,caribou,caring,caritas,carjacking,carl,carlin,carmen,carmine,carney,carnie,carnival,carnivore,carnivorous,carol,carolers,caroling,carolling,carotid,carousel,carp,carpenter,carpentry,carpet,carpeting,carpool,carr,carriage,carriages,carried,carrier,carriers,carries,carrot,carrots,carry,carrying,cars,cart,carted,cartel,cartels,carter,cartilage,carting,cartographers,carton,cartons,cartoon,cartoonist,cartoons,cartouche,cartridge,carts,carve,carved,carvel,carver,carvers,carving,carvings,carwash,casa,casbah,cascade,case,cased,caseload,cases,cash,cashed,cashews,cashier,cashing,cashmere,casing,casings,casino,casinos,casitas,casket,caskets,casserole,cassette,cassettes,cast,caste,casting,castle,castles,castor,castrated,casts,casual,casually,casualties,casualty,cat,cataclysmic,catalog,cataloging,catalogue,catalyst,catapult,cataracts,catastrophe,catastrophic,catatonic,catch,catcher,catchers,catches,catching,catchy,categorically,categories,categorized,category,cater,caterer,caterers,catering,caterpillar,caterpillars,caters,caterwauling,cates,catfight,catharsis,cathartic,cathedral,catheter,catholic,cats,catsup,cattle,catty,catwalk,caucus,caught,cauldron,cauliflower,cause,caused,causes,causing,caution,cautionary,cautious,cautiously,cavalcade,cavalier,cavalry,cave,caved,caveman,cavern,caverns,caves,caviar,cavities,cavity,cayman,caymans,cease,ceased,cedar,cedars,cee,ceiling,celebrate,celebrated,celebrates,celebrating,celebration,celebrities,celebrity,celery,celeste,celibacy,celibate,cell,cellar,cellars,celled,cellist,cellmate,cellmates,cello,cells,cellular,cellulite,cement,cemeteries,cemetery,censor,censorship,censure,censured,census,cent,centennial,center,centered,centerfold,centerpiece,centerpieces,centers,centimeter,centimeters,central,centre,centred,cents,centuries,century,ceramic,cereal,cerebellum,cerebral,cerebrum,ceremonial,ceremonies,ceremony,certain,certainly,certainties,certainty,certifiable,certifiably,certificate,certificates,certification,certified,certify,cerulean,cervical,cessation,cesspool,chad,chaff,chafing,chagrined,chain,chained,chains,chainsaw,chainsaws,chair,chairman,chairs,chalet,chalk,chalkboard,chalked,challenge,challenged,challenger,challenges,challenging,chamber,chamberlain,chambers,chameleon,chamomile,champ,champagne,champion
,champions,championship,championships,champs,chance,chancellor,chances,chandelier,chandeliers,chandler,chang,change,changed,changes,changing,channel,channeled,channels,chant,chanteuse,chanting,chants,chaos,chaotic,chap,chapel,chaperon,chaperone,chaperoned,chaperones,chaperoning,chaplain,chapman,chapped,chaps,chapter,chapters,character,characteristics,characterize,characterized,characterizing,characters,charade,charades,charcoal,chardonnay,charge,charged,charger,charges,charging,chariot,chariots,charismatic,charitable,charities,charity,charlatan,charley,charlie,charlies,charlotte,charm,charmed,charmer,charming,charmingly,charms,charred,chart,charted,charter,chartered,chartreuse,charts,chase,chased,chaser,chases,chasing,chasm,chassis,chastity,chat,chateau,chatted,chatter,chatterbox,chatting,chatty,chauffeur,chauvinistic,cheap,cheapen,cheaper,cheapest,cheat,cheated,cheater,cheaters,cheating,cheats,check,checkbook,checked,checker,checkered,checkers,checking,checkmate,checkout,checkpoints,checks,checkup,checkups,cheddar,cheek,cheekbones,cheeks,cheep,cheer,cheered,cheerful,cheering,cheerio,cheerleader,cheerleaders,cheerleading,cheers,cheery,cheese,cheeseburger,cheeseburgers,cheesecake,cheesed,cheeses,cheesy,cheetah,chef,chefs,chemical,chemically,chemicals,chemist,chemistry,chemo,chenille,cheque,cherish,cherished,cherries,cherry,cherub,chess,chessboard,chest,chestnut,chestnuts,chests,chesty,chevalier,chevron,chevy,chew,chewed,chewing,chewy,chez,chi,chic,chicano,chick,chickadee,chicken,chickened,chickening,chickenpox,chickens,chickenshit,chicks,chico,chief,chiefs,chiffon,chigger,chihuahua,child,childbirth,childhood,childhoods,childish,childlike,children,chile,chiles,chili,chill,chilled,chilling,chills,chilly,chime,chimera,chimney,chimp,chimpanzee,chimps,chin,china,chink,chinks,chino,chins,chip,chipmunk,chipped,chipper,chipping,chips,chiropractor,chirp,chirping,chirpy,chisel,chiseling,chit,chitchat,chivalrous,chlamydia,chloride,chlorine,chloroformed,chocolate,chocolates,chocolatey,choice,choices,choir,choirboy,choirs,choke,choked,choker,chokes,choking,cholera,cholesterol,cholinesterase,chomp,chompers,chomping,choose,choosers,chooses,choosing,choosy,chop,chopped,chopper,choppers,chopping,choppy,chops,chopsticks,chord,chords,chore,choreography,chores,chorus,chose,chosen,chow,chowder,christen,christened,christening,christie,christy,chrome,chromic,chromosomes,chronic,chronically,chronicle,chronology,chubby,chuck,chucked,chuckle,chuckles,chucks,chug,chugging,chum,chummy,chump,chumps,chums,chunk,chunks,chunky,church,churches,churn,chute,chutes,ciao,cicely,cider,cigar,cigarette,cigarettes,cigars,cinder,cinema,cinematic,cinnabar,cinnamon,cipher,circle,circled,circles,circling,circuit,circuited,circuitry,circuits,circular,circulate,circulated,circulation,circulatory,circumcision,circumference,circumstance,circumstances,circumstantial,circumvent,circus,cirrhosis,cissy,citation,citations,cite,cited,cities,citing,citizen,citizens,citizenship,citrus,city,citywide,civic,civics,civil,civilian,civilians,civilisation,civility,civilization,civilizations,civilized,civvies,clack,clad,claim,claimed,claiming,claims,clairvoyant,clam,clambake,clammed,clammy,clamoring,clamp,clamped,clamping,clams,clan,clandestine,clang,clanging,clanking,clap,clapped,clapper,clapping,clarence,clarification,clarify,clarifying,clarinet,clarity,clash,clashing,clasp,class,classes,classic,classical,classics,classier,classification,classified,classify,classless,classmate,classmates,classroom,classrooms,classy,clause,clauses,claustrophobia,claustro
phobic,clavicle,claw,clawed,claws,clay,claymore,claymores,clean,cleaned,cleaner,cleaners,cleanest,cleaning,cleanliness,cleans,cleanse,cleansed,cleanser,cleanses,cleansing,cleanup,clear,clearance,clearances,cleared,clearer,clearing,clearly,clears,cleats,cleavage,cleave,clef,clemency,clement,clench,clergy,clergyman,clerical,clerk,clerks,clever,cleverly,cleverness,cliche,click,clicked,clicker,clicking,clicks,client,clientele,clients,cliff,cliffhanger,cliffs,climate,climax,climb,climbed,climbers,climbing,clinched,clincher,clinches,cling,clinging,clings,clingy,clinic,clinical,clinically,clinics,clink,clip,clipboard,clipped,clipper,clippers,clipping,clippings,clips,clique,clitoris,cloak,clobbered,clock,clocked,clocking,clocks,clockwise,clockwork,clod,clods,clogged,clogging,clogs,cloistered,clone,cloned,clones,clop,close,closed,closely,closeness,closer,closes,closest,closet,closeted,closets,closing,closure,clot,cloth,clothe,clothed,clothes,clothesline,clothing,cloths,clots,clotted,clotting,cloud,clouded,clouds,cloudy,clout,cloven,clover,cloverleaf,cloves,clown,clowning,clowns,club,clubbed,clubhouse,clubs,cluck,clucking,clue,clueless,clues,clump,clumps,clumsily,clumsiness,clumsy,clung,clunk,clunker,clunkers,clusters,clutch,clutched,clutches,clutter,cluttering,coach,coached,coaches,coaching,coal,coalition,coals,coarse,coast,coastal,coaster,coasters,coasting,coat,coating,coats,coattails,coax,coaxing,cob,cobb,cobbler,cobra,cobras,cobweb,cobwebs,coca,cocaine,cockamamie,cocked,cockles,cockney,cockpit,cockroach,cockroaches,cocktail,cocktails,cocky,coco,cocoa,coconut,coconuts,cocoon,cod,coddle,coddling,code,coded,codependent,codes,codicil,coding,coed,coeds,coerce,coerced,coercion,coexist,coffee,coffeehouse,coffees,coffers,coffin,coffins,cog,cognac,cognizant,coherent,cohesion,cohesive,coiffure,coil,coiled,coin,coincide,coincidence,coincidences,coincidental,coincidentally,coins,coitus,coke,cokes,col,cola,colchicine,cold,colder,coldest,coldly,coldness,colds,cole,coles,coleslaw,colic,colin,coliseum,colitis,collaborate,collaborated,collaborating,collaboration,collaborator,collage,collagen,collapse,collapsed,collapses,collapsing,collar,collarbone,collars,collateral,colleague,colleagues,collect,collected,collecting,collection,collections,collective,collectively,collector,collectors,collects,colleen,college,colleges,collide,collided,collie,collier,collins,collision,cologne,colon,colonel,colonels,colonial,colonials,colonies,colonists,colonization,colonized,colonnade,colony,color,colorado,colored,colorful,coloring,colors,colossal,colosseum,colour,coloured,colours,column,columnist,columnists,columns,coma,comas,comatose,comb,combat,combative,combination,combine,combined,combines,combing,combo,combust,combusted,combustible,combustion,come,comeback,comebacks,comedian,comedians,comedic,comedies,comedy,comer,comers,comes,comet,comeuppance,comfort,comfortable,comfortably,comforted,comforter,comforting,comforts,comfy,comic,comical,comics,coming,comma,command,commandant,commander,commanders,commanding,commandment,commandments,commando,commandos,commands,commemorating,commence,commencing,commendable,commendation,commensurate,comment,commentaries,commentary,commentator,commented,commenting,comments,commerce,commercial,commercialism,commercially,commercials,commie,commies,commiserate,commissary,commission,commissioned,commissioner,commissioners,commit,commitment,commitments,commits,committed,committee,committees,committing,commode,commodities,commodity,common,commoner,commonly,commonplace,commonwealth,commotion,communal,commun
e,communicate,communicated,communicating,communication,communications,communicator,communing,communion,communique,communism,communist,communists,communities,community,commute,commuted,comp,compact,compactor,compadre,companies,companion,companions,companionship,company,comparable,comparative,comparatively,compare,compared,compares,comparing,comparison,compartment,compartments,compass,compassion,compassionate,compatibility,compatible,compel,compelled,compelling,compels,compensate,compensated,compensation,compete,competence,competent,competing,competition,competitions,competitive,competitiveness,competitor,competitors,compilation,compiling,complacency,complacent,complain,complained,complaining,complains,complaint,complaints,complete,completed,completely,completes,completing,completion,complex,complexion,complexities,complexity,compliant,complicate,complicated,complicates,complicating,complication,complications,compliment,complimentary,compliments,comply,component,components,composed,composer,composers,composite,composition,compost,composure,compound,compounds,comprehend,comprehending,comprehension,comprehensive,compressed,compression,compressions,compressor,comprise,comprised,compromise,compromised,compromises,compromising,compulsion,compulsive,compulsively,compulsory,compute,computer,computerized,computers,comrade,comrades,con,conceal,concealed,concealer,concealing,concealment,concede,conceding,conceited,conceivable,conceivably,conceive,conceived,conceiving,concentrate,concentrated,concentrating,concentration,concentric,concept,conception,concepts,concern,concerned,concerning,concerns,concert,concerts,concession,concessions,concierge,concise,conclave,conclude,concluded,concludes,concluding,conclusion,conclusions,conclusive,conclusively,concocted,concocting,concoction,concord,concourse,concrete,concur,concurrently,concussion,concussions,condemn,condemnation,condemned,condemning,condensation,condensed,condescending,condiment,condition,conditioned,conditioner,conditioners,conditioning,conditions,condo,condolences,condom,condominium,condoms,condone,condoned,condoning,condor,condos,conducive,conduct,conducted,conducting,conductor,conducts,cone,cones,coney,confederacy,confederate,confederates,confederation,confer,conference,conferences,conferred,conferring,confess,confessed,confessing,confession,confessional,confessions,confessor,confetti,confidant,confidante,confide,confided,confidence,confidences,confident,confidential,confidentiality,confidentially,confides,confiding,configuration,confine,confined,confinement,confining,confirm,confirmation,confirmed,confirming,confirms,confiscated,confiscating,confit,conflict,conflicted,conflicting,conflicts,confluence,conform,conformity,confound,confront,confrontation,confrontations,confronted,confronting,confronts,confuse,confused,confusing,confusion,congenial,congeniality,congestion,congrats,congratulate,congratulated,congratulating,congratulations,congregation,congress,congressional,congressman,congresswoman,conjecture,conjugal,conjugate,conjunction,conjure,conjured,conjures,conjuring,conk,conked,conn,connect,connected,connecting,connection,connections,connects,conned,conner,conning,conniption,conniving,connoisseur,conquer,conquered,conquering,conquers,conquest,cons,conscience,conscientious,conscious,consciously,consciousness,consecrated,consecutive,consensual,consensus,consent,consented,consenting,consequence,consequences,consequently,conservation,conservative,conservatory,conserve,consider,considerable,considerably,considerate,consideration,considered,cons
idering,considers,consigliere,consisted,consistent,consistently,consists,consolation,console,consoled,consolidate,consolidated,consoling,consort,consorting,consortium,conspicuous,conspiracies,conspiracy,conspirator,conspirators,conspire,conspired,conspiring,constable,constant,constantly,constellation,constipation,constituents,constitute,constitutes,constitution,constitutional,constitutionally,constraints,constrictor,construct,constructed,construction,constructive,construed,consul,consulate,consult,consultant,consultation,consultations,consulted,consulting,consults,consume,consumed,consumer,consumers,consumes,consuming,consummate,consummated,consumption,contact,contacted,contacting,contacts,contagious,contain,contained,container,containers,containing,containment,contains,contaminate,contaminated,contaminating,contamination,contemplate,contemplated,contemplating,contemporary,contempt,contender,contenders,content,contented,contention,contentment,contents,contest,contestant,contestants,contesting,context,continent,continental,continents,contingencies,contingency,continually,continuance,continuation,continue,continued,continues,continuing,continuity,continuous,continuously,continuum,contortionist,contours,contra,contraband,contraceptives,contract,contracted,contraction,contractions,contractor,contracts,contradict,contradicted,contradicting,contradiction,contradictory,contraption,contrary,contrast,contribute,contributed,contributes,contributing,contribution,contributions,contributor,contributors,contrite,contrived,control,controlled,controller,controlling,controls,controversial,controversy,contusion,contusions,convene,convened,convenes,convenience,convenient,conveniently,convent,convention,conventional,converge,converging,conversation,conversational,conversationalist,conversations,converse,conversion,convert,converted,convertible,convertibles,converting,convey,conveyor,convict,convicted,conviction,convictions,convince,convinced,convinces,convincing,convoy,convulsing,coo,cooing,cook,cookbook,cookbooks,cooked,cooker,cookie,cookies,cooking,cooks,cool,coolant,cooled,cooler,coolers,coolest,cooling,coolly,coolness,cools,coop,cooped,cooper,cooperate,cooperated,cooperating,cooperation,cooperative,coopers,coordinate,coordinated,coordinates,coordinating,coordinator,coot,cooties,cop,copacetic,cope,copied,copier,copies,copilot,coping,copiously,copper,copperhead,coppers,copping,cops,copter,copy,copycat,copying,copyright,cor,coral,cord,cordial,cordless,cordon,cordoned,cords,corduroy,core,coriander,cork,corker,corks,corkscrew,corky,corn,cornball,corned,corner,cornered,cornering,corners,cornerstone,cornfield,cornflakes,cornucopia,corny,corollary,corona,coronary,coronation,coroner,coroners,corporal,corporate,corporation,corporations,corporeal,corps,corpse,corpses,corral,correct,corrected,correcting,correction,correctional,corrections,corrective,correctly,correlation,correspond,corresponded,correspondence,correspondent,correspondents,corresponding,corresponds,corridor,corroborate,corroboration,corrupt,corrupted,corrupting,corruption,corsage,corset,cortex,cory,cos,cosign,cosmetic,cosmetics,cosmic,cosmically,cosmology,cosmopolitan,cosmos,cost,costa,costing,costly,costs,costume,costumes,cosy,cot,cotillion,cots,cottage,cottages,cotton,couch,cougar,cougars,cough,coughing,coughs,could,council,councillor,councilors,counsel,counseling,counselling,counsellor,counselor,counselors,count,countdown,counted,countenance,counter,counterattack,counterfeit,counterfeiting,countermeasures,counteroffer,counterpart,counterproductive,co
untess,counties,counting,countless,countries,country,countrymen,countryside,counts,county,coup,coupe,couple,couples,coupling,coupon,coupons,courage,courageous,courier,couriers,course,courses,coursing,court,courted,courteous,courtesy,courthouse,courting,courtroom,courtrooms,courts,courtship,courtside,courtyard,cousin,cousins,couture,cove,coven,covenant,cover,coverage,coveralls,covered,covering,covers,covert,coverup,covet,coveted,coveting,cow,coward,cowardice,cowardly,cowards,cowboy,cowboys,cowed,cower,cowering,cowgirl,coworker,cows,cox,coy,coyote,coyotes,coz,cozier,cozy,cozying,crab,crabby,crabgrass,crabs,crack,cracked,cracker,crackerjack,crackers,cracking,crackling,crackpot,cracks,cradle,craft,crafted,crafting,crafts,craftsmanship,craftsmen,crafty,cram,crammed,cramming,cramp,cramped,cramping,cramps,cranberry,crane,cranes,cranial,cranium,crank,cranking,cranks,cranky,cranny,crap,crapped,crapper,crapping,crappy,craps,crash,crashed,crasher,crashers,crashes,crashing,crass,crate,crated,crater,crates,crave,craves,craving,crawl,crawled,crawlers,crawling,crawls,crawly,crayons,craze,crazed,crazier,craziest,craziness,crazy,creak,creaky,cream,creamed,creamer,creaming,creamy,crease,creased,creases,create,created,creates,creating,creation,creations,creative,creatively,creativity,creator,creature,creatures,credence,credentials,credenza,credibility,credible,credit,credited,credits,credo,creed,creeds,creek,creep,creepers,creeping,creeps,creepy,cremated,cremation,crematorium,creme,crepe,crepes,crept,crescent,crest,crested,cretins,crevasse,crew,crewman,crews,crib,cribbage,cribs,crick,cricket,cried,crier,cries,crikey,crime,crimes,criminal,criminalistics,criminally,criminals,criminology,crimp,crimson,cringe,cripes,cripple,crippled,cripples,cris,crises,crisis,crisp,crisps,crispy,criteria,criterion,critic,critical,critically,criticism,criticize,criticized,criticizing,critics,critter,critters,croak,croaker,croc,crock,crocket,crocodile,crocodiles,croft,croissants,cronies,crook,crooked,crooks,croon,crop,cropped,crops,croquet,cross,crossbow,crossed,crosses,crossfire,crosshairs,crossing,crossroads,crossword,crotch,crouch,crouched,crouching,croupier,croutons,crow,crowbar,crowd,crowded,crowding,crowds,crowed,crowing,crown,crowned,crowning,crowns,crows,crucial,crucible,crucified,crucifix,crucifixion,crucify,crud,cruddy,crude,crudely,cruel,cruelly,cruelty,cruise,cruised,cruiser,cruisers,cruises,cruising,crumb,crumble,crumbled,crumbles,crumbling,crumbs,crummy,crumpets,crumpled,crunch,crunched,crunches,crunchy,crusade,crusader,crusades,crush,crushed,crusher,crushes,crushing,crust,crusts,crusty,crutch,crutches,crux,cry,crying,cryogenic,crypt,cryptic,crypto,crypts,crystal,crystals,cub,cubbies,cubby,cube,cubed,cubes,cubic,cubicle,cubs,cuckoo,cucumber,cud,cuddle,cuddled,cuddles,cuddly,cuddy,cue,cued,cuff,cuffed,cuffing,cuffs,cuisine,culinary,culminating,culmination,culottes,culpa,culpability,culpable,cult,cultivate,cultivated,cultivating,cultural,culturally,culture,cultured,cultures,cumin,cummerbund,cumulative,cuneiform,cunning,cup,cupboard,cupboards,cupcake,cupcakes,cupid,cuppa,cups,cur,curacao,curate,curator,curb,curd,curdle,cure,cured,cures,curfew,curfews,curie,curing,curiosity,curious,curiously,curl,curled,curlers,curling,curly,curmudgeon,curran,currency,current,currently,currents,curriculum,curry,curse,cursed,curses,cursing,cursive,cursory,curt,curtain,curtains,curtsy,curvaceous,curve,curveball,curves,cushion,cushions,cushy,cusp,cuss,cussing,custard,custodial,custodian,custody,custom,customarily,customary,customer,customer
s,customs,cut,cutaway,cutbacks,cute,cuteness,cuter,cutest,cutesy,cuticle,cuticles,cutie,cutlass,cutler,cutlery,cutoff,cutoffs,cutout,cuts,cutter,cutters,cutthroat,cutting,cyanide,cybernetic,cyberspace,cyborg,cycle,cycles,cyclone,cyclops,cyclotron,cylinder,cylinders,cymbal,cynic,cynical,cynicism,cynics,cypher,cypress,cyprus,cyst,cystic,dab,dabble,dabbled,dabbling,dad,daddies,daddy,dads,daffodils,daffy,daft,dag,dagger,dago,dah,dahl,dahlia,dailies,daily,dainty,daiquiri,dairy,dais,daisies,daisy,dale,dalliance,dally,dallying,dalton,dam,damage,damaged,damages,damaging,dame,damn,damnable,damned,damnedest,damning,damp,dampened,dampener,damper,damsel,damsels,dance,danced,dancer,dancers,dances,dancing,dandelion,dandelions,dandruff,dandy,dang,danger,dangerous,dangerously,dangers,dangle,dangled,dangling,danish,daphne,dapper,dare,dared,daredevil,dares,daring,dark,darken,darkened,darker,darkest,darkness,darkroom,darks,darling,darlings,darn,darndest,darned,dart,darts,dash,dashboard,dashed,dasher,dashing,data,database,databases,date,dated,dateless,dateline,dater,dates,dating,dato,daughter,daughters,daunting,dauphin,davenport,davy,dawdling,dawn,dawned,dawning,day,daybreak,daydream,daydreaming,daylight,daylights,days,daytime,daze,dazzle,dazzled,dazzling,de,deacon,dead,deadbeat,deadbeats,deader,deadlier,deadliest,deadline,deadlines,deadlock,deadly,deaf,deafening,deal,dealer,dealers,dealership,dealing,dealings,deals,dealt,dean,dear,dearest,dearie,dearly,dears,death,deathbed,deathly,deaths,deb,debacle,debatable,debate,debated,debates,debating,debilitating,debit,debonair,debrief,debriefed,debriefing,debris,debt,debts,debut,debutante,debutantes,decade,decadence,decadent,decades,decaf,decanter,decapitate,decapitated,decapitation,decay,decaying,deceased,deceit,deceitful,deceive,deceived,deceiving,decency,decent,deception,deceptions,deceptive,deceptively,decibel,decibels,decide,decided,decides,deciding,decipher,deciphering,decision,decisions,decisive,deck,decked,decker,decks,declaration,declare,declared,declaring,decline,declined,declining,deco,decoded,decoder,decompose,decomposed,decomposing,decompress,deconstruction,decontamination,decor,decorate,decorated,decorating,decoration,decorations,decorative,decorator,decorators,decorum,decoupage,decoy,decoys,decreased,decree,decrypted,decryption,dedicate,dedicated,dedicating,dedication,deduce,deduct,deducted,deductible,deduction,deductions,dee,deed,deeded,deeds,deejay,deem,deemed,deep,deeper,deepest,deeply,deer,deets,defaced,defacing,default,defeat,defeated,defeats,defect,defected,defective,defector,defects,defence,defend,defendant,defendants,defended,defender,defending,defense,defenseless,defenses,defensive,deferred,defiance,defiant,defiantly,defibrillator,deficiencies,deficiency,deficient,deficit,defied,defies,define,defined,defining,definite,definitely,definition,definitions,definitive,definitively,deflate,deflated,deflecting,deflection,deflower,deformity,defraud,defrost,deft,deftly,defuse,defused,defy,defying,degenerate,degeneration,degenerative,degradation,degrade,degraded,degrading,degree,degrees,dehydrated,dehydration,deigned,deities,deity,deke,del,delay,delayed,delaying,delays,delectable,delegate,delegates,delegation,delete,deleted,deli,deliberate,deliberately,deliberation,deliberations,delicacy,delicate,delicately,delicates,delicious,delight,delighted,delightful,delightfully,delights,delinquency,delinquent,delirious,delirium,deliver,delivered,deliveries,delivering,delivers,delivery,dell,delly,delta,deltas,delude,deluded,deluding,deluge,delusion,delusional,delusi
ons,deluxe,delve,delving,demand,demanded,demanding,demands,demean,demeaning,demented,dementia,demerits,demise,demo,democracy,democrat,democratic,democrats,demographic,demographics,demolish,demolition,demolitions,demon,demonic,demonology,demons,demonstrate,demonstrated,demonstrates,demonstration,demonstrations,demonstrators,demoted,demur,demure,den,denial,denied,denies,denning,denominational,denominations,denominators,denounce,denouncing,dense,density,dent,dental,dentist,dentists,dents,dentures,deny,denying,deodorant,depart,departed,departing,department,departmental,departments,departure,depend,dependable,dependant,depended,dependency,dependent,depending,depends,depict,depicted,depicting,depiction,depicts,deplete,deplorable,deplore,deploy,deployed,deport,deported,depose,deposed,deposing,deposit,deposited,deposition,depositions,depository,deposits,depot,depraved,deprecating,depress,depressed,depressing,depression,depressor,depressors,deprivation,deprive,deprived,depriving,depth,depths,deputies,deputized,deputy,derail,derailing,deranged,derby,derelict,derision,derivative,derive,derives,dermatologist,derogatory,derrick,derriere,derris,descend,descendants,descending,descends,descent,describe,described,describes,describing,description,descriptive,desecrate,desecrated,desecration,desert,deserted,deserter,deserting,deserts,deserve,deserved,deserves,deserving,design,designate,designated,designation,designed,designer,designers,designing,designs,desirable,desire,desired,desires,desist,desk,desks,desktop,desolate,despair,desperado,desperate,desperately,desperation,despicable,despise,despised,despises,despising,despite,despondent,dessert,desserts,destabilize,destination,destinations,destined,destiny,destitute,destroy,destroyed,destroyer,destroyers,destroying,destroys,destruct,destructing,destruction,destructive,destructs,detach,detached,detachment,detail,detailed,detailing,details,detain,detained,detaining,detect,detected,detecting,detection,detective,detectives,detector,detectors,detente,detention,detergent,deteriorated,deterioration,determination,determine,determined,determines,determining,deterrent,detest,detestable,detests,detonate,detonated,detonates,detonating,detonation,detonator,detonators,detour,detours,detoxing,detract,detrimental,deuce,deuces,devastate,devastated,devastating,devastatingly,devastation,develop,developed,developer,developing,development,developments,develops,deviants,deviated,deviation,device,devices,devil,deviled,devilishly,devils,devious,devise,devised,devon,devote,devoted,devoting,devotion,devour,devoured,devouring,devours,dew,dewars,dewy,dex,dey,diabetes,diabetic,diabetics,diabolical,diagnose,diagnosed,diagnoses,diagnosis,diagnostic,diagnostician,diagonal,diagram,dial,dialect,dialed,dialing,dialogue,dials,dialysis,diameter,diamond,diamonds,diaper,diapers,diaphragm,diaphragms,diaries,diarrhea,diary,diathesis,diatribes,diazepam,dibs,dice,diced,dicey,dickens,dictate,dictated,dictates,dictating,dictation,dictator,dictators,dictatorship,dictatorships,dictionary,did,diddling,diddly,die,died,dies,diesel,diet,dieter,dieting,diets,differ,difference,differences,different,differential,differently,differs,difficult,difficulties,difficulty,diffuse,diffusion,dig,digest,digested,digesting,digestion,digestive,digger,diggers,digging,digit,digital,digitally,digits,dignified,dignify,dignitaries,dignity,digress,digs,dike,dilated,dilation,dilemma,dilettante,diligence,diligent,diligently,dill,dills,dilly,dilute,diluted,dim,dime,dimension,dimensional,dimensions,dimes,diminish,diminished,diminutive
,dimly,dimmer,dimming,dimple,dimpled,dimwit,dine,dined,diner,dinero,diners,ding,dinged,dingle,dingo,dings,dingy,dining,dink,dinks,dinky,dinner,dinners,dinnertime,dinning,dinosaur,dinosaurs,dioxide,dip,diphtheria,diploma,diplomas,diplomatic,dipped,dipping,dips,dipstick,dire,direct,directed,directing,direction,directions,directive,directly,director,directorate,directors,directory,dirk,dirt,dirtiest,dirty,dis,disabilities,disability,disable,disabled,disabling,disadvantage,disagree,disagreed,disagreeing,disagreement,disagreements,disagrees,disallowed,disappear,disappearance,disappearances,disappeared,disappearing,disappears,disappoint,disappointed,disappointing,disappointment,disappointments,disappoints,disapproval,disapprove,disapproved,disapproves,disapproving,disarm,disarmed,disarming,disarray,disassembled,disaster,disasters,disastrous,disbarred,disbelief,disc,discarded,discerning,discharge,discharged,disciples,disciplinary,discipline,disciplined,disciplines,disciplining,disclose,disclosed,disclosure,disco,discoloration,discolored,discomfort,disconnect,disconnected,discontent,discontinue,discontinued,discord,discotheque,discount,discounting,discounts,discourage,discouraged,discourse,discourteous,discover,discovered,discoveries,discovering,discovers,discovery,discredit,discreet,discreetly,discrepancies,discrepancy,discrete,discretion,discriminate,discriminated,discriminating,discrimination,discs,discus,discuss,discussed,discusses,discussing,discussion,discussions,disdain,disease,diseased,diseases,disfigured,disfiguring,disgrace,disgraced,disgruntled,disguise,disguised,disguises,disgust,disgusted,disgusting,disgustingly,dish,disheartening,dishes,disheveled,dishonest,dishonor,dishonorable,dishwasher,disillusioned,disillusionment,disinfect,disinfectant,disinformation,disingenuous,disinherit,disinherited,disintegrate,disintegrated,disk,disks,dislike,dislocated,dislodge,dislodged,disloyal,dismal,dismantle,dismantled,dismantling,dismay,dismember,dismemberment,dismiss,dismissal,dismissed,dismissing,dismissive,dismount,disobedience,disobey,disobeyed,disobeying,disorder,disorders,disorganized,disorientation,disoriented,disorienting,disown,disowned,disparaging,disparity,dispatch,dispatched,dispatcher,dispatches,dispensary,dispensation,dispense,dispensed,dispenser,dispensers,dispensing,disperse,displace,displaced,displacement,display,displayed,displaying,displays,displeased,displeasure,disposable,disposal,dispose,disposing,disposition,disproportionate,disprove,dispute,disputes,disputing,disqualified,disqualify,disregard,disregarded,disrespect,disrespected,disrespectful,disrespecting,disrupt,disrupted,disrupting,disruptions,disruptive,disrupts,dissatisfied,dissect,dissected,dissection,dissed,dissent,dissertation,disservice,dissident,dissing,dissipate,dissipated,dissolve,dissolved,dissolves,dissolving,dissuade,distance,distances,distancing,distant,distaste,distasteful,distended,distillery,distinct,distinction,distinctions,distinctive,distinctly,distinguish,distinguished,distinguishing,distort,distorted,distortion,distortions,distract,distracted,distracting,distraction,distractions,distraught,distress,distressed,distressing,distribute,distributed,distributing,distribution,distributor,distributors,district,districts,distrust,distrustful,disturb,disturbance,disturbances,disturbed,disturbing,dit,ditch,ditched,ditches,ditching,ditsy,ditto,ditty,ditz,diuretic,diuretics,diva,divas,dive,diver,divers,diversion,diversionary,diversions,divert,diverted,diverting,dives,divest,divide,divided,dividends,divine,diving,div
ining,divinity,division,divisional,divisions,divorce,divorced,divorcee,divorces,divorcing,divulged,divvy,dizziness,dizzy,dizzying,do,dobbin,dobbins,dobson,doc,dock,docked,docking,docks,docs,doctor,doctored,doctors,doctrine,document,documentaries,documentary,documented,documents,dodge,dodgeball,dodged,dodger,dodgers,dodging,dodgy,doe,doer,does,dog,dogged,doggie,doggies,doggone,doggy,doghouse,dogs,dogwood,doily,doing,dojo,dol,dolce,dole,doling,doll,dollar,dollars,dolled,dollhouse,dollop,dolls,dolly,dolphin,dolphins,dolt,dom,domain,dome,domes,domestic,domesticated,domesticity,domicile,dominance,dominant,dominate,dominates,dominating,domination,dominick,dominion,dominoes,don,donate,donated,donating,donation,donations,done,dong,dongs,donkey,donkeys,donna,donor,donors,donut,donuts,doodle,doodles,doofus,doohickey,doom,doomed,doomsday,door,doorbell,doorknob,doorknobs,doorman,doormat,doornail,doors,doorstep,doorway,doozy,dopamine,dope,dopes,dopey,doping,doppelganger,dorado,dork,dorky,dorm,dormant,dormitory,dorms,dorsal,dory,dos,dosage,dosages,dose,dosed,doses,dossier,dost,dot,dote,dotes,doth,dots,dotted,dotty,double,doubled,doubles,doubly,doubt,doubted,doubtful,doubting,doubts,dough,doughnut,doughnuts,dour,doused,dove,doves,dowager,dowdy,down,downed,downer,downfall,downgraded,downhill,downing,download,downloaded,downpour,downright,downriver,downs,downside,downsize,downsizing,downstairs,downstream,downtown,downtrodden,downward,downy,dowser,doze,dozed,dozen,dozens,dozer,dozing,drab,draft,drafted,drafting,drafts,drafty,drag,dragged,dragging,dragon,dragonfly,dragons,dragoons,drags,drain,drainage,drained,draining,drainpipe,drake,drama,dramas,dramatic,dramatically,drank,drape,draped,drapes,drastic,drastically,draw,drawback,drawer,drawers,drawing,drawings,drawn,draws,drawstring,dread,dreaded,dreadful,dreading,dream,dreamed,dreamer,dreamers,dreaming,dreamless,dreams,dreamt,dreamy,dreary,dreck,dredge,dredged,dredging,dregs,dreidel,dress,dressed,dresser,dresses,dressing,dressings,dressy,drew,dribble,dribbles,dribbling,dried,drier,dries,drift,drifted,drifter,drifting,driftwood,drill,drilled,drilling,drink,drinker,drinkers,drinking,drinks,drip,dripped,dripping,drippy,drips,drive,drivel,driven,driver,drivers,drives,driveway,driveways,driving,droll,drone,drones,drool,drooled,drooling,drools,droop,droopy,drop,dropout,dropped,dropper,dropping,droppings,drops,drosophila,drought,drove,droves,drown,drowned,drowning,drowsy,drudge,drug,drugged,drugging,druggist,drugs,drugstore,drum,drummed,drummer,drumming,drums,drumstick,drumsticks,drunk,drunkard,drunken,drunks,druthers,dry,dryer,dryers,drying,drywall,dual,duality,dub,dubbed,dubious,duce,duchess,duck,ducked,ducking,duckling,ducks,ducky,duct,ducts,dud,dude,dudes,duds,due,duel,dueling,dues,duet,duff,duffel,duffle,dug,dugout,dui,duke,dukes,dulcet,dull,dullard,dulled,dullest,dullness,duly,dumb,dumber,dumbest,dumdum,dummies,dummkopf,dummy,dump,dumped,dumper,dumping,dumpling,dumplings,dumps,dun,dunes,dung,dungeon,dunk,dunked,dunking,dunks,duo,duped,duplex,duplicate,durable,duration,duress,during,durned,dusk,dusky,dust,dusted,dusting,dusty,dutch,duties,dutiful,duty,duvet,dwarf,dwarfs,dwarves,dweeb,dwell,dweller,dwellers,dwelling,dwells,dwindling,dye,dyed,dyeing,dyer,dying,dynamic,dynamics,dynamite,dynamo,dynasty,dysentery,dysfunction,dysfunctional,each,eager,eagerly,eagle,eagles,ear,earful,earl,earlier,earliest,earlobe,earlobes,early,earmarked,earmarks,earn,earned,earnest,earning,earns,earphones,earpiece,earplugs,earring,earrings,ears,earshot,earth,earthlings,earthly,earthqua
ke,earthquakes,earthy,earwig,ease,easel,eases,easier,easiest,easily,easing,east,eastbound,easter,eastern,easy,easygoing,eat,eaten,eater,eaters,eating,eats,eaves,eavesdrop,eavesdropped,eavesdropping,ebb,ebony,eccentric,eccentricities,echelon,echo,echoes,eclectic,eclipse,ecological,economic,economical,economically,economics,economy,ecosystem,ecstasy,ecstatic,ectopic,ectoplasm,ecumenical,eczema,ed,eddy,edema,edge,edged,edges,edgewise,edging,edgy,edible,edict,edit,edited,editing,edition,editor,editorial,editorials,editors,edits,educate,educated,educating,education,educational,educator,educators,eel,eels,eerie,eerily,effacing,effect,effected,effecting,effective,effectively,effectiveness,effects,effeminate,efficiency,efficient,efficiently,effort,effortless,efforts,egg,egghead,egging,eggnog,eggplant,eggs,eggshell,eggshells,ego,egocentric,egomaniac,egomaniacal,egos,egotistical,egregious,egyptian,eh,eight,eighteen,eighteenth,eighth,eighties,eights,eighty,einstein,either,ejaculate,eject,ejection,eking,el,elaborate,elapsed,elastic,elated,elbow,elbows,elder,elderly,elders,eldest,elect,elected,electing,election,elections,elective,electoral,electorate,electric,electrical,electrician,electricians,electricity,electrified,electro,electrocute,electrocuted,electrodes,electrolyte,electrolytes,electronic,electronically,electronics,electrons,electroshock,elegance,elegant,element,elemental,elementary,elements,elephant,elephants,elevate,elevated,elevates,elevation,elevator,elevators,eleven,eleventh,elf,elicit,eligible,eliminate,eliminated,eliminates,eliminating,elimination,elite,elitist,elixir,elk,elks,ell,elliptical,elm,elms,elope,eloped,eloping,eloquence,eloquent,eloquently,else,elsewhere,elude,eluded,eludes,elusive,elves,em,emanates,emancipated,emancipation,embalmed,embalming,embankment,embark,embarked,embarking,embarrass,embarrassed,embarrasses,embarrassing,embarrassingly,embarrassment,embassies,embassy,embedded,embellished,ember,embezzle,embezzled,embezzlement,embezzler,embezzling,embittered,emblem,embodied,embody,embossed,embrace,embraced,embraces,embracing,embroidered,embroiled,embryo,emerald,emeralds,emerge,emerged,emergencies,emergency,emerges,emery,eminence,eminent,eminently,emission,emissions,emit,emotion,emotional,emotionally,emotions,empathic,empathy,emperor,emperors,emphasis,emphasize,emphasized,emphatic,emphatically,empire,empirical,employ,employed,employee,employees,employer,employers,employing,employment,emporium,empowered,empowerment,empress,emptied,empties,emptiness,empty,emptying,ems,emulating,en,enable,enabled,enables,enabling,enact,enacted,enamel,enamored,encephalitis,enchant,enchanted,enchanting,enchantment,encinas,enclosed,encoded,encore,encounter,encountered,encounters,encourage,encouraged,encouragement,encourages,encouraging,encrusted,encrypted,encryption,encyclopedia,encyclopedias,end,endanger,endangered,endangering,endangerment,endear,endearing,endeavor,endeavors,endeavour,ended,ending,endings,endive,endless,endlessly,endorphins,endorse,endorsement,endorsements,endorsing,endowment,ends,endurance,endure,endured,enduring,enema,enemies,enemy,energized,energy,enforce,enforced,enforcement,enforcer,enforcing,eng,engage,engaged,engagement,engagements,engages,engaging,engine,engineer,engineered,engineering,engineers,engines,english,engraved,engraving,engrossed,engrossing,engulfed,enhance,enhanced,enhancements,enhancer,enhances,enigma,enjoy,enjoyable,enjoyed,enjoying,enjoyment,enjoys,enlarged,enlighten,enlightened,enlightenment,enlist,enlisted,enlisting,ennui,enormity,enormous,enormously,enough,
enquiries,enrage,enraged,enrich,enriched,enriching,enrolled,enrolling,enrollment,ensconced,ensemble,ensign,enslave,enslaved,ensue,ensued,ensuing,ensure,ensuring,entail,entails,entanglements,enter,entered,entering,enterprise,enterprises,enterprising,enters,entertain,entertained,entertainer,entertaining,entertainment,enthralled,enthused,enthusiasm,enthusiast,enthusiastic,entice,enticed,entire,entirely,entirety,entities,entitle,entitled,entitles,entity,entomologist,entomology,entourage,entrails,entrance,entrances,entrapment,entree,entrenched,entrepreneur,entrepreneurial,entries,entrust,entrusted,entry,entwined,envelope,envelopes,envied,envious,environment,environmental,environmentalist,envision,envisioned,envoy,envy,enzyme,ephemeral,epic,epidemic,epidural,epilepsy,epileptic,epinephrine,epiphany,episode,episodes,epizootics,epoxy,epsilon,equal,equality,equally,equals,equation,equator,equestrian,equilibrium,equinox,equipment,equipped,equitable,equity,equivalent,er,era,erase,erased,eraser,erasers,erases,erasing,ere,erect,erection,ergo,erica,erode,eroding,erogenous,eros,erosion,erotic,err,errand,errands,errant,erratic,erratically,erred,erroneous,error,errors,ers,erupt,eruption,es,escalated,escalating,escalator,escapade,escapades,escape,escaped,escapee,escapes,escaping,escargot,escort,escorted,escorts,esophagus,esoteric,especially,espionage,espresso,espressos,ess,essay,essays,essence,essential,essentially,establish,established,establishes,establishing,establishment,establishments,estate,esteem,esteemed,ester,estimate,estimated,estimates,estimating,estimation,estranged,estrogen,et,eta,etcetera,etched,eternal,eternally,eternity,eth,ethanol,ether,ethic,ethical,ethically,ethics,ethnic,ethnicity,ethyl,etiquette,eucalyptus,eugenia,eulogy,eunuch,euphemism,euphemisms,euphoric,euro,euros,euthanasia,evacuate,evacuated,evacuating,evacuation,evade,evaluate,evaluated,evaluating,evaluation,evaluations,evangelical,evaporate,evaporated,evasions,evasive,eve,even,evening,evenings,evenly,evens,event,eventful,events,eventual,eventuality,eventually,ever,everglades,everlasting,every,everybody,everyday,everyman,everyone,everything,everywhere,eves,evict,evicted,evidence,evidenced,evident,evidently,evil,evils,eviscerate,eviscerated,evocative,evoked,evolution,evolutionary,evolve,evolved,evolving,ewe,ex,exacerbate,exact,exacting,exactly,exaggerate,exaggerated,exaggerating,exaggeration,exalted,exam,examination,examine,examined,examiner,examining,example,examples,exams,exasperated,exasperating,exceed,exceeded,exceeding,exceedingly,exceeds,excel,excellence,excellency,excellent,excels,except,excepted,exception,exceptional,exceptionally,exceptions,excess,excesses,excessive,excessively,exchange,exchanged,exchanges,exchanging,excite,excited,excitement,excites,exciting,exclamation,excluded,excludes,excluding,exclusion,exclusive,exclusively,excommunicated,excruciating,exculpatory,excursion,excursions,excuse,excused,excuses,excusing,execs,execute,executed,executing,execution,executioner,executions,executive,executives,executor,exemplary,exempt,exemption,exemptions,exercise,exercises,exercising,exerting,exertion,exes,exfoliate,exhale,exhaust,exhausted,exhausting,exhaustion,exhaustive,exhausts,exhibit,exhibited,exhibiting,exhibition,exhibits,exhilarated,exhilarating,exhilaration,exhumation,exhume,exhumed,exigent,exile,exiled,exiles,exist,existed,existence,existent,existential,existentialist,existing,exists,exit,exited,exiting,exits,exodus,exonerate,exonerated,exorbitant,exorcise,exorcism,exorcist,exotic,expand,expanded,expanding,expands,
expansion,expect,expectancy,expectation,expectations,expected,expecting,expects,expedient,expedite,expedited,expedition,expel,expelled,expelling,expendable,expenditure,expenditures,expense,expenses,expensive,experience,experienced,experiences,experiencing,experiment,experimental,experimentation,experimented,experimenting,experiments,expert,expertise,experts,expiration,expired,expires,explain,explained,explaining,explains,explanation,explanations,explanatory,explicit,explicitly,explode,exploded,explodes,exploding,exploit,exploitation,exploited,exploiting,exploration,exploratory,explore,explored,explorer,explorers,exploring,explosion,explosions,explosive,explosives,exponential,export,exporter,exporting,exports,expose,exposed,exposes,exposing,exposure,express,expressed,expresses,expressing,expression,expressions,expressive,expressly,expulsion,expunged,exquisite,exquisitely,extend,extended,extending,extension,extensions,extensive,extent,extenuating,exterior,exterminate,exterminated,exterminating,extermination,exterminator,external,extinct,extinction,extinguish,extinguished,extinguisher,extort,extorted,extorting,extortion,extortionist,extra,extract,extracted,extracting,extraction,extracts,extracurricular,extracurriculars,extradite,extradited,extradition,extramarital,extraordinaire,extraordinarily,extraordinary,extras,extraterrestrial,extraterrestrials,extravagant,extravaganza,extreme,extremely,extremes,extremism,extremists,extremities,extricate,eye,eyeball,eyeballs,eyebrow,eyebrows,eyed,eyeful,eyeing,eyelash,eyelashes,eyelids,eyeliner,eyes,eyesight,eyesore,eyewitness,eyewitnesses,eyre,fa,fable,fabled,fabric,fabricate,fabricated,fabrication,fabulous,fabulously,face,faced,facedown,faceless,faces,facet,facetious,facets,facial,facials,facilitate,facilitated,facilities,facility,facing,fact,factions,factoid,factor,factories,factoring,factors,factory,facts,factual,faculties,faculty,fad,fade,faded,fades,fading,fads,fail,failed,failing,failings,fails,failure,failures,faint,fainted,fainter,faintest,fainting,fair,fairer,fairest,fairgrounds,fairies,fairly,fairness,fairway,fairy,faith,faithful,faithfulness,faithless,fajita,fake,faked,faking,falafel,falcon,fall,fallacy,fallback,fallen,fallible,falling,fallout,fallow,falls,false,falsely,falsify,faltered,fame,famed,familial,familiar,familiarize,familiars,families,family,famine,famished,famous,famously,fan,fanatic,fanaticism,fanatics,fancied,fanciful,fancy,fanfare,fang,fangs,fanning,fanny,fans,fantabulous,fantasies,fantasize,fantasized,fantasizing,fantastic,fantastically,fantasy,fantasyland,far,faraway,farce,fare,fared,farewell,farewells,farina,farm,farmer,farmers,farmhouse,farmland,farms,farrow,fart,farted,farther,farthing,farts,fascinate,fascinated,fascinating,fascination,fascism,fascist,fashion,fashionably,fashioned,fashions,fast,fastball,fasten,fastened,faster,fastest,fasting,fat,fatal,fatalities,fatality,fate,fates,father,fathered,fatherhood,fathering,fatherly,fathers,fathom,fatigue,fatigues,fatso,fatten,fattening,fattest,fatty,faucet,faucets,fault,faults,faulty,faun,fauna,faux,fave,favor,favorable,favorably,favored,favorite,favorites,favoritism,favors,favour,favours,fawning,fax,faxed,faxes,fay,fear,feared,fearful,fearing,fearless,fears,fearsome,feasible,feast,feasting,feat,feather,feathering,feathers,feats,feature,featured,features,featuring,feces,feckless,fed,federal,federation,fedora,feds,fee,feeble,feed,feedback,feeder,feeders,feeding,feeds,feel,feelers,feeling,feelings,feels,fees,feet,feign,feisty,felicity,feline,fell,fella,fellah,fellahs,fellas,fellat
io,feller,fellers,felling,fellow,fellows,fellowship,felon,felonies,felonious,felons,felony,felt,female,females,feminine,feminist,feminists,femme,femmes,femur,fen,fence,fences,fencing,fend,fender,fenders,fending,fer,ferment,fern,ferret,ferrets,ferry,fertile,fertility,fertilization,fertilize,fertilizer,fervent,fervor,fess,fester,festering,festival,festivals,festive,festivities,feta,fetal,fetch,fetched,fetish,fetus,fetuses,feud,feudal,feuds,fever,feverish,few,fewer,fey,fez,fiance,fiancee,fiasco,fib,fibber,fibbing,fiber,fiberglass,fibers,fibre,fibrosis,fickle,fiction,fictional,ficus,fiddle,fiddler,fiddling,fidelity,fido,fiefdom,field,fielder,fielding,fields,fieldstone,fiend,fiendish,fiends,fierce,fiercest,fiery,fiesta,fife,fifteen,fifteenth,fifth,fifths,fifties,fiftieth,fifty,fig,fight,fighter,fighters,fighting,fights,figment,figurative,figuratively,figure,figured,figurehead,figures,figurines,figuring,file,filed,files,filet,filibuster,filing,fill,filled,filler,fillets,filling,fillings,fills,filly,film,filmed,filming,filmmaker,filmmakers,filmmaking,films,filter,filtered,filters,filth,filthy,filtration,fin,finagle,final,finale,finalist,finalists,finality,finalize,finalized,finalizing,finally,finals,finance,financed,finances,financial,financially,financing,finch,find,finder,finders,finding,findings,finds,fine,fined,finer,fines,finesse,finessed,finest,finger,fingered,fingernail,fingernails,fingerprint,fingerprints,fingers,fingertips,finish,finished,finishes,finishing,fink,fins,fir,fire,firearms,fireball,fireballs,firebird,firebug,firecracker,firecrackers,fired,firefight,firefighters,fireflies,firehouse,firelight,fireman,firemen,fireplace,fireplaces,firepower,fireproof,fires,firestorm,firewater,firewood,fireworks,firing,firm,firmer,firmly,firms,firs,first,firstborn,firsthand,firstly,fish,fished,fisher,fisherman,fishermen,fishes,fishing,fishnet,fishy,fission,fist,fisted,fistfight,fistful,fists,fit,fitch,fitness,fits,fitted,fitter,fittest,fitting,fittings,five,fiver,fives,fix,fixable,fixated,fixating,fixation,fixed,fixer,fixes,fixing,fixings,fixture,fixtures,fizz,fizzle,fizzled,flack,flag,flagged,flagging,flags,flagship,flail,flailing,flair,flak,flake,flaked,flakes,flaky,flame,flamenco,flames,flaming,flammable,flan,flank,flanking,flanks,flannel,flap,flapjacks,flapped,flapping,flaps,flare,flared,flares,flaring,flash,flashback,flashbacks,flashed,flashes,flashing,flashlight,flashlights,flashy,flask,flat,flatbed,flats,flattened,flatter,flattered,flatterer,flattering,flatters,flattery,flatulence,flatware,flaunt,flaunting,flavor,flavored,flavors,flavour,flavours,flaw,flawed,flawless,flawlessly,flaws,flay,flayed,flea,fleabag,fleas,flecks,fled,fledged,fledgling,flee,fleece,fleeing,fleet,fleeting,flesh,fleshy,fletcher,flew,flex,flexibility,flexible,flexing,flick,flicked,flicker,flickering,flicking,flicks,flier,fliers,flies,flight,flights,flighty,flimsy,flinch,flinching,fling,flinging,flint,flip,flipped,flipper,flippers,flipping,flips,flirt,flirtatious,flirted,flirting,float,floated,floater,floating,floats,flock,floe,flog,flogged,flogging,flood,flooded,floodgates,flooding,floods,floor,floorboard,floorboards,floored,floors,floozy,flop,flopped,flopping,floppy,flops,flora,floral,florence,florin,florist,florists,floss,flossing,flotation,flounder,floundering,flour,flourished,flow,flower,flowering,flowers,flowing,flown,flows,flu,flue,fluff,fluffed,fluffing,fluffy,fluid,fluids,fluke,flung,flunk,flunked,flunkies,flunking,flunky,fluorescent,flurries,flurry,flush,flushed,flustered,flute,flutes,fluttering,flux,fly,flyboy,
flyer,flyers,flying,foal,foam,foaming,foamy,fob,focal,focus,focused,focuses,focusing,focussed,focussing,fodder,foe,fog,fogged,foggiest,foibles,foil,foiled,foisting,fold,folded,folder,folding,folds,foliage,folk,folklore,folks,folksy,follicle,follow,followed,follower,followers,following,follows,folly,fond,fonder,fondest,fondle,fondled,fondling,fondue,font,food,foods,fool,fooled,foolhardy,fooling,foolish,foolishness,foolproof,fools,foot,footage,football,footed,footer,foothold,footing,footman,footnote,footnotes,footprints,footsies,footsteps,footstool,footwear,footwork,fop,for,forage,foraging,foray,forbade,forbid,forbidden,forbidding,forbids,force,forced,forceful,forceps,forces,forcibly,forcing,ford,fore,forearm,foreclosed,foreclosure,forefathers,forego,foregone,forehead,foreign,foreigners,foreman,foremost,forensic,forensics,foreplay,foresaw,foresee,foreseeable,foresight,foreskin,forest,forester,forestry,forests,forethought,foretold,forever,foreword,forfeit,forfeited,forfeits,forgave,forge,forged,forger,forgeries,forgery,forget,forgetful,forgets,forgettable,forgetting,forging,forgive,forgiven,forgiveness,forgives,forgiving,forgo,forgot,forgotten,fork,forked,forklift,forks,form,formal,formaldehyde,formality,formally,format,formation,formations,formed,former,formerly,formidable,forming,forms,formula,formulas,formulate,formulating,fornicating,fornication,forsake,forsaken,forsaking,fort,forte,forth,forthcoming,forthright,forthwith,forties,fortieth,fortified,fortitude,fortnight,fortress,fortuitous,fortunate,fortunately,fortune,fortunes,fortuneteller,forty,forum,forward,forwarded,forwarding,fosse,fossil,fossilized,foster,fostered,fought,foul,fouled,found,foundation,foundations,founded,founder,founding,fountain,fountainhead,fountains,four,fours,foursome,fourteen,fourteenth,fourth,fowl,fowler,fox,foxes,foxhole,foxholes,foxy,foyer,fraction,fractions,fracture,fractured,fractures,fragile,fragment,fragments,fragrance,frail,frailty,frame,framed,framers,frames,framework,framing,franc,franchise,franchises,francs,frank,frankfurter,franklin,frankly,franks,frantic,frantically,frat,fraternal,fraternities,fraternity,fraternization,fraternizing,fraud,frauds,fraught,fraulein,fray,frayed,frazzled,freak,freaked,freaking,freakish,freaks,freaky,freckle,freckling,free,freebie,freed,freedman,freedom,freedoms,freeing,freelance,freelancer,freelancing,freeloader,freeloading,freely,freeman,freer,frees,freeway,freeways,freeze,freezer,freezers,freezes,freezing,freight,freighter,french,frenzy,frequencies,frequency,frequent,frequently,fresh,freshen,freshener,freshening,fresher,freshest,freshly,freshman,freshmen,freshness,freshwater,fret,fretting,friction,fridge,fried,friend,friendless,friendlier,friendliest,friendly,friends,friendship,friendships,fries,frigate,frigging,fright,frighten,frightened,frightening,frightens,frightful,frightfully,frigid,frills,fringe,fringes,frisk,frisky,fritter,fritters,fritz,frivolous,frizzy,fro,frog,frogs,frolic,from,front,frontal,frontier,frontiers,fronting,fronts,frost,frostbite,frosting,frosty,froufrou,frown,froze,frozen,frugal,fruit,fruitcake,fruitful,fruition,fruitless,fruits,fruity,frustrated,frustrates,frustrating,frustration,fry,fryer,frying,fuchsia,fudge,fudged,fudging,fuel,fueled,fueling,fuels,fugitive,fugitives,fugu,fugue,fuhrer,fulcrum,fulfil,fulfill,fulfilled,fulfilling,fulfillment,full,fuller,fullest,fully,fumble,fumbling,fumes,fumigated,fumigating,fumigation,fun,function,functional,functioning,functions,fund,fundamental,fundamentalist,fundamentally,funded,funding,fundraiser,fundraisers,f
undraising,funds,funeral,funerals,fungal,fungi,fungus,funk,funky,funnier,funnies,funniest,funny,fur,furious,furiously,furlong,furnace,furnished,furniture,furrowed,furry,furs,further,furthering,furthermore,furthest,furtive,fury,fuse,fused,fuses,fusilli,fusion,fuss,fussing,fussy,futile,futility,futon,future,futures,futuristic,fuzz,fuzzy,gabbing,gabby,gaby,gadget,gaff,gag,gaga,gage,gagged,gagging,gaggle,gags,gaiety,gain,gained,gainful,gainfully,gaining,gains,gal,gala,galactic,galaxy,gale,galilee,gall,gallant,gallbladder,galleries,gallery,galling,gallivanting,gallon,gallons,galloping,gallows,galore,galoshes,gals,galvanized,gambit,gamble,gambler,gambling,game,gamer,games,gamma,gammy,gams,gamut,gander,gang,ganged,ganging,gangland,gangly,gangrene,gangs,gangster,gangsters,gangway,ganja,gap,gaps,gar,garage,garages,garb,garbage,garcon,garden,gardener,gardeners,gardenias,gardening,gardens,gargantuan,gargling,gargoyle,gargoyles,garibaldi,garish,garland,garlic,garment,garments,garner,garnet,garnish,garrison,garter,garters,garth,gas,gasbag,gases,gasket,gaslight,gasoline,gasp,gasped,gasping,gassed,gasses,gassy,gastric,gat,gate,gated,gatehouse,gatekeeper,gates,gateway,gather,gathered,gathering,gatherings,gathers,gator,gaudy,gauge,gauging,gault,gauntlet,gauze,gave,gavel,gawk,gawking,gaze,gazebo,gazed,gazelle,gazpacho,gear,geared,gearing,gears,gearshift,gecko,ged,gee,geek,geeks,geeky,gees,geese,geez,geezer,geezers,geishas,gel,gelatin,gelato,gels,gem,gemma,gems,gen,gendarme,gender,gene,genealogy,general,generally,generals,generate,generated,generates,generating,generation,generations,generator,generators,generic,generosity,generous,generously,genes,genesis,genetic,genetically,geneticist,genetics,geneva,genie,genital,genitals,genius,geniuses,genoa,genome,genre,gentle,gentleman,gentlemanly,gentlemen,gentler,gently,gents,genuine,genuinely,genus,geographic,geographical,geographically,geography,geological,geologist,geologists,geology,geometric,geometry,geopolitical,geosynchronous,gerbil,gerbils,geriatric,geriatrics,germ,german,germans,germs,gestapo,gestating,gestation,gesture,gestures,get,getaway,gets,getter,getting,getup,geyser,ghastly,ghetto,ghettos,ghost,ghosts,ghoul,ghoulish,giant,giants,gib,gibberish,gibbons,giblets,gibson,giddy,giddyup,gift,gifted,gifts,gig,gigantic,giggle,giggles,giggling,giggly,gigolo,gigs,gilbert,gilded,gill,gills,gimbal,gimlet,gimme,gimmicks,gimmie,gimp,gin,ginger,gingerbread,ginny,ginseng,giraffe,giraffes,girl,girlfriend,girlfriends,girlie,girls,girly,girth,gist,git,give,giveaway,given,givens,giver,givers,gives,giving,gizmo,gizmos,gizzard,glaciers,glad,glade,glades,gladiator,gladiators,gladly,gladstone,glamor,glamorous,glamour,glance,glanced,glances,gland,glands,glare,glares,glaring,glass,glasses,glassware,glassy,glaucoma,glaze,glazed,glazer,gleam,gleaming,glee,glen,glengarry,glib,glide,gliders,gliding,glimmer,glimpse,glimpsed,glint,glitch,glitches,glitter,glittering,gloat,gloating,global,globe,globes,gloom,gloomy,glop,gloria,glorified,glorious,gloriously,glory,gloss,glossy,glove,glover,gloves,glow,glowing,glucose,glue,glued,glues,gluing,glum,glutton,gluttony,glycerin,gnat,gnats,gnaw,gnawing,gnome,gnomes,go,goa,goad,goading,goal,goalie,goals,goat,goatee,goats,gob,gobble,gobbledegook,gobbledygook,gobbles,goblet,goblin,goblins,gobs,god,goddaughter,goddess,goddesses,godfather,godforsaken,godlike,godliness,godly,godmother,gods,godsend,godson,goers,goes,gofer,goggle,goggles,going,goiter,gold,golden,goldenrod,goldfish,golf,golfers,golfing,golly,gondola,gone,goner,goners,gong,gonorrhea,gonzo,g
oo,goober,goobers,good,goodbye,goodbyes,goodie,goodies,goodly,goodman,goodness,goods,goodwill,goody,gooey,goof,goofball,goofing,goofy,googly,goon,gooney,goonie,goonies,goons,goopy,goose,gopher,gor,gore,gorge,gorgeous,gorilla,gory,gosh,gospel,gossip,gossiping,gossips,got,gothic,gotten,gouge,gouged,gouging,goulash,gourd,gourmet,gout,governed,governess,governing,government,governmental,governments,governor,gown,gowns,grab,grabbed,grabbing,grabby,grabs,grace,graced,graceful,gracefully,graces,gracing,gracious,graciously,grad,grade,graded,grader,graders,grades,grading,gradually,graduate,graduated,graduates,graduating,graduation,graffiti,grafts,graham,grail,grain,grainy,gram,grammar,gramps,grams,gran,grand,grandad,grandbaby,grandchild,grandchildren,granddad,granddaddy,granddaughter,granddaughters,grander,grandeur,grandfather,grandfathers,grandkid,grandkids,grandma,grandmother,grandmothers,grandpa,grandparent,grandparents,grandson,grandstand,grange,granger,granite,granny,granola,grant,granted,granting,grants,grape,grapefruit,grapes,grapevine,graphic,graphics,graphite,graphs,grasp,grasped,grasping,grass,grasshopper,grassy,grate,grated,grateful,gratification,gratified,gratifying,gratitude,gratuitous,gratuity,grave,gravel,gravely,graves,gravest,graveyard,graveyards,gravity,gravy,gray,grazed,grazing,grease,greaseball,greased,greasing,greasy,great,greater,greatest,greatly,greatness,greed,greedy,greek,green,greener,greenhouse,greens,greet,greeting,greetings,greets,gremlin,grenade,grenades,grew,grey,greyhound,grid,griddle,gridlock,grief,grievance,grieve,grieves,grieving,griff,griffin,griffins,grift,grifters,grill,grilled,grilling,grills,grim,grime,grimes,grin,grind,grinder,grinding,grinds,grindstone,grinning,grins,grip,gripe,gripes,griping,gripping,grips,grisly,gristle,grits,grizzly,groaning,grocer,groceries,grocery,grog,groggy,groin,groom,groomed,groomer,grooming,grooms,groomsmen,groove,groovy,grope,groped,groping,gross,grossed,grosser,grosses,grossing,grossly,grotesque,grouch,grouchy,ground,groundbreaking,grounded,groundhog,grounding,groundless,grounds,groundskeeper,groundwork,group,groupie,groupies,grouping,groups,grouse,grove,grovel,groveling,groves,grow,growers,growing,growl,grown,grownup,grownups,grows,growth,grub,grubbing,grubby,grubs,grudge,grudges,grudging,gruel,gruesome,grumbling,grumpy,grunge,grungy,grunt,guacamole,guarantee,guaranteed,guaranteeing,guarantees,guard,guarded,guardian,guardians,guardianship,guarding,guardrail,guards,guava,guerilla,guerillas,guerrilla,guerrillas,guess,guessed,guesses,guessing,guest,guesthouse,guests,guff,guidance,guide,guided,guidelines,guides,guiding,guild,guilder,guillotine,guilt,guilty,guinea,guineas,guitar,guitarist,guitars,gulag,gulch,gulf,gull,gullible,gum,gumption,gums,gumshoe,gun,gunfire,gunk,gunman,gunmen,gunned,gunner,gunnery,gunning,gunpoint,gunpowder,guns,gunshot,gunshots,gurgling,gurney,guru,gush,gusher,gushing,gushy,gusto,gut,gutless,guts,gutter,gutters,gutting,guy,guys,guzzling,gym,gyms,gynecologist,gypped,gypsies,gypsy,ha,haberdashery,habit,habitat,habits,hack,hacked,hacker,hackers,hacking,hackman,hacks,hacksaw,had,hades,hadj,hag,haggis,haggling,hags,hah,haha,hail,hailed,hailing,hails,hair,hairball,hairbrush,haircut,haircuts,hairdo,hairdresser,hairdressers,haired,hairless,hairline,hairnet,hairpin,hairs,hairstyles,hairy,hale,half,halfback,halfway,halibut,halitosis,hall,hallelujah,hallmark,hallo,hallowed,halls,hallucinate,hallucinating,hallucination,hallucinations,hallucinogen,hallway,hallways,halo,halothane,halt,halter,halves,ham,hamburg,hamburger,ha
mburgers,hamlet,hammer,hammered,hammering,hammers,hammock,hamper,hams,hamster,hamsters,hamstring,hand,handbag,handbags,handball,handbasket,handbook,handcuffed,handcuffs,handed,handful,handgun,handguns,handheld,handicap,handicapped,handing,handkerchief,handle,handled,handler,handles,handling,handoff,handout,handouts,handprint,handrail,hands,handshake,handsome,handsomely,handsomest,handstand,handwriting,handy,handyman,hang,hanged,hanger,hangers,hanging,hangman,hangnail,hangout,hangouts,hangover,hangovers,hangs,hank,hankering,hankie,hanks,hanky,hansom,hap,happen,happened,happening,happenings,happens,happier,happiest,happily,happiness,happy,haps,harass,harassed,harassing,harassment,harbor,harboring,harbors,harbour,harbouring,hard,hardball,harden,hardened,hardens,harder,hardest,hardly,hardship,hardware,hardwired,hardworking,hardy,hare,harem,harlot,harm,harmed,harmful,harming,harmless,harmonious,harmony,harms,harness,harnessed,harp,harper,harpies,harping,harpoons,harps,harpy,harridan,harriers,harrowing,harry,harsh,harshly,hart,harts,harvest,has,hash,hassle,hassled,hassling,hast,haste,hasten,hastened,hastily,hasty,hat,hatch,hatchback,hatched,hatches,hatchet,hate,hated,hateful,hater,hates,hath,hating,hatred,hats,hatter,haughty,haul,hauled,hauling,hauls,haunt,haunted,haunting,haunts,haute,have,haven,having,havoc,haw,hawk,hawker,hawking,hawks,hay,haycock,hayloft,hayseed,haystack,hayward,haywire,hazard,hazardous,hazards,haze,hazel,hazelnut,hazing,hazy,he,head,headache,headaches,headband,headboard,headdress,headed,header,headfirst,headgear,headhunter,heading,headless,headlights,headline,headliner,headlines,headlining,headlock,headlong,headmaster,headphones,headpiece,headquarters,heads,headset,headsets,headstone,headway,heady,heal,healed,healer,healers,healing,heals,health,healthier,healthiest,healthy,heap,heaped,heaping,heaps,hear,heard,hearing,hearings,hears,hearsay,hearse,heart,heartache,heartbeat,heartbeats,heartbreak,heartbreaker,heartbreaking,heartbroken,heartburn,hearted,heartfelt,heartland,heartless,hearts,heartsick,heartthrob,heartwarming,hearty,heat,heated,heater,heath,heathen,heathens,heather,heating,heats,heave,heaved,heaven,heavenly,heavens,heavier,heaviest,heavily,heaving,heavy,heavyset,heavyweight,heck,heckle,heckled,heckles,heckling,hectic,hector,hedge,hedges,hedging,heed,heel,heeled,heels,heft,hefty,heh,heifer,heigh,height,heighten,heightened,heights,heil,heinie,heinous,heir,heiress,heirloom,heirlooms,heirs,heist,held,helicopter,helicopters,helipad,helix,hell,hellbent,heller,hellfire,hellhole,hellhound,hellish,hello,hells,helluva,helm,helmet,helmets,helms,helo,help,helped,helper,helpers,helpful,helping,helpless,helplessly,helplessness,helps,hem,hemisphere,hemline,hemlines,hemlock,hemoglobin,hemolytic,hemorrhaging,hemorrhoid,hemorrhoids,hemp,hen,hence,henchman,henchmen,henhouse,henry,hens,hep,hepatitis,her,herald,herb,herbal,herbs,hercules,herd,herding,herds,here,hereafter,hereby,hereditary,herein,heresy,heritage,hermaphrodite,hernia,hernias,herniated,hero,heroes,heroic,heroics,heroine,herring,hers,herself,hertz,hesitant,hesitate,hesitated,hesitates,hesitating,hesitation,hessian,hetero,heterosexual,hex,hexes,hey,heyday,hi,hiatus,hibernating,hibernation,hibiscus,hic,hiccup,hiccups,hick,hickey,hickory,hicks,hid,hidden,hide,hideaway,hideous,hideously,hideout,hides,hiding,hierarchy,hieroglyphs,high,highball,higher,highest,highland,highlands,highlight,highlighted,highlighters,highlights,highly,highness,highs,hightail,highway,highways,hijacking,hijinks,hike,hiked,hiker,hikers,hiking,hilarious,
hilarity,hill,hillbillies,hillbilly,hillcrest,hills,hillside,hilltop,hilt,him,himself,hind,hindrance,hindsight,hinges,hint,hinted,hinting,hints,hip,hippest,hippie,hippies,hippo,hippopotamus,hippos,hips,hire,hired,hires,hiring,his,hiss,hissed,hisself,hisses,hissing,historian,historic,historical,historically,histories,history,hit,hitch,hitched,hitchhike,hitchhiker,hitchhikers,hitchhiking,hitching,hits,hitter,hitters,hitting,hive,hives,hm,hmm,ho,hoagie,hoarding,hoarse,hoax,hoaxes,hob,hobbies,hobbit,hobbits,hobble,hobbling,hobby,hobo,hoboes,hock,hockey,hocking,hocks,hocus,hoe,hoedown,hoes,hog,hogan,hogging,hogs,hogwash,hoist,hoisted,hoisting,hokey,hold,holden,holder,holders,holding,holdings,holds,holdup,hole,holed,holes,holiday,holidays,holier,holies,holiest,holiness,holing,holland,hollandaise,holler,hollering,hollers,hollow,hollowed,holly,holocaust,hologram,holographic,holster,holt,holy,homage,hombre,hombres,home,homebody,homeboy,homeboys,homecoming,homegrown,homeless,homely,homemade,homemaker,homeopathic,homeowners,homer,homeroom,homers,homes,homesick,homestead,hometown,homework,homey,homicidal,homicide,homicides,homily,homing,hon,honda,honed,honest,honestly,honesty,honey,honeymoon,honeymooners,honeymooning,honeymoons,honeysuckle,hong,honk,honks,honky,honor,honorable,honorably,honorary,honored,honoring,honors,honour,honourable,honouring,honours,hooch,hood,hooded,hoodlums,hoodwinked,hoof,hook,hooked,hooker,hookers,hooking,hooks,hookup,hooky,hooligan,hooligans,hoop,hoopla,hoops,hooray,hoosegow,hoot,hootenanny,hooter,hooters,hooves,hop,hope,hoped,hopeful,hopefully,hopeless,hopelessly,hopelessness,hopes,hoping,hopped,hopper,hopping,hoppy,hops,hora,horde,hordes,horizon,horizons,horizontal,hormonal,hormone,hormones,horn,horned,hornet,hornets,horns,horny,horoscope,horoscopes,horrendous,horrible,horribly,horrid,horrific,horrified,horrifying,horror,horrors,horse,horseback,horsehair,horseman,horsemen,horsepower,horseradish,horses,horseshit,horsey,horsing,horticulture,hose,hosed,hoses,hosing,hospice,hospitable,hospital,hospitality,hospitalized,hospitals,host,hostage,hostages,hosted,hostel,hostess,hostile,hostiles,hostilities,hostility,hosting,hosts,hot,hotbed,hotcakes,hotdog,hotdogs,hotel,hotels,hothead,hotheaded,hothouse,hotline,hots,hotshot,hotter,hottest,hound,hounded,hounding,hounds,hour,hourglass,hourly,hours,house,housebroken,housecleaning,housed,houseguest,houseguests,household,households,housekeeper,housekeeping,houseman,houses,housewarming,housewife,housewives,housing,hovel,hover,hovercraft,hovering,how,howdy,howe,however,howl,howling,hows,hoy,hub,hubbub,hubby,hubcaps,hubris,huck,huckleberry,huckster,huddle,huddled,hue,huffed,huffing,huffy,hug,huge,hugest,hugged,hugger,huggers,hugging,hugs,huh,hula,hulk,hulking,hull,hullo,hum,human,humane,humanitarian,humanity,humankind,humanly,humanoid,humans,humble,humbled,humbling,humbly,humbug,humdinger,humid,humidifier,humidity,humidor,humiliate,humiliated,humiliates,humiliating,humiliation,humiliations,humility,hummed,hummer,humming,hummus,humor,humored,humoring,humorless,humour,hump,humpback,humped,humph,humping,humps,hums,humus,hun,hunch,hunchback,hunched,hunches,hundred,hundreds,hundredth,hung,hunger,hungover,hungry,hunh,hunk,hunker,hunks,hunky,huns,hunt,hunted,hunter,hunters,hunting,hup,hurdles,hurl,hurley,hurling,hurrah,hurray,hurricane,hurricanes,hurried,hurry,hurrying,hurst,hurt,hurtful,hurting,hurtling,hurts,husband,husbands,hush,hushed,husk,huskies,husks,husky,hussy,hustle,hustler,hut,hutch,huts,huzzah,hybrid,hybrids,hydra,hydrate,hydrated,hydrau
lic,hydraulics,hydrochloride,hydrogen,hydrolase,hyenas,hygiene,hygienic,hygienist,hymn,hymns,hype,hyped,hyper,hyperactive,hyperbole,hyperspace,hypertension,hyperventilate,hyperventilating,hyphen,hypnosis,hypnotic,hypnotize,hypnotized,hypo,hypochondriac,hypocrisy,hypocrite,hypocrites,hypocritical,hypodermic,hypotensive,hypotenuse,hypothermia,hypothetical,hypothetically,hysterectomy,hysteria,hysteric,hysterical,hysterically,hysterics,iambic,ibuprofen,ice,iceberg,icebergs,icebox,icebreaker,iced,icehouse,iceman,ich,icicle,icicles,icing,ick,icky,icon,icons,icy,id,idea,ideal,idealist,idealistic,idealized,ideally,ideals,ideas,identical,identifiable,identification,identified,identifies,identify,identifying,identities,identity,ideology,idiocy,idiom,idiosyncrasies,idiot,idiotic,idiots,idle,idling,idly,idol,idolized,idols,idyllic,if,iff,iffy,ifs,igloo,igneous,ignite,ignited,ignition,ignoramus,ignorance,ignorant,ignore,ignored,ignores,ignoring,iguana,iguanas,iliad,ilk,ill,illegal,illegally,illegals,illegible,illegitimate,illicit,illness,illnesses,illogical,ills,illuminate,illuminating,illumination,illusion,illusions,illustrate,illustrated,illustrates,illustration,illustrious,image,imagery,images,imaginable,imaginary,imagination,imaginations,imaginative,imagine,imagined,imagines,imagining,imam,imbecile,imbeciles,imbecilic,imbedded,imbued,imitate,imitating,imitation,immaculate,immaterial,immature,immaturity,immediate,immediately,immense,immensely,immerse,immersion,immigrants,immigration,imminent,immobile,immobilize,immodest,immoral,immortal,immortality,immortalized,immune,immunity,immutable,imp,impact,impacted,impacts,impaired,impala,impale,impaled,impart,impartial,impassioned,impatience,impatient,impeach,impeached,impeccable,impediment,impediments,impeding,impending,impenetrable,imperative,imperfect,imperfection,imperfections,imperial,imperialist,impersonal,impersonate,impersonated,impersonating,impersonator,impertinent,impervious,impetuous,implant,implanted,implants,implausible,implement,implemented,implicate,implicated,implicates,implicating,implication,implications,implicitly,implied,implies,imploding,implore,imply,implying,impolite,import,importance,important,importantly,imported,importer,importing,imports,impose,imposed,imposing,imposition,impossibility,impossible,impossibly,imposter,impostor,impotence,impotent,impound,impounded,impractical,impregnated,impress,impressed,impressing,impression,impressionable,impressionists,impressions,impressive,imprint,imprisoned,imprisonment,impromptu,improper,improperly,impropriety,improve,improved,improvement,improvements,improves,improving,improvise,improvised,improvising,impudence,impudent,impulse,impulses,impulsive,impulsively,impunity,in,inability,inaccessible,inaccuracies,inaccurate,inactive,inadequacy,inadequate,inadmissible,inadvertently,inappropriate,inappropriately,inaugural,inbound,inbred,inbreeding,incantation,incantations,incapable,incapacitate,incapacitated,incarcerate,incarcerated,incarceration,incarnation,incase,incendiary,incense,incensed,incentive,incentives,inception,incessantly,incest,incestuous,inch,inches,incidence,incident,incidental,incidentally,incidents,incinerate,incinerated,incinerator,incision,incite,inclined,include,included,includes,including,incognito,incoherent,income,incoming,incommunicado,incomparable,incompatible,incompetence,incompetent,incomplete,incomprehensible,inconceivable,inconsiderate,inconsistencies,inconsistent,inconsolable,inconspicuous,inconvenience,inconveniencing,inconvenient,incorporate,incorrect,incorrectly,incor
rigible,increase,increased,increases,increasing,increasingly,incredible,incredibly,increments,incriminate,incriminating,incrimination,incubation,incubator,incubators,incur,incurred,indebted,indecent,indecision,indeed,indefensible,indefinite,indefinitely,indelicate,indentured,independence,independent,indescribable,indestructible,indeterminate,index,indicate,indicated,indicates,indicating,indication,indications,indicative,indicator,indicators,indict,indicted,indictment,indictments,indie,indies,indifference,indifferent,indigenous,indigestion,indignant,indignation,indignities,indigo,indirectly,indiscreet,indiscretion,indiscretions,indispensable,indistinguishable,individual,individuality,individually,individuals,indoor,indoors,induce,induced,inducement,inducing,induction,indulge,indulged,indulgence,indulgent,indulging,industrial,industrialist,industries,industrious,industry,inedible,ineffective,inefficient,inept,inevitability,inevitable,inevitably,inexcusable,inexpensive,inexperience,inexperienced,inexplicable,infallible,infamous,infamy,infant,infantile,infantry,infants,infarction,infatuated,infatuation,infect,infected,infecting,infection,infections,infectious,infects,inference,inferior,inferiority,infernal,inferno,infertile,infestation,infested,infidelities,infidelity,infighting,infiltrate,infiltrated,infiltration,infinite,infinitely,infinity,infirmary,inflame,inflamed,inflammation,inflammatory,inflate,inflated,inflating,inflation,inflexible,inflict,inflicted,inflicting,infliction,influence,influenced,influences,influencing,influential,influenza,influx,info,infomercial,inform,informal,informant,information,informational,informed,informer,informing,informs,infra,infraction,infrared,infrastructure,infuriate,infuriates,infuriating,infused,ingenious,ingenue,ingest,ingested,ingles,ingrates,ingratitude,ingredient,ingredients,inhabit,inhabitants,inhalation,inhale,inhaled,inhaler,inhaling,inherently,inherit,inheritance,inherited,inheriting,inherits,inhibitor,inhibitors,inhuman,inhumane,initial,initially,initials,initiate,initiated,initiating,initiation,initiative,initiatives,inject,injected,injection,injections,injector,injunction,injure,injured,injuries,injury,injustice,ink,inkling,inlaid,inland,inmate,inmates,inn,innards,inner,innermost,inning,innings,innkeeper,innocence,innocent,innocently,innocents,innocuous,innovation,innovative,innuendo,innuendoes,inopportune,input,inquest,inquiries,inquiry,inquisition,inquisitor,inroads,ins,insane,insanely,insanity,insatiable,inscription,inscrutable,insect,insects,insecure,insecurities,insecurity,inseminated,insensitive,insensitivity,inseparable,insert,insertion,inside,insider,insides,insidious,insight,insightful,insights,insignia,insignificant,insincere,insinuated,insinuating,insinuations,insipid,insist,insisted,insistence,insistent,insisting,insists,insolence,insolent,insomnia,inspect,inspected,inspecting,inspection,inspections,inspector,inspectors,inspiration,inspirational,inspire,inspired,inspires,inspiring,install,installation,installed,installing,installment,installments,instance,instances,instant,instantaneous,instantly,instead,instep,instigated,instigator,instill,instilled,instinct,instinctively,instincts,institute,instituted,institution,institutional,institutionalized,institutions,instruct,instructed,instructing,instruction,instructions,instructor,instructors,instrument,instruments,insufferable,insufficient,insulated,insulation,insulin,insult,insulted,insulting,insults,insurance,insure,insured,insuring,insurmountable,insurrection,intact,intake,integral,
integrate,integrated,integration,integrity,intellect,intellectual,intellectually,intellectuals,intelligence,intelligent,intelligently,intend,intended,intending,intends,intense,intensely,intensity,intensive,intent,intention,intentional,intentionally,intentions,intently,inter,interact,interacting,interaction,interactive,intercede,intercept,intercepted,intercepting,interchangeable,intercom,intercourse,interest,interested,interesting,interests,interface,interfacing,interfere,interfered,interference,interferes,interfering,interior,interject,interlude,intermediary,intermediate,intermission,intermittent,intern,internal,internally,international,internationally,interning,internist,internment,interns,internship,interplanetary,interpret,interpretation,interpreted,interpreter,interpreting,interpretive,interracial,interrogate,interrogated,interrogating,interrogation,interrogations,interrupt,interrupted,interrupting,interruption,interruptions,interrupts,intersection,interstate,interstellar,intertwined,intervene,intervened,intervening,intervention,interview,interviewed,interviewer,interviewing,interviews,intestinal,intestines,intimacy,intimate,intimated,intimately,intimidate,intimidated,intimidating,into,intolerable,intolerant,intoxicated,intoxicating,intoxication,intravenous,intravenously,intrigue,intrigued,intrigues,intriguing,intro,introduce,introduced,introduces,introducing,introduction,introductions,introductory,intros,intrude,intruded,intruder,intruders,intruding,intrusion,intubate,intuition,intuitive,inundated,invade,invaded,invaders,invading,invalid,invalidate,invaluable,invasion,invasive,invent,invented,invention,inventions,inventive,inventory,inverted,invest,invested,investigate,investigated,investigates,investigating,investigation,investigations,investigative,investigator,investigators,investing,investment,investments,investors,invigorated,invigorating,invincible,invisibility,invisible,invitation,invitations,invite,invited,invites,inviting,invoice,invoices,invoke,invoked,involve,involved,involvement,involves,involving,ion,ions,ipecac,iris,iron,ironclad,ironed,ironic,ironically,ironies,ironing,irons,irony,irrational,irrationally,irregular,irregularities,irrelevant,irreparable,irreplaceable,irresistible,irresponsibility,irresponsible,irrevocably,irrigate,irrigation,irritable,irritate,irritated,irritating,irritation,is,island,islanders,islands,isolate,isolated,isolation,isotopes,issue,issued,issues,issuing,it,itch,itches,itching,itchy,item,items,itinerary,its,itself,ivories,ivory,ivy,jab,jabber,jabbering,jabot,jabs,jack,jackal,jackals,jacked,jackers,jacket,jackets,jacking,jackpot,jacks,jade,jaded,jag,jagger,jags,jaguar,jaguars,jail,jailbird,jailbreak,jailed,jailhouse,jake,jalopy,jam,jamboree,jammed,jammer,jammies,jamming,jams,jane,janitor,janitorial,janitors,japan,jar,jargon,jarring,jars,jasmine,jasper,jaundice,jaunt,java,javelin,jaw,jawbone,jawed,jaws,jay,jaywalking,jazz,jazzed,jealous,jealousy,jean,jeans,jeep,jeepers,jeeps,jeez,jefe,jell,jellies,jelly,jellybean,jellyfish,jenny,jeopardize,jeopardized,jeopardizing,jeopardy,jerk,jerked,jerkin,jerking,jerks,jerky,jerries,jerry,jersey,jerseys,jess,jesse,jest,jester,jesuit,jesuits,jet,jets,jettison,jew,jewel,jeweler,jewelers,jewellery,jewelry,jewels,jews,jezebel,jib,jiff,jiffy,jig,jiggle,jiggling,jiggly,jigsaw,jihad,jill,jilted,jiminy,jimmies,jimmy,jin,jingle,jingles,jingling,jinx,jitters,jittery,jo,job,jobless,jobs,jock,jockey,jockeys,jocks,jockstrap,joe,joes,joey,jog,jogger,jogging,john,johnny,johns,join,joined,joiner,joining,joins,joint,joints,joke
,joked,joker,jokers,jokes,joking,jollies,jolly,jolt,jones,jordan,joseph,josh,jot,jotted,journal,journalism,journalist,journalistic,journalists,journals,journey,journeyed,journeys,joust,joy,joyful,joyous,joys,joystick,jubilee,judas,judge,judged,judgement,judges,judging,judgment,judgmental,judgments,judicial,judiciary,judo,jug,juggernaut,juggle,juggling,jughead,jugs,jugular,juice,juiced,juices,juicy,jujitsu,juke,jukebox,julep,jumble,jumbled,jumbo,jump,jumped,jumper,jumpers,jumping,jumps,jumpsuit,jumpy,junction,juncture,jungle,jungles,junior,juniors,juniper,junk,junkie,junkies,junky,junkyard,juries,jurisdiction,juror,jurors,jury,jus,just,justice,justifiable,justification,justified,justifies,justify,jut,juvenile,ka,kabob,kahuna,kaiser,kale,kaleidoscope,kane,kangaroo,kaon,kappa,kaput,karaoke,karat,karate,karma,kasha,kashmir,kat,kay,kayak,kayaking,keel,keeled,keen,keep,keeper,keepers,keeping,keeps,keg,kegs,keister,kelly,kelp,kelson,ken,kendo,keno,kent,kept,kern,kerosene,kerry,ketch,ketchup,kettle,key,keyboard,keyboards,keycard,keyhole,keynote,keys,keystone,khaki,khakis,khan,kibble,kibosh,kick,kickback,kickbacks,kickboxing,kicked,kicker,kicking,kicks,kicky,kid,kidder,kiddie,kiddies,kidding,kiddo,kidnap,kidnapped,kidnapper,kidnappers,kidnapping,kidnaps,kidney,kidneys,kids,kielbasa,killjoy,kiln,kilo,kilometer,kilometers,kilos,kilt,kilter,kimono,kin,kind,kinder,kindergarten,kindergartners,kindest,kindling,kindly,kindness,kinds,king,kingdom,kingdoms,kingpin,kings,kink,kinky,kins,kinship,kiosk,kip,kirk,kismet,kiss,kissable,kissed,kisser,kisses,kissing,kissy,kit,kitchen,kite,kites,kitten,kittens,kitties,kitty,kiwi,klutz,klutzy,knack,knapsack,knee,kneecap,kneecaps,kneed,kneel,kneeling,knees,knelt,knew,knickers,knickknacks,knife,knifed,knight,knights,knit,knitted,knitting,knives,knob,knobby,knobs,knock,knockdown,knocked,knocker,knockers,knocking,knockoff,knockout,knocks,knoll,knot,knots,knotted,know,knowing,knowingly,knowledge,knowledgeable,known,knows,knuckle,knucklehead,knuckles,koala,kobo,koi,kooks,kooky,kosher,koss,kraft,kremlin,kris,kroner,kudos,la,lab,label,labeled,labelled,labels,labor,laboratories,laboratory,labored,laborers,labour,labrador,labs,labyrinth,lac,lace,laced,lacerated,laceration,lacerations,laces,lacey,lack,lacked,lackeys,lacking,lackluster,lacks,lacquer,lacrosse,lactic,lactose,lacy,lad,ladder,ladders,laddies,laden,ladies,ladle,lads,lady,ladyship,lag,lager,lagging,lagoon,laid,lair,laird,lake,laker,lakers,lakes,lakeshore,lakeside,lam,lama,lamb,lambda,lambert,lambs,lame,lameness,lament,laminated,lamp,lamppost,lamps,lance,lancer,land,landed,lander,landers,landfall,landfill,landing,landings,landlady,landlord,landlords,landmark,landmarks,lands,landscape,landscapes,landscaping,landslide,lane,lanes,lang,langley,language,languages,languishing,lanky,lantern,lanyard,lap,lapdog,lapel,lapping,laps,lapse,lapsed,lapses,laptop,laptops,lar,larceny,larch,lard,large,largely,larger,largest,lark,lars,larvae,larval,laryngitis,las,lasagna,lasagne,lascivious,laser,lasers,lash,lashed,lashes,lashing,lass,lassie,lasso,last,lasted,lasting,lasts,latch,latched,late,lately,latent,later,latest,latex,lathe,lather,latino,latitude,latrine,latte,latter,lattes,laugh,laughable,laughed,laughing,laughingstock,laughs,laughter,launch,launched,launcher,launches,launching,launder,laundered,laundering,laundry,laura,laureate,laurel,lava,lavender,lavish,lavished,law,lawful,lawfully,lawman,lawmen,lawn,lawnmower,lawns,laws,lawsuit,lawsuits,lawyer,lawyered,lawyers,lax,laxative,laxatives,lay,layaway,layer,layers,laying,layman,layout,l
ays,lazar,laziness,lazy,lead,leader,leaders,leadership,leading,leads,leaf,leaflets,leafs,leafy,league,leagues,leak,leaked,leaking,leaks,leaky,lean,leaned,leaning,leans,leap,leaping,leaps,leapt,lear,learn,learned,learner,learning,learns,learnt,leary,lease,leased,leash,least,leather,leave,leaver,leaves,leaving,lech,lecture,lectured,lectures,lecturing,led,lederhosen,ledge,ledger,ledgers,lee,leech,leeches,leering,leery,leeway,left,leftover,leftovers,lefts,lefty,leg,legacy,legal,legalities,legality,legalizing,legally,legend,legendary,legends,legged,leggy,legion,legions,legislate,legislation,legislative,legislature,legit,legitimacy,legitimate,legs,legwork,lei,leisure,leisurely,lemmings,lemon,lemonade,lemony,lemur,lend,lending,length,lengths,leniency,leno,lens,lenses,lent,lentils,leopard,leopards,leotard,leotards,leper,lepers,leprechaun,lesbian,lesbians,lesions,less,lessee,lessen,lesser,lesson,lessons,lest,let,lethal,lets,letter,letterhead,lettering,letterman,letters,letting,lettuce,leukemia,levee,level,levelheaded,levels,lever,leverage,leveraged,levitate,levitation,levity,levy,lewd,lewis,lex,lez,li,liability,liable,liaison,liane,liar,liars,libation,libel,liberal,liberals,liberated,liberating,liberation,liberties,liberty,libido,librarian,libraries,library,lice,licence,license,licensed,licenses,licensing,lichen,lick,licked,licker,licking,licks,licorice,lid,lido,lidocaine,lids,lie,lied,liege,lien,lier,lies,lieu,lieutenant,lieutenants,life,lifeboats,lifeguard,lifeless,lifelike,lifeline,lifelong,lifer,lifers,lifesaver,lifestyle,lifetime,lifetimes,lift,lifted,lifting,liftoff,lifts,ligament,ligature,light,lighted,lighten,lightened,lightening,lighter,lighters,lightheaded,lighthearted,lighthouse,lighting,lightly,lightness,lightning,lights,like,liked,likelihood,likely,likeness,likes,likewise,liking,lilac,lilacs,lilies,lily,lima,limb,limber,limbo,limbs,lime,limelight,limerick,limes,limestone,limey,limit,limitation,limitations,limited,limitless,limits,limo,limos,limousine,limousines,limp,limping,limps,lin,line,lineage,linear,linebacker,lined,linen,linens,liner,liners,lines,lineup,ling,linger,lingerie,lingering,lingers,lingo,linguistic,lining,link,linked,linking,links,linoleum,lint,lion,lions,lip,liposuction,lipped,lippy,lips,lipstick,liquefy,liquid,liquidate,liquidated,liquidation,liquor,liquored,lira,lis,list,listed,listen,listened,listener,listeners,listening,listens,listing,lists,lit,litany,lite,liter,literacy,literal,literally,literary,literate,literature,liters,lithium,litigation,litigious,litter,littered,littering,little,littlest,live,lived,livelihood,lively,liven,liver,livers,lives,livestock,livid,living,lizard,lizards,llama,lo,load,loaded,loading,loads,loaf,loafers,loan,loaned,loaning,loans,loath,loathe,loathed,loathes,loathing,lob,lobby,lobbying,lobbyist,lobe,lobes,lobotomy,lobster,lobsters,loca,local,locale,localized,locally,locals,locate,located,locating,location,locations,locator,loch,lock,lockdown,locked,locker,lockers,locket,locking,locks,locksmith,lockup,loco,locomotive,locust,locusts,lode,lodge,lodged,lodging,lodgings,loft,lofty,log,logan,logged,logger,logic,logical,logically,logistics,logo,logs,loin,loins,loitering,lollipop,lollipops,lolly,lone,lonelier,loneliest,loneliness,lonely,loner,loners,lonesome,long,longer,longest,longevity,longing,longitude,longs,longtime,loo,loofah,look,looked,looker,looking,lookout,lookouts,looks,loom,looming,loon,looney,loons,loony,loop,looped,loophole,loopholes,loops,loos,loose,loosely,loosen,loosened,loosening,looser,loosing,loot,looting,lop,lopped,lopper,lopsi
ded,loran,lord,lording,lords,lordship,lore,lorry,lose,loser,losers,loses,losing,loss,losses,lost,lot,lothario,lotion,lots,lotte,lottery,lotto,loud,louden,louder,loudest,loudly,louie,louis,lounge,lounging,louse,lousy,lout,louvre,lovable,love,loveable,lovebirds,loved,loveless,lovelier,lovelies,loveliest,lovelorn,lovely,lovemaking,lover,lovers,loves,lovesick,loving,lovingly,low,lowdown,lowe,lower,lowered,lowering,lowers,lowery,lowest,lowlife,lowlifes,lowly,lows,lox,loyal,loyalties,loyalty,lozenges,luau,lube,lubricant,lubricants,lubrication,luce,lucid,lucifer,luck,lucked,luckier,luckiest,luckily,lucky,lucrative,ludicrous,lug,luge,luggage,lukewarm,lull,lullaby,lulu,lum,lumbar,lumber,lumbering,lumberjack,luminous,lump,lumpectomy,lumps,lumpy,luna,lunacy,lunar,lunatic,lunatics,lunch,luncheon,lunches,lunching,lunchroom,lunchtime,lung,lunge,lunged,lungs,lupus,lurch,lure,lured,lures,luring,lurk,lurking,lurks,luscious,lush,lust,luster,lusting,lusts,lutz,luv,lux,luxuries,luxury,lye,lying,lymph,lymphoma,lynch,lynched,lynching,lynx,lyrical,lyrics,ma,mac,macadamia,macaroni,macaroons,macaws,mace,mach,mache,machete,machinations,machine,machinery,machines,machismo,macho,mack,mad,madam,madame,madden,maddening,madder,made,madeleine,mademoiselle,madly,madman,madmen,madness,madonna,madre,madrigal,madwoman,mae,maestro,mafia,mag,magazine,magazines,magdalene,maggot,maggots,magic,magical,magically,magician,magicians,magistrate,magnet,magnetic,magnetism,magnets,magnificence,magnificent,magnificently,magnify,magnitude,magnolia,magnum,mags,maharajah,mahatma,maid,maiden,maidens,maids,mail,mailbox,mailboxes,mailed,mailer,mailing,mailman,mails,maim,maimed,maiming,main,mainframe,mainland,mainline,mainly,maintain,maintained,maintaining,maintains,maintenance,majestic,majesty,major,majored,majoring,majority,majors,make,makeover,makeovers,maker,makers,makes,makeshift,makeup,making,makings,malaria,male,males,malevolent,malfeasance,malfunction,malfunctioning,malfunctions,malice,malicious,maliciously,malign,malignant,mall,mallard,mallet,malls,malnourished,malpractice,malt,mama,mamie,mamma,mammals,man,manage,managed,management,manager,managerial,managers,manages,managing,manatee,manatees,mandarin,mandate,mandates,mandatory,mandrake,maneuver,maneuvered,maneuvering,maneuvers,manger,mangled,mangoes,mangos,mangy,manhandle,manhandled,manhattan,manhole,manhood,manhunt,maniac,maniacal,maniacs,manic,manicure,manicured,manicures,manifest,manifestations,manifested,manifesting,manifesto,manifests,manifold,manila,manipulate,manipulated,manipulates,manipulating,manipulation,manipulations,manipulative,manipulator,mankind,manly,manna,mannequin,mannequins,manner,mannered,manners,manning,mano,manor,manpower,mansion,mansions,manslaughter,mantel,manticore,mantis,mantle,mantra,manual,manually,manufacture,manufactured,manufacturer,manufactures,manufacturing,manure,manuscript,manuscripts,many,map,maple,mapped,mapping,maps,mar,marathon,marble,marbles,marc,marcel,march,marched,marches,marching,mare,margarita,margaritas,marge,margin,marginal,marginally,margins,maria,mariachi,marigold,marijuana,marina,marinate,marine,mariner,mariners,marines,marital,maritime,mark,marked,marker,markers,market,marketing,marketplace,markets,marking,markings,marks,marksmanship,marlin,marmalade,maroon,marooned,marquee,marquis,marriage,marriages,married,marries,marrow,marry,marrying,mars,marsh,marshal,marshall,marshmallow,marshmallows,mart,marten,martial,martin,martini,martinis,martins,martyr,martyrs,marvel,marveling,marvellous,marvelous,marzipan,mas,mascara,mascot,mascots,mascul
ine,masculinity,mash,mashed,mask,masked,masking,masks,mason,masons,masquerade,masquerading,mass,massa,massacre,massacred,massacres,massage,massaged,massager,massages,massaging,masses,masseur,masseuse,massive,massively,mastectomy,master,mastered,mastermind,masterminded,masterpiece,masterpieces,masters,mastery,masturbated,masturbating,masturbation,mat,matador,match,matchbook,matched,matches,matching,matchmaker,matchmaking,mate,mated,material,materialistic,materialize,materialized,materials,maternal,maternity,mates,matey,math,mathematically,mathematician,mathematicians,mathematics,matinee,mating,matriarch,matrimonial,matrimony,matrix,matron,mats,matt,matted,matter,mattered,matters,mattress,mattresses,mature,matured,maturity,matzah,matzoh,maudlin,maul,mauled,mausoleum,maverick,mavis,maw,max,maxi,maxim,maximize,maximum,maxwell,may,maya,mayan,maybe,maybes,mayday,mayflower,mayflowers,mayhem,mayo,mayonnaise,mayor,mayoral,mayors,maze,me,mead,meadow,meadows,meager,meal,meals,mealy,mean,meaner,meanest,meanie,meaning,meaningful,meaningless,meanings,meanness,means,meant,meantime,meanwhile,measles,measly,measure,measured,measurements,measures,measuring,meat,meatball,meatballs,meathead,meatloaf,meats,meaty,mecca,mechanic,mechanical,mechanics,mechanism,mechanisms,med,medal,medals,meddle,meddled,meddlesome,meddling,medevac,media,mediator,medic,medicaid,medical,medically,medicare,medicate,medicating,medication,medications,medicine,medicines,medics,medieval,medina,mediocre,mediocrity,meditate,meditating,meditation,mediterranean,medium,medley,medusa,meet,meeting,meetings,meets,megaphone,mel,melancholy,melding,mellow,mellowed,mellowing,melodrama,melodramatic,melody,melon,melons,melt,meltdown,melted,melting,melts,member,members,membership,membrane,membranes,memento,mementos,memo,memoirs,memorabilia,memorable,memorial,memories,memorize,memorized,memorizing,memory,memos,men,menace,menacing,menage,menagerie,mend,mended,mending,menial,meningitis,menopausal,menopause,menorah,mensa,menstrual,mental,mentality,mentally,mention,mentioned,mentioning,mentions,mentor,menu,menus,meow,mercenaries,mercenary,mercer,merchandise,merchandising,merchant,merciful,mercilessly,mercury,mercy,mere,merely,merger,mergers,merging,meridian,meringue,merit,merits,merl,merle,merlot,mermaid,mermaids,merman,merrier,merrily,merry,mesa,mescaline,mesh,mesmerized,mesmerizing,mesquite,mess,message,messages,messed,messenger,messengers,messes,messier,messing,messy,met,meta,metabolic,metal,metallic,metals,metamorphosis,metaphor,metaphorical,metaphorically,metaphors,metaphysics,meteor,meteorite,meteors,meter,meters,meth,methadone,method,methodical,methodology,methods,meticulous,meticulously,metric,metro,metropolitan,mettle,mew,mezzanine,mi,mice,mick,mickey,micro,microbes,microchips,microfilm,microphone,microphones,microscope,microscopic,microwave,mid,middies,middle,middleman,middleweight,midge,midget,midgets,midland,midlife,midnight,midst,midterm,midterms,midtown,midway,midwestern,midwife,midwives,miffed,miggs,might,mightier,mighty,mignon,migraine,migrate,migration,mike,mikes,mil,mild,mildew,mildly,mile,mileage,miles,milestone,militant,militants,military,militia,milk,milked,milking,milkman,milky,mill,millennia,millennium,miller,millet,milligram,milligrams,millimeter,milling,million,millionaire,millionaires,millions,millisecond,mills,milo,milt,mime,mimes,mimic,mimicking,mimosas,mina,mince,mincemeat,mind,minded,mindful,minding,mindless,minds,mindset,mine,mined,minefield,miner,mineral,minerals,miners,mines,mingle,mini,miniature,minimal,minimalist,minimize,m
inimum,minimums,mining,minion,minions,miniscule,minister,ministers,ministry,minivan,mink,minks,minnow,minor,minorities,minority,minors,minstrel,mint,mints,minty,minus,minuscule,minuses,minute,minutes,miracle,miracles,miraculous,miraculously,mirage,mirror,mirrors,mirth,mis,misbehave,miscalculated,miscalculation,miscarriage,miscarry,miscellaneous,mischief,miscommunication,misconception,misconceptions,misconstrued,miscreant,misdeeds,misdemeanors,miserable,miserably,misery,misfit,misfits,misfortune,misgivings,misguided,mishandled,mishap,misheard,misinformed,misinterpret,misinterpretation,misinterpreted,misinterpreting,misjudged,mislead,misleading,misled,mismatched,misnomer,misogynistic,misplace,misplaced,misprint,misread,misreading,miss,missed,misses,missile,missiles,missing,mission,missionaries,missionary,missions,missis,misspelled,misspent,misspoke,missus,missy,mist,mistake,mistaken,mistakes,mister,mistletoe,mistook,mistreated,mistress,mistresses,mistrial,mistrust,misty,misunderstand,misunderstanding,misunderstandings,misunderstood,misuse,mite,mites,mitt,mitten,mittens,mitts,mitzvah,mix,mixed,mixer,mixers,mixes,mixing,mixture,mixup,mm,mo,moan,moaning,moans,moat,mob,mobile,mobility,mobilize,mobs,mobster,mocha,mock,mocked,mockery,mocking,mocks,mod,mode,model,modeled,modeling,modelling,models,modem,moderately,moderation,moderator,modern,modest,modesty,modicum,modification,modifications,modified,module,modus,mohair,mohel,moil,moist,moisture,moisturize,moisturizer,mojo,mol,molars,molasses,mold,molded,molding,moldings,molds,moldy,mole,molecular,molecule,molecules,moles,molest,molestation,molested,molester,molesting,moll,mollie,mollusk,molly,moloch,molten,molto,moly,mom,moment,momentarily,momentary,momento,moments,momentum,momma,mommies,mommy,moms,mon,monarchs,monarchy,monastery,monde,mondo,monetary,money,moneybags,moneymaker,moneys,monger,mongoloid,mongoose,mongrel,moniker,monitor,monitored,monitoring,monitors,monk,monkey,monkeys,monks,mono,monogamous,monogamy,monogrammed,monologue,monopolizing,monopoly,monoxide,monsieur,monsignor,monsoon,monster,monsters,monstrous,montage,monte,month,monthly,months,monument,monumental,monumentally,moo,mooch,moocher,mood,moods,moody,moola,moon,moonbeams,moonlight,moonlighting,moonlit,moons,moors,moose,moot,mop,mope,moped,mopes,mopey,moping,mopped,mopping,mops,mor,moral,morale,morales,morality,morally,morals,morbid,more,morello,moreover,morgan,morgue,morgues,morn,morning,mornings,morocco,moron,moronic,morons,morph,morphine,morris,morrow,morse,morsel,mort,mortal,mortality,mortals,mortar,mortars,mortgage,mortgages,mortified,mortifying,mortuary,mosey,mosque,mosquito,mosquitoes,mosquitos,moss,most,mostly,mote,motel,motels,moth,mothballs,mother,motherhood,mothering,motherless,mothers,moths,motif,motion,motions,motivate,motivated,motivates,motivation,motivational,motivations,motive,motives,motley,motor,motorbike,motorcade,motorcycle,motorcycles,motorized,motors,motto,mould,moulin,mound,mounds,mount,mountain,mountaineer,mountains,mountainside,mountaintop,mounted,mounting,mourn,mourned,mourners,mourning,mouse,mousey,mousse,moustache,mousy,mouth,mouthed,mouthful,mouthing,mouthpiece,mouths,mouthwash,mouthy,move,moved,movement,movements,movers,moves,movie,movies,moving,mow,mowed,mower,mowers,mowing,moxie,mu,much,muchacho,muchachos,muck,muckraker,mucous,mucus,mud,muddle,muddy,muff,muffin,muffins,muffled,muffler,mug,mugged,mugger,muggers,mugging,muggings,muggy,mugs,mulberry,mulch,mule,mules,muley,mulled,muller,mullet,mulligan,mulling,multimedia,multimillion,multinational,multipl
e,multiples,multiplex,multiplication,multiplied,multiply,multitude,mum,mumble,mumbled,mumbles,mumbling,mummies,mummy,mumps,mums,munch,munching,munchkin,munchkins,mundane,munitions,mural,murals,murder,murdered,murderer,murderers,murderess,murdering,murderous,murders,murky,murmur,murphy,muscle,muscled,muscles,muscular,muse,muses,museum,museums,mush,mushroom,mushrooms,mushy,music,musical,musicals,musician,musicians,musing,musket,musketeer,musketeers,muskie,muskrat,muss,mussels,must,mustache,mustang,mustangs,mustard,muster,mutant,mutants,mutated,mutating,mute,mutilate,mutilated,mutilation,mutiny,mutt,mutton,mutual,mutually,muumuu,muzzle,my,myriad,myrtle,myself,mysteries,mysterious,mysteriously,mystery,mystic,mystical,mysticism,mystified,mystifying,myth,mythic,mythical,mythological,mythology,myths,na,nacho,nachos,nada,nag,nagged,nagging,nah,nail,nailed,nailing,nails,naive,naivete,naked,nam,name,named,nameless,namely,names,namesake,nametag,naming,nan,nana,nance,nancy,nannies,nanny,nanosecond,nanotechnology,nap,napalm,naphthalene,napkin,napkins,napoleon,napping,nappy,naps,narc,narcissism,narcissist,narcissistic,narcolepsy,narcotic,narcotics,narrative,narrator,narrow,narrowed,narrowing,narrowly,narrows,narwhal,nary,nasal,nastiest,nasty,nation,national,nationally,nationals,nations,native,natives,nativity,natty,natural,naturally,nature,natured,naught,naughty,nausea,nauseated,nauseating,nauseous,nautical,nautilus,naval,navigate,navigating,navigation,navigational,navigator,navy,naw,nay,nazi,nazis,ne,near,nearby,nearest,nearly,neat,neatly,neatness,nebula,necessarily,necessary,necessity,neck,necking,necklace,necklaces,neckline,necks,necromancer,necrosis,nectar,nee,need,needed,neediness,needing,needle,needlepoint,needles,needless,needlessly,needs,needy,nefarious,negate,negative,negatively,negatives,negativity,neglect,neglected,neglectful,neglecting,negligee,negligence,negligent,negligible,negotiable,negotiate,negotiated,negotiating,negotiation,negotiations,negotiator,neighbor,neighborhood,neighborhoods,neighboring,neighborly,neighbors,neighbour,neighbours,neither,nellie,nelly,nelson,neon,neonatal,nephew,nephews,nepotism,nerd,nerds,nerdy,nerve,nerves,nervous,nervously,nervousness,ness,nest,net,nether,network,networking,networks,neural,neurological,neurologist,neurology,neuroses,neurosis,neurosurgeon,neurosurgery,neurotic,neutered,neutral,neutralize,neutralized,neutron,neve,never,nevermore,nevertheless,new,newborn,newborns,newcomer,newcomers,newest,newfound,newly,newlywed,newlyweds,news,newscast,newsletter,newsman,newspaper,newspapers,newsroom,newsstand,newsstands,newt,newton,next,nexus,nibble,nibbling,nice,nicely,nicer,nicest,niceties,niche,nick,nicked,nickel,nickels,nickname,nicknamed,nicknames,nicks,nicotine,niece,nieces,nifty,nigh,night,nightcap,nightclub,nightfall,nightgown,nighthawk,nightingale,nightlife,nightly,nightmare,nightmares,nightmarish,nights,nightstand,nightstick,nighttime,nighty,nihilist,nil,nimrod,nine,nines,nineteen,nineteenth,nineties,ninety,ninja,ninny,ninth,nip,nipped,nipper,nipping,nipple,nipples,nippy,nirvana,nite,nitrogen,nitroglycerin,nitrous,nitty,nitwit,nix,nixed,no,nobility,noble,nobleman,nobodies,nobody,nocturnal,nod,nodded,nodding,node,nodes,nods,nodules,noel,noggin,noh,noir,noise,noises,noisy,nomad,nomadic,nomads,nome,nominal,nominate,nominated,nominating,nomination,nominations,nominee,nominees,nonchalant,none,nonetheless,nonexistent,nonissue,nonnegotiable,nonsense,nonsensical,nonstop,nonviolent,noo,noodle,noodles,noon,noose,nope,nor,nordic,norland,norm,normal,normalcy,normal
ly,north,northeast,northeastern,norther,northern,northwest,northwestern,nos,nose,nosebleeds,nosed,nosedive,noses,nosey,nosh,nosing,nostalgia,nostalgic,nostril,nostrils,nosy,not,notable,notably,notary,notation,notch,notches,note,notebook,noted,notepad,notes,nothing,nothings,notice,noticeable,noticeably,noticed,notices,noticing,notification,notified,notify,noting,notion,notions,notorious,notoriously,notwithstanding,nougat,noun,nourish,nourishing,nourishment,nous,nouveau,nova,novel,novelist,novels,novelty,novocaine,now,nowadays,nowhere,noxious,nozzle,nu,nuance,nuances,nub,nubile,nuclear,nude,nudes,nudge,nudie,nudist,nudity,nuggets,nuisance,nuke,nuked,nukes,null,numb,number,numbered,numbers,numbing,numbness,numerous,nun,nunnery,nuns,nuptial,nuptials,nurse,nursed,nursery,nurses,nursing,nurtured,nurturing,nut,nutcase,nutcracker,nuthouse,nutmeg,nutrition,nutritional,nutritionist,nutritious,nuts,nutshell,nuttier,nutty,nylon,nylons,nymph,nympho,nymphomaniac,oaf,oak,oaks,oar,oars,oasis,oath,oaths,oatmeal,oats,obese,obey,obeyed,obeying,obi,obits,obituary,object,objected,objecting,objection,objectionable,objections,objective,objectives,objectivity,objects,obligated,obligation,obligations,obligatory,oblige,obliged,oblique,obliterate,obliterated,oblivion,oblivious,obnoxious,oboe,obscene,obscenities,obscenity,obscure,obscured,obscurity,observable,observant,observation,observations,observatory,observe,observed,observer,observers,observing,obsess,obsessed,obsessing,obsession,obsessions,obsessive,obsessively,obsolete,obstacle,obstacles,obstetrician,obstinate,obstructed,obstruction,obtain,obtained,obtuse,obvious,obviously,occasion,occasional,occasionally,occasions,occult,occupancy,occupant,occupants,occupation,occupational,occupations,occupied,occupy,occupying,occur,occurred,occurrence,occurrences,occurs,ocean,oceanographic,oceans,octane,octopus,od,odd,oddball,oddest,oddly,odds,ode,odious,odor,odorless,odyssey,oedipal,of,off,offbeat,offence,offend,offended,offender,offenders,offending,offends,offense,offensive,offer,offered,offering,offerings,offers,office,officer,officers,offices,official,officially,officials,officiate,offing,offs,offset,offshore,offspring,often,oftentimes,ogle,ogling,ogre,ogres,oh,ohm,oho,oil,oiled,oils,oily,oink,ointment,okay,okayed,okeydokey,okra,old,olden,older,oldest,oldie,olds,ole,olfactory,olive,olives,om,omega,omelet,omelette,omelettes,omen,omens,ominous,omitted,omnipotent,on,onboard,once,oncology,oncoming,one,ones,oneself,ongoing,onion,onions,only,onstage,onto,onward,oodles,ooh,oomph,oops,ooze,oozing,op,opal,open,opened,opener,openers,opening,openings,openly,openness,opens,opera,operas,operate,operated,operates,operating,operation,operational,operations,operative,operatives,operator,opinion,opinionated,opinions,opium,opponent,opponents,opportune,opportunist,opportunities,opportunity,opposable,oppose,opposed,opposing,opposite,opposites,opposition,oppress,oppression,oppressive,ops,opted,optic,optimism,optimist,optimistic,optimum,option,optional,options,optometrist,opus,or,oracle,oracles,oral,orally,orange,oranges,orator,orb,orbed,orbing,orbit,orbital,orbiting,orbs,orchard,orchestra,orchestrate,orchestrated,orchestrating,ordeal,order,ordered,ordering,orderlies,orderly,orders,ordinance,ordinarily,ordinary,ordinate,ordinates,ordnance,ore,oregano,organ,organic,organisation,organise,organised,organism,organisms,organization,organizations,organize,organized,organizer,organizing,organs,orgasm,orgasmic,orgasms,orgies,orgy,oriental,orientation,oriented,orienteering,origami,origin,original,orig
inality,originally,originals,originating,origins,orioles,ornament,ornamental,ornaments,ornate,ornery,orphan,orphanage,orphaned,orphans,ort,orthodontist,orthodox,orthopedic,os,ose,ostensibly,ostracized,ostrich,other,others,otherwise,otherworldly,otter,otto,ottoman,ouch,ought,ounce,ounces,our,ours,ourselves,out,outage,outback,outbid,outbound,outbreak,outburst,outcast,outcasts,outcome,outdated,outdid,outdo,outdone,outdoor,outdoors,outdoorsy,outer,outfield,outfit,outfits,outfitted,outgoing,outgrew,outgrow,outgrown,outhouse,outing,outlander,outlandish,outlast,outlaw,outlawed,outlaws,outlet,outlets,outline,outlined,outlines,outlive,outlived,outlook,outnumber,outnumbered,outpatient,outpost,outpouring,outrage,outraged,outrageous,outrageously,outrank,outright,outrun,outs,outset,outside,outsider,outsiders,outskirts,outsmarted,outspoken,outstanding,outvoted,outward,outweigh,outwit,outwitted,oval,ovarian,ovaries,ovation,oven,over,overachiever,overactive,overall,overalls,overanxious,overbearing,overbite,overblown,overboard,overcame,overcast,overcharge,overcome,overcoming,overcompensating,overcooked,overcrowded,overdid,overdo,overdone,overdose,overdosed,overdressed,overdrive,overdue,overestimate,overestimated,overexcited,overflow,overflowing,overgrown,overhaul,overhead,overhear,overheard,overhearing,overheated,overheating,overjoyed,overkill,overlap,overlapping,overload,overlook,overlooked,overlooking,overly,overnight,overnights,overpaid,overpass,overpower,overpriced,overprotective,overqualified,overrated,overreact,overreacted,overreacting,overreaction,override,overrule,overruled,overrun,overseas,oversee,overseeing,oversensitive,oversight,oversized,oversleep,overslept,overstating,overstay,overstayed,overstep,overstepping,overstress,overtaken,overthink,overtime,overtired,overtures,overturned,overview,overweight,overwhelm,overwhelmed,overwhelming,overwhelmingly,overwhelms,overworked,overwrought,ovulating,ow,owe,owed,owes,owing,owl,owls,own,owned,owner,owners,ownership,owning,owns,ox,oxen,oxford,oxygen,oxymoron,oy,oyez,oyster,oysters,ozone,pa,pac,pace,paced,pacemaker,pacer,paces,pachyderm,pacific,pacifier,pacifist,pacing,pack,package,packages,packed,packer,packet,packets,packing,packs,pact,pad,padded,padding,paddle,paddles,paddling,paddock,paddy,padlock,padre,pads,paella,pagan,page,pageant,paged,pager,pagers,pages,paging,pah,paid,pail,pain,pained,painful,painfully,painkiller,painkillers,painless,pains,paint,paintbrush,painted,painter,painters,painting,paintings,paints,pair,paired,pairs,paisley,pajama,pajamas,pal,palace,palaces,pale,paleontologist,paleontology,paler,pales,palette,palm,palmer,palms,palomino,palp,palpable,palpitations,pals,palsy,paltry,pam,pamper,pampered,pampering,pampers,pamphlet,pamphlets,pan,panache,panama,pancake,pancakes,pancreatic,panda,pandering,pandora,pane,panel,panels,panhandle,panic,panicked,panicking,panicky,panics,panned,pans,pansy,pant,pantaloons,pantheon,panther,panties,panting,pantry,pants,panty,pantyhose,pap,papa,paparazzi,papayas,paper,paperback,paperboy,papers,paperweight,paperwork,pappy,paprika,par,para,parable,parabolic,parachute,parachutes,parachuting,parade,paradigm,parading,paradise,paradox,paragon,paragraph,paragraphs,parakeet,paralegal,parallel,parallels,paralysis,paralyze,paralyzed,paralyzing,paramedic,paramedics,parameters,paramilitary,paramour,paranoia,paranoid,paranormal,paraphernalia,parasailing,parasite,parasites,parasitic,paratrooper,paratroopers,parchment,pardner,pardon,pardoned,pardons,pare,parent,parental,parenthood,parenting,parents,parfait,paris,parish,par
ishioner,parishioners,park,parka,parked,parker,parking,parks,parkway,parliament,parliamentary,parlor,parlors,parlour,parody,parole,paroled,parrot,parrots,parsley,parsons,part,parted,partial,partially,participant,participants,participate,participated,participating,participation,particle,particles,particular,particularly,particulars,partied,parties,parting,partisan,partly,partner,partnered,partners,partnership,partnerships,parton,partridge,parts,party,partying,pas,pass,passable,passage,passages,passageway,passageways,passed,passenger,passengers,passer,passes,passing,passion,passionate,passionately,passions,passive,passkey,passport,passports,password,past,pasta,paste,pasted,pastels,pasties,pastime,pastor,pastrami,pastry,pasts,pasture,pastures,pat,patch,patched,patches,patching,patchouli,pate,patent,patented,patently,patents,paternal,paternity,path,pathetic,pathetically,pathogen,pathological,pathologically,pathologist,pathology,pathos,paths,pathways,patience,patient,patiently,patients,patio,patois,patriarch,patriot,patriotic,patriots,patrol,patrolled,patrolling,patrolman,patrolmen,patron,patronize,patronized,patronizing,patrons,patsy,patten,patter,pattern,patterns,patties,patting,patty,pause,pauses,pave,paved,pavement,pavilion,paving,paw,pawing,pawn,pawning,pawnshop,paws,pax,pay,payable,payback,paycheck,paychecks,payday,paying,payload,payment,payments,payoff,payoffs,payroll,pays,pe,pea,peace,peaceful,peacefully,peacemaker,peacetime,peach,peaches,peachy,peak,peaked,peaks,peanut,peanuts,pear,pearl,pearls,pearly,peas,peasant,peat,pebble,pecan,pecans,peck,pecked,pecker,peckers,pecking,peckish,pecks,pecs,peculiar,pedal,pedaling,pedals,peddle,peddler,peddling,pedestal,pedestrian,pedestrians,pediatric,pediatrician,pediatrics,pedicure,pedicures,pedigree,pedophile,pedro,pee,peed,peeing,peek,peeked,peeking,peeks,peel,peeled,peeling,peels,peep,peepers,peeping,peeps,peer,peering,peerless,peers,pees,peeved,peg,pegged,peignoir,pele,pellet,pellets,pelt,pelting,pelts,pelvic,pemmican,pen,penal,penalize,penalized,penalty,penance,pence,pencil,pencils,pendant,pending,penetrate,penetrated,penetrating,penetration,penguin,penguins,penicillin,peninsula,penitentiary,pennant,penne,penned,pennies,penniless,penny,pens,pension,pensioners,pensions,pentagon,pentagram,penthouse,peon,people,peoples,pep,pepper,peppermint,pepperoni,peppers,per,perceive,perceived,percent,percentage,percentages,percentile,perception,perceptions,perceptive,perch,perchance,perched,percolating,perennial,perfect,perfecting,perfection,perfectionist,perfectly,perfecto,perform,performance,performances,performed,performer,performers,performing,performs,perfume,perfumed,perfumes,perhaps,peril,perils,perimeter,perimeters,period,periodic,periods,peripheral,periphery,periscope,perish,perishable,perished,perjure,perjured,perjury,perk,perks,perky,perm,permanent,permanently,permissible,permission,permit,permits,permitted,permitting,peroxide,perpetrate,perpetrated,perpetrator,perpetrators,perpetual,perpetuating,perry,persecute,persecuted,persecuting,persist,persistence,persistent,persists,persnickety,person,persona,personable,personal,personalities,personality,personalize,personalized,personally,personals,personified,personnel,persons,perspective,perspectives,perspiration,persuade,persuaded,persuasion,persuasive,pertaining,pertains,pertinent,perturbed,peruse,perverse,perversion,pervert,perverted,perverts,pesky,pesos,pessimist,pessimistic,pest,pester,pestering,pesticides,pestilence,pesto,pests,pet,petal,petals,peter,peters,petit,petite,petition,petitioner,petition
ing,petrified,petrol,petroleum,pets,petticoat,petting,petty,pew,pewter,pfft,phantom,phantoms,pharaoh,pharaohs,pharmaceutical,pharmaceuticals,pharmacist,pharmacy,phase,phased,phases,phasing,pheasant,pheasants,phenomena,phenomenal,phenomenally,phenomenon,pheromones,phew,phi,philandering,philanthropist,philharmonic,philosopher,philosophers,philosophical,philosophies,philosophy,phobia,phobias,phobic,phoebe,phoenix,phone,phoned,phones,phoney,phonies,phony,phooey,phosphate,phosphorous,photo,photocopy,photogenic,photograph,photographed,photographer,photographers,photographic,photographing,photographs,photography,photos,phrase,phrases,physic,physical,physically,physicals,physician,physicist,physicists,physics,physiological,physiologically,physiology,physique,pi,pianist,piano,pianos,piazza,pic,pick,picked,picker,picket,picketing,picking,pickings,pickle,pickled,pickles,pickpocket,pickpockets,picks,pickup,pickups,picky,picnic,picnics,picture,pictured,pictures,picturing,piddles,piddling,pie,piece,pieced,pieces,piecing,pied,pier,pierce,pierced,piercing,pies,piffle,pig,pigeon,pigeons,piggies,piggy,piggyback,pigheaded,piglet,pigs,pigskin,pigsty,pigtails,pike,pilar,pile,piled,piles,pilgrim,pilgrimage,pilgrims,piling,pill,pillage,pillar,pillars,pillow,pillows,pills,pilot,pilots,pimp,pimped,pimping,pimple,pimples,pimply,pin,pina,pinafore,pinata,pinball,pinch,pinched,pinches,pinching,pincushion,pine,pineapple,pineapples,pinecone,pines,ping,pinhead,pinheads,pining,pink,pinkie,pinks,pinky,pinned,pinning,pinochle,pinot,pinpoint,pinpointed,pins,pint,pinto,pints,pioneer,pious,pip,pipe,pipeline,piper,pipes,piping,piqued,piranhas,pirate,pirated,pirates,pis,piss,pissant,pissed,pisses,pissing,pistachio,pistachios,pistol,pistols,piston,pistons,pit,pitch,pitched,pitcher,pitchers,pitches,pitchfork,pitching,pitfalls,pithy,pitied,pitiful,pits,pittance,pitted,pitting,pity,pitying,pivot,pivotal,pixels,pixie,pixies,pixilated,pizza,pizzas,placate,place,placebos,placed,placement,places,placid,placing,plague,plagued,plagues,plaguing,plaid,plain,plainclothes,plainly,plains,plaintiff,plaintiffs,plait,plan,plane,planes,planet,planetarium,planetary,planets,planing,plank,plankton,planned,planner,planners,planning,plans,plant,plantation,plantations,planted,planting,plants,plaque,plasma,plaster,plastered,plastic,plate,plateau,plateaued,plated,platelets,plates,platform,platforms,plating,platinum,platitudes,platonic,platoon,platter,platters,platypus,plausible,play,playa,playbook,playboy,playboys,played,player,players,playful,playground,playgrounds,playhouse,playing,playmate,playoffs,playroom,plays,plaything,playthings,playtime,playwright,plaza,plea,plead,pleaded,pleading,pleadings,pleads,pleas,pleasant,pleasantly,pleasantries,please,pleased,pleaser,pleases,pleasing,pleasure,pleasures,pleasuring,pled,pledge,pledged,pledges,pledging,plenty,plethora,pliers,plight,plop,plot,plots,plotted,plotting,plow,plowed,plowing,ploy,pluck,plucked,plucking,plucky,plug,plugged,plugging,plugs,plum,plumber,plumbers,plumbing,plummet,plummeted,plummeting,plump,plums,plunder,plunge,plunging,plural,plus,pluses,plush,plutonium,pneumonia,poach,poached,poacher,poachers,poaching,pocket,pocketbook,pocketed,pocketful,pockets,pod,podiatrist,podiatry,podium,pods,poem,poems,poet,poetic,poetry,poets,poi,poignant,point,pointe,pointed,pointer,pointers,pointing,pointless,points,pointy,poise,poised,poison,poisoned,poisoning,poisonous,poisons,poke,poked,poker,pokes,pokey,poking,polar,polarity,pole,polecat,poles,police,policeman,policemen,policies,policing,policy,polio,polish,
polished,polishing,polite,politely,political,politically,politician,politicians,politics,polka,poll,pollack,pollard,polled,pollen,polling,pollock,polls,pollute,polluting,pollution,polo,poltergeist,poltergeists,poly,polyester,polygraph,polymerization,pom,pomegranate,pompoms,pompous,poncho,pond,ponder,ponds,pong,ponies,pontoon,pony,ponytail,pooch,poodle,poodles,poof,poofs,poofy,pooh,pool,pooling,pools,poolside,poop,pooped,poor,poorer,poorest,poorhouse,poorly,pop,popcorn,pope,poppa,popped,popper,poppers,poppet,poppies,popping,poppy,poppycock,pops,popular,popularity,populated,population,populations,porcelain,porch,porcupine,pore,pores,poring,pork,porky,porridge,port,portable,portal,portals,portent,porter,porterhouse,portfolio,portion,portions,portrait,portraits,portray,portrayal,portrayed,portrays,ports,pose,posed,poser,poses,posies,posing,position,positioned,positioning,positions,positive,positively,positives,posse,posses,possess,possessed,possesses,possessing,possession,possessions,possessive,possibilities,possibility,possible,possibly,possum,post,postage,postal,postcard,postcards,posted,poster,posterior,posters,posting,postman,postmark,postmaster,postmortem,postpartum,postpone,postponed,postponement,postponing,posts,posttraumatic,posture,pot,potassium,potato,potatoes,potency,potent,potential,potentially,pothead,pothole,potholes,potion,potions,pots,potsie,potted,potter,pottery,potting,potty,pouch,poultry,pounce,pound,pounder,pounding,pounds,pour,poured,pouring,pours,pout,pouting,poverty,pow,powder,powdered,powders,power,powered,powerful,powerhouse,powering,powerless,powers,pox,practical,practicality,practically,practice,practiced,practices,practicing,practise,practising,practitioner,praetorians,pragmatic,pragmatist,prairie,praise,praised,praises,praising,pralines,pram,prance,prancer,prancing,prank,pranks,prankster,prattling,pray,prayed,prayer,prayers,praying,prays,preach,preached,preacher,preachers,preaching,preachy,preamble,precaution,precautionary,precautions,preceded,precedence,precedent,precedents,preceding,precinct,precious,precipice,precise,precisely,precision,precludes,precocious,preconceived,predator,predators,predatory,predecessor,predecessors,predicament,predict,predictable,predicted,predicting,predictions,predisposed,predisposition,prednisone,preeclampsia,preemptive,preface,prefer,preferable,preferably,preference,preferences,preferred,prefers,prefix,pregnancies,pregnancy,pregnant,prehistoric,prejudice,prejudiced,prejudicial,prelim,preliminary,prelude,premarital,premature,prematurely,premed,premeditated,premier,premiere,premise,premises,premium,premiums,premonition,premonitions,prenatal,preoccupied,prep,preparation,preparations,prepare,prepared,preparing,preposterous,prepped,preppie,prepping,preppy,prerequisite,prerogative,preschool,prescribe,prescribed,prescribes,prescribing,prescription,prescriptions,presence,present,presentable,presentation,presented,presenting,presently,presents,preservation,preservatives,preserve,preserved,preserver,preserves,presets,preside,presided,presidency,president,presidential,presidents,presiding,press,pressed,presses,pressing,pressman,pressure,pressured,pressures,pressuring,prestige,prestigious,presto,presumably,presume,presumed,presuming,presumptuous,pretend,pretended,pretending,pretends,pretense,pretenses,pretentious,pretext,pretrial,prettier,prettiest,pretty,pretzel,pretzels,prevail,prevailed,prevails,prevent,preventative,prevented,preventing,prevention,preventive,prevents,preview,previous,previously,prey,preyed,preying,preys,price,priced,priceless,
prices,pricey,prick,pricked,prickly,pricks,pride,prided,priest,priesthood,priests,prim,prima,primal,primaries,primarily,primary,primate,primates,prime,primed,primer,primitive,primo,primordial,primping,prince,princely,princes,princess,princesses,principal,principals,principle,principles,print,printed,printer,printers,printing,prints,prior,priorities,prioritize,prioritizing,priority,priors,prison,prisoner,prisoners,prisons,priss,prissy,privacy,private,privately,privilege,privileged,privileges,privy,prize,prized,prizes,pro,proactive,probability,probable,probably,probate,probation,probationary,probe,probes,problem,problematic,problems,procedural,procedure,procedures,proceed,proceeded,proceeding,proceedings,proceeds,process,processed,processes,processing,procession,processional,processors,proclaimed,proclivities,procrastinate,procrastinating,procrastination,procreate,proctologist,procure,procured,prod,prodded,prodding,prodigal,produce,produced,producer,producers,produces,producing,product,production,productions,productive,productivity,products,prof,profanity,profess,professed,profession,professional,professionalism,professionally,professionals,professor,professors,profile,profiles,profiling,profit,profitable,profits,profound,profoundly,profusely,progeny,prognosis,program,programme,programmed,programmer,programming,programs,progress,progressed,progresses,progressing,progression,progressive,prohibit,prohibited,prohibition,prohibits,project,projected,projectile,projecting,projection,projections,projector,projects,proliferation,prolong,prolonged,prom,promenade,prominent,promiscuous,promise,promised,promises,promising,promo,promote,promoted,promotes,promoting,promotion,promotions,prompt,prompter,prompting,promptly,proms,prone,pronounce,pronounced,pronouns,pronto,pronunciation,proof,proofed,proofing,proofs,prop,propaganda,propane,propelled,propellers,propensity,proper,properly,properties,property,prophecies,prophecy,prophesied,prophet,prophets,prophylactic,proportion,proportional,proportioned,proportions,proposal,proposals,propose,proposed,proposes,proposing,proposition,propositioning,propped,propping,proprietary,proprietor,propriety,props,propulsion,pros,prosciutto,prose,prosecute,prosecuted,prosecuting,prosecution,prosecutor,prosecutorial,prosecutors,prospect,prospective,prospector,prospects,prosper,prosperity,prostate,prosthetic,prostitute,prostitutes,prostitution,protect,protected,protecting,protection,protections,protective,protector,protectors,protects,protein,protest,protestant,protestants,protested,protesters,protesting,protestors,protests,proteus,protocol,protocols,protons,prototype,prototypes,protracted,protruding,proud,prouder,proudest,proudly,prove,proved,proven,provenance,proverb,proverbial,proves,provide,provided,providence,provider,provides,providing,provinces,proving,provision,provisional,provisions,provocation,provocations,provocative,provoke,provoked,provoking,provolone,prowess,prowl,prowler,proximity,proxy,prude,prudence,prudent,prudes,prune,prunes,pruning,pry,prying,psalm,pseudo,psi,psst,psych,psyche,psyched,psychiatric,psychiatrist,psychiatrists,psychiatry,psychic,psychically,psycho,psychoanalysis,psychoanalyze,psychobabble,psychological,psychologically,psychologist,psychologists,psychology,psychopath,psychopathic,psychopaths,psychos,psychosis,psychosomatic,psychotherapist,psychotherapy,psychotic,psychotics,pub,puberty,pubes,pubescent,pubic,public,publically,publication,publications,publicist,publicity,publicly,publish,published,publisher,publishers,publishing,puce,puck,pucker,pud
ding,puddle,puddles,puff,puffed,puffing,puffs,puffy,pug,puke,puking,pull,pulled,puller,pulling,pulls,pulmonary,pulp,pulpit,pulsating,pulse,pulses,pummel,pump,pumped,pumping,pumpkin,pumps,pun,punch,punched,punches,punching,punchy,punctual,punctuality,punctuation,puncture,punctured,pungent,punish,punished,punishes,punishing,punishment,punishments,punitive,punk,punks,punky,puns,punt,punters,puny,pup,pupil,pupils,puppet,puppeteer,puppets,puppies,puppy,purblind,purchase,purchased,purchases,purchasing,pure,puree,purely,purer,purest,purgatory,purge,purged,purging,purification,puritan,puritanical,puritans,purity,purple,purpose,purposefully,purposely,purposes,purr,purse,pursuant,pursue,pursued,pursuing,pursuit,pursuits,purview,pus,push,pushed,pusher,pushers,pushes,pushing,pushover,pushy,puss,pussycat,put,putrid,puts,putter,putting,putty,puzzle,puzzled,puzzles,puzzling,pygmies,pygmy,pyjamas,pyramid,pyramids,pyre,pyromaniac,pyrotechnics,quack,quacks,quad,quadrant,quahog,quaint,quake,quaker,quaking,qualifications,qualified,qualifies,qualify,qualifying,qualities,quality,qualms,quandary,quantities,quantity,quantum,quarantine,quarantined,quark,quarrel,quarreled,quarry,quart,quarter,quarterback,quarterbacks,quarters,quartet,queasy,queen,queens,quell,queller,query,quest,question,questionable,questioned,questioning,questionnaire,questions,queue,quibble,quiche,quick,quicker,quickest,quickie,quickly,quicksand,quid,quiet,quieter,quietly,quilt,quilting,quilts,quince,quinine,quintessential,quintet,quints,quintuplets,quirk,quirks,quirky,quit,quite,quits,quitter,quitters,quitting,quiver,quivering,quixote,quiz,quizmaster,quizzes,quota,quotation,quote,quoted,quotes,quoth,quoting,rabbi,rabbit,rabbits,rabble,rabid,rabies,raccoons,race,raced,racer,races,racetrack,racial,racing,racism,racist,rack,racked,racket,racketeer,racketeering,racking,racks,racquet,racquetball,racy,radar,radial,radiance,radiant,radiating,radiation,radiator,radical,radically,radio,radioactive,radioed,radiologist,radiology,radios,radish,radishes,radius,raffle,raft,rafting,rag,rage,rages,ragged,raggedy,ragging,raging,rags,ragtime,rah,raid,raided,raider,raiders,raiding,raids,rail,railing,railroad,railroading,railroads,rails,rain,rainbow,raincoat,rained,rainier,raining,rains,rainstorm,rainy,raise,raised,raiser,raisers,raises,raisin,raising,raisins,rajah,rake,raked,rallied,rally,rallying,ralph,ram,ramble,rambling,rambunctious,ramifications,rammed,ramp,rampage,ramrod,ramus,ran,ranch,rancher,ranchers,rancho,rancid,rand,random,randomly,randy,rang,range,ranger,rangers,ranges,ranging,rank,ranking,rankings,ranks,ransack,ransom,rant,ranting,rants,rap,rapid,rapidly,rapids,raping,rapist,rapists,rappers,raptor,raptors,rapture,rare,rarely,rarest,raring,rarity,rascals,rash,rashes,rashly,raspberry,rat,rate,rated,rates,rath,rather,rathole,rating,ratings,ratio,ration,rational,rationalize,rationalizing,rationally,rations,rats,ratted,ratting,rattle,rattled,rattles,rattlesnake,rattlesnakes,rattling,ratty,ravage,rave,raved,raven,ravenous,ravens,ravine,raving,ravings,ravish,ravishing,raw,rawhide,ray,rayed,rays,razor,razors,re,reach,reached,reaches,reaching,reacquaint,reacquainted,react,reacted,reacting,reaction,reactionary,reactions,reactive,reactor,reactors,reacts,read,reader,readers,readily,reading,readings,readout,reads,ready,real,realise,realised,realises,realism,realist,realistic,realistically,realities,reality,realization,realize,realized,realizes,realizing,really,realm,realms,realty,ream,reamed,reap,reaper,reapers,reappear,reappeared,reappears,rear,reared,rearing,rea
rrange,rearranging,rears,reason,reasonable,reasonably,reasoned,reasoning,reasons,reassemble,reassign,reassigned,reassigning,reassignment,reassurance,reassure,reassuring,reattach,rebate,rebel,rebelling,rebellion,rebellious,rebels,rebirth,reboot,reborn,rebound,rebounds,rebuild,rebuilding,rebuilt,rebuttal,rec,recall,recalled,recalling,recant,recanted,recanting,recap,recapture,recaptured,receding,receipt,receipts,receive,received,receiver,receives,receiving,recent,recently,reception,receptionist,receptive,recess,recession,recharge,recheck,rechecked,recipe,recipes,recipient,reciprocal,recital,recitals,recitation,recite,reciting,reckless,recklessly,recklessness,reckon,reckoned,reckoning,reclaim,reclaiming,recliner,reclining,reclusive,recognise,recognised,recognition,recognizable,recognizance,recognize,recognized,recognizes,recognizing,recollection,recombinant,recommend,recommendation,recommendations,recommended,recommending,recommends,recon,reconcile,reconciled,reconciliation,reconciling,reconnaissance,reconnect,reconnected,reconnecting,reconsider,reconsidered,reconstruct,reconvene,record,recorded,recorder,recorders,recording,recordings,records,recount,recourse,recover,recovered,recovering,recovers,recovery,recreate,recreated,recreating,recreation,recreational,recrimination,recriminations,recruit,recruited,recruiter,recruiting,recruits,rectal,rectangle,rectify,rectory,rectum,recuperate,recuperating,recurring,recuse,recycle,recycles,recycling,red,reddish,redecorate,redecorating,redeem,redeeming,redefine,redemption,redevelopment,redhead,redheads,redial,redid,redirect,redneck,rednecks,redo,redone,redress,reds,redskins,reduce,reduced,reduces,reducing,reduction,redundancies,redundancy,redundant,redwood,ree,reed,reef,reefer,reefs,reek,reeking,reeks,reel,reelected,reelection,reeled,reeling,reels,reenter,reevaluate,reeve,reeves,refer,referee,reference,referenced,references,referencing,referendum,referendums,referral,referrals,referred,referring,refers,refill,refills,refined,refinement,refinery,refining,reflect,reflected,reflecting,reflection,reflective,reflects,reflex,reflexes,refocus,reform,reformed,reforms,refrain,refresh,refreshed,refreshing,refreshments,refrigerated,refrigerator,refrigerators,refueling,refuge,refugee,refugees,refund,refundable,refusal,refuse,refused,refuses,refusing,refute,regain,regained,regaining,regal,regard,regarded,regarding,regardless,regards,regatta,regency,regenerate,regenerated,regeneration,regent,reggae,regime,regimen,regiment,regimental,regimes,regina,region,regional,regionals,register,registered,registering,registrar,registration,registry,regret,regrets,regrettable,regrettably,regretted,regretting,regroup,regular,regularity,regularly,regulars,regulate,regulated,regulation,regulations,regurgitate,rehab,rehabilitate,rehabilitated,rehabilitation,rehash,rehashing,rehearsal,rehearsals,rehearse,rehearsed,rehearsing,reheat,reign,reigning,reigns,reimburse,reimbursed,rein,reincarnated,reindeer,reinforced,reinforcement,reinforcements,reinstate,reinstated,reinstatement,reinstating,reinvent,reinvented,reinventing,reiterate,reject,rejected,rejecting,rejection,rejections,rejects,rejoice,rejoicing,rejuvenate,rejuvenated,rejuvenating,rekindle,rekindled,relapse,relapsing,relate,related,relates,relating,relation,relations,relationship,relationships,relative,relatively,relatives,relativity,relax,relaxants,relaxation,relaxed,relaxes,relaxing,relay,release,released,releases,releasing,relegated,relentless,relentlessly,relevance,relevant,reliability,reliable,reliance,relic,relics,relief,relies,
relieve,relieved,relieving,religion,religious,religiously,relinquish,relinquishing,relish,relive,reliving,reload,relocate,relocation,reluctant,reluctantly,rely,relying,rem,remain,remainder,remained,remaining,remains,remake,remark,remarkable,remarkably,remarked,remarks,remarried,remarry,rematch,remedial,remedied,remedies,remedy,remember,remembered,remembering,remembers,remembrance,remind,reminded,reminder,reminders,reminding,reminds,reminisce,remission,remitting,remnants,remodeled,remodelling,remorse,remote,remotely,removal,remove,removed,remover,removes,removing,renaissance,renal,rename,render,rendered,renders,rendezvous,renegade,renege,reneging,renegotiate,renew,renewal,renewed,renewing,renounce,renovate,renovating,renovation,renovations,renown,renowned,rent,rental,rentals,rented,renting,rents,reopen,reopened,reopening,reorganize,reorganizing,rep,repaid,repaint,repair,repaired,repairing,repairman,repairs,reparations,repartee,repay,repayment,repeal,repeat,repeated,repeatedly,repeating,repellent,repent,repentance,repercussions,repertoire,repetition,repetitious,repetitive,rephrase,replace,replaceable,replaced,replacement,replacements,replacing,replay,replaying,replenish,replica,replicate,replicating,replied,replies,reply,report,reported,reportedly,reporter,reporters,reporting,reports,repository,repossess,represent,representation,representations,representative,representatives,represented,representing,represents,repress,repressed,repression,reprieve,reprimand,reprimanded,reprisal,reprisals,reproach,reprobate,reproduction,reproductive,reprogram,reprogramming,reps,reptile,reptiles,reptilian,republic,republican,republicans,repugnant,repulsive,reputation,repute,reputed,request,requested,requesting,requests,requiem,require,required,requirement,requirements,requires,requiring,requisite,requisition,requisitions,reread,reroute,rerouted,reruns,res,reschedule,rescheduled,rescinded,rescue,rescued,rescuer,rescuers,rescues,rescuing,research,researcher,researchers,researching,reseda,resemblance,resemble,resembles,resembling,resent,resented,resentful,resenting,resentment,resentments,resents,reservation,reservations,reserve,reserved,reserves,reserving,reset,resetting,reshoot,reshoots,residence,residences,residency,resident,residents,resides,residing,residual,residue,resign,resignation,resigned,resigning,resilient,resin,resist,resistance,resistant,resisted,resisting,resolute,resolution,resolve,resolved,resolving,resonance,resort,resorted,resorts,resource,resourceful,resources,respect,respectability,respectable,respected,respectful,respectfully,respecting,respectively,respects,respiration,respirations,respirator,respiratory,respond,responded,responding,responds,response,responses,responsibilities,responsibility,responsible,responsibly,responsive,rest,restart,restaurant,restaurants,restaurateur,rested,restful,resting,restitution,restless,restorative,restore,restored,restoring,restrain,restraining,restraint,restraints,restrict,restricted,restriction,restrictions,restroom,restrooms,restructuring,rests,result,resulted,resulting,results,resume,resumed,resumes,resurfaced,resurrection,retail,retailers,retain,retained,retainer,retaining,retake,retaliate,retaliated,retaliating,retaliatory,retard,retarded,retest,rethink,rethinking,rethought,retina,retinal,retinas,retire,retired,retirement,retiring,retort,retract,retractable,retraction,retreat,retreated,retreating,retreats,retribution,retrieval,retrieve,retrieved,retrieving,retro,retrofit,retrograde,return,returned,returning,returns,reunion,reunions,reunite,reunited,reunit
ing,rev,revamp,reveal,revealed,revealing,reveals,revelation,revelations,reveling,revels,revenge,revenue,revenues,revere,revered,reverence,reverend,reverently,reversal,reverse,reversed,reversible,revert,reverts,review,reviewed,reviewer,reviewing,reviews,revise,revised,revising,revisions,revisit,revival,revive,revived,reviving,revoke,revoked,revoking,revolting,revolution,revolutionaries,revolutionary,revolutionize,revolutions,revolve,revolved,revolver,revolves,revolving,revulsion,reward,rewarded,rewarding,rewards,rewind,rewrite,rewrites,rewriting,rewritten,rewrote,rex,rhetorical,rhinestone,rhinestones,rhino,rhinoceros,rhyme,rhymed,rhymes,rhythm,rhythms,rialto,rib,ribbed,ribbon,ribbons,ribs,rice,rich,richer,riches,richest,rick,rickety,rickey,ricks,rickshaw,ricochet,rid,riddance,ridden,ridding,riddle,riddled,riddler,riddles,ride,rider,riders,rides,ridge,ridicule,ridiculed,ridiculous,ridiculously,riding,rife,riff,rifle,rifles,rifling,rift,rig,rigged,rigging,right,righteous,righteousness,rightful,rightfully,rightly,righto,rights,righty,rigid,rigor,rigorous,rigs,rile,riled,riley,rim,rin,ring,ringer,ringers,ringing,rings,ringside,rink,rinse,rinsing,rioja,riot,rioting,riots,rip,ripe,ripped,ripper,ripping,ripple,ripples,rippling,rips,rise,risen,rises,rising,risk,risked,risking,risks,risky,risotto,rite,rites,ritter,ritual,ritualistic,rituals,ritz,ritzy,rival,rivalry,rivals,river,riverbank,rivers,riverside,riveted,riveting,riviera,roach,road,roadblock,roadblocks,roadhouse,roadie,roadies,roads,roadster,roadway,roam,roaming,roar,roaring,roast,roasted,roasting,roasts,rob,robbed,robber,robberies,robbers,robbery,robbin,robbing,robbins,robe,robes,robin,robins,robot,robotic,robots,robs,robust,rock,rocked,rocker,rockers,rocket,rockets,rocking,rocks,rocky,rod,rode,rodent,rodents,rodeo,rodman,rods,roger,rogers,rogue,rogues,role,roles,rolf,roll,rolled,roller,rollers,rolling,rolls,rom,roman,romance,romances,romancing,romantic,romantically,romanticize,romeo,romp,romper,romping,roof,roofer,roofs,rooftop,rooftops,rook,rookie,rookies,room,roomful,roomie,rooming,roommate,roommates,rooms,roomy,roost,rooster,roosters,root,rooted,rooting,roots,rope,roped,ropes,roscoe,rose,rosebud,rosebuds,rosebush,rosemary,roses,rosin,roster,rosy,rot,rotary,rotate,rotated,rotates,rotating,rotation,rotisserie,roto,rots,rotted,rotten,rotting,rotunda,rouge,rough,roughage,rougher,roughing,roughly,roughnecks,roughriders,roulette,round,roundabout,rounded,rounding,rounds,rouse,rousing,roust,rousted,route,routed,router,routes,routine,routinely,routines,routing,rover,roving,row,rowan,rowboat,rowdy,rowing,rows,royal,royally,royals,royalties,royalty,rub,rubbed,rubber,rubbers,rubbing,rubbish,rubble,rube,rubes,rubies,rubs,ruby,ruckus,rudder,rude,rudely,rudeness,ruder,rudimentary,rue,ruff,ruffians,ruffle,ruffled,ruffles,rug,rugby,rugged,ruin,ruined,ruining,ruins,rule,ruled,ruler,rulers,rules,ruling,rum,rumba,rumble,rumbling,rumblings,rummaging,rummy,rumor,rumored,rumors,rumour,rumours,rump,rumpled,rumpus,run,runaround,runaway,runaways,rundown,rune,runes,rung,runner,runners,running,runny,runoff,runs,runt,runway,rupture,ruptured,rural,ruse,rush,rushed,rushes,rushing,rust,rusted,rustle,rusty,rut,ruth,ruthless,ruthlessly,rutting,rya,rye,sabbath,sabbatical,sabe,saber,sabers,sabin,sable,sabotage,sabotaged,sabotaging,sac,saccharine,sack,sacks,sacrament,sacred,sacrifice,sacrificed,sacrifices,sacrificial,sacrificing,sacrilege,sad,saddened,sadder,saddest,saddle,saddled,sade,sadist,sadistic,sadly,sadness,safari,safe,safeguard,safeguards,safely,safer,safes,safest,
safety,saffron,saga,sage,sagging,sahib,said,sail,sailboat,sailboats,sailed,sailing,sailor,sailors,sails,saint,saintly,saints,saith,sake,sakes,saki,sal,salaam,salad,salads,salamander,salami,salaries,salary,sale,sales,salesman,salesmen,salesperson,saleswoman,salient,salina,salinas,saline,saliva,sally,salmon,salmonella,salon,saloon,salsa,salt,salted,saltines,saltwater,salty,salutations,salute,saluted,saluting,salvage,salvaged,salvaging,salvation,samaritan,same,sample,sampled,samples,sampling,samurai,sanatorium,sanctimonious,sanction,sanctioned,sanctity,sanctuary,sanctum,sand,sandal,sandals,sandalwood,sandbag,sandbar,sandbox,sanded,sanders,sanding,sandman,sandpaper,sands,sandstorm,sandwich,sandwiches,sandy,sane,sanest,sang,sangria,sanitarium,sanitary,sanitation,sanity,sank,sans,santo,santos,sap,sapiens,sapphire,sapphires,sappy,saps,saran,sarcasm,sarcastic,sarcoidosis,sarcophagus,sardine,sardines,sarge,sark,sashimi,sassy,sat,satanic,satchel,satellite,satellites,satin,satire,satisfaction,satisfactory,satisfied,satisfies,satisfy,satisfying,saturated,saturation,satyr,sauce,saucer,saucers,sauerkraut,saul,sauna,sausage,sausages,savage,savagely,savages,savannah,save,saved,saver,saves,savin,saving,savings,savior,saviour,savor,savored,savoring,savour,savvy,saw,sawdust,sawed,sawing,saws,sawyer,sax,saxophone,say,sayer,saying,sayings,sayonara,says,scab,scabby,scabs,scag,scald,scalding,scale,scaled,scales,scallions,scallop,scallops,scalp,scalped,scalpel,scalper,scalping,scam,scammed,scamming,scamp,scampered,scampi,scams,scan,scandal,scanned,scanner,scanners,scanning,scans,scant,scapegoat,scar,scarce,scarcely,scare,scarecrow,scared,scares,scarf,scarfing,scarier,scariest,scaring,scarlet,scarred,scars,scarves,scary,scat,scatter,scattered,scattering,scavenger,scavenging,scenario,scenarios,scene,scenery,scenes,scenic,scent,scented,scents,scepter,schedule,scheduled,schedules,scheduling,schematics,scheme,schemed,schemes,scheming,schiller,schizo,schizoid,schizophrenia,schizophrenic,schlep,schmo,schmooze,schmoozing,schmuck,schnapps,schnauzer,schnitzel,schnoz,scholar,scholarly,scholarship,scholarships,school,schoolboy,schooled,schoolgirl,schooling,schools,schoolteacher,schoolwork,schooner,science,sciences,scientific,scientist,scientists,scintillating,scissor,scissors,scoff,scolded,scolding,scoliosis,scone,scones,scoop,scooped,scooping,scoops,scoot,scooter,scope,scopes,scoping,scorch,scorched,scorcher,scorching,score,scoreboard,scored,scores,scoring,scorned,scorpion,scorpions,scot,scotch,scotches,scotia,scots,scottie,scoundrel,scoundrels,scoured,scourge,scout,scouted,scouting,scouts,scow,scrabble,scram,scramble,scrambled,scrambler,scrambling,scrap,scrapbook,scrape,scraped,scrapes,scraping,scrapings,scrapped,scrappy,scraps,scratch,scratched,scratches,scratching,scratchy,scrawny,scream,screamed,screamer,screaming,screams,screech,screeching,screen,screened,screening,screens,screw,screwball,screwdriver,screwed,screwing,screws,screwup,screwups,screwy,scribble,scribbled,scribbling,script,scripted,scripts,scripture,scriptures,scroll,scrolls,scrooge,scrotum,scrounge,scrounging,scrub,scrubbed,scrubbing,scrubs,scrunch,scruples,scrutinized,scrutiny,scry,scuba,scud,scuff,scuffle,scullery,sculptor,sculpture,sculptures,scum,scummy,scurry,scurrying,scurvy,scuttled,scuzzy,sea,seaboard,seafood,seagull,seal,sealed,sealing,seals,seam,seaman,seamen,seamless,seams,seamstress,seaplane,sear,search,searched,searches,searching,seared,sears,seas,seascape,seashell,seashells,season,seasonal,seasoned,seasons,seat,seated,seating,seats,seaweed,sec,
secluded,seclusion,second,secondary,seconded,secondly,seconds,secrecy,secret,secretarial,secretaries,secretary,secretive,secretly,secrets,section,sections,sector,sectors,secure,secured,securely,securing,securities,security,sedan,sedate,sedated,sedation,sedative,sedatives,seduce,seduced,seduces,seducing,seduction,seductive,see,seed,seeds,seedy,seeing,seek,seeker,seekers,seeking,seeks,seem,seemed,seemingly,seems,seen,seep,seeping,seer,sees,seesaw,seg,segment,segments,segue,seismic,seize,seized,seizes,seizing,seizure,seizures,seldom,select,selected,selecting,selection,selections,selective,selectman,self,selfish,selfishly,selfishness,selfless,selflessness,sell,seller,selling,sells,seltzer,selves,semantics,semblance,semen,semester,semi,semiautomatic,seminal,seminar,seminars,seminary,semple,sen,senate,senator,senators,send,sender,sending,sendoff,sends,senile,senility,senior,seniors,senor,senora,senorita,sensation,sensational,sense,sensed,senseless,senses,sensibilities,sensibility,sensible,sensing,sensitive,sensitivity,sensor,sensors,sensory,sensual,sensuous,sent,sentence,sentenced,sentences,sentencing,sentient,sentiment,sentimental,sentiments,sentinel,sentinels,sentry,separate,separated,separately,separates,separating,separation,seppuku,septic,septum,sequel,sequence,sequences,sequencing,sequestered,sequined,sequins,ser,sera,serenade,serene,serenity,serge,sergeant,serial,series,serious,seriously,seriousness,sermon,sermons,serotonin,serpent,serum,servant,servants,serve,served,server,servers,serves,service,serviced,services,servicing,serving,servings,servitude,sesame,session,sessions,sesterces,set,setback,setbacks,sets,setting,settings,settle,settled,settlement,settlements,settles,settling,setup,seven,sevens,seventeen,seventeenth,seventh,seventies,seventy,sever,several,severance,severe,severed,severely,severity,sew,sewage,sewed,sewer,sewers,sewing,sewn,sex,sexes,sexier,sexiest,sexism,sexist,sexless,sexual,sexuality,sexually,sexy,sh,sha,shabbily,shabby,shack,shackle,shackled,shackles,shad,shade,shades,shading,shadow,shadowing,shadows,shadowy,shady,shaft,shafted,shag,shagged,shagging,shaggy,shah,shake,shaken,shaker,shakers,shakes,shaking,shaky,shale,shall,shallow,shallows,shalom,shalt,sham,shaman,shambles,shame,shamed,shameful,shameless,shaming,shampoo,shamrock,shanghai,shanghaied,shank,shanks,shape,shaped,shapely,shapes,shaping,shards,share,shared,shareholder,shares,sharing,shark,sharking,sharks,sharp,sharpened,sharpener,sharpening,sharpens,sharper,sharpest,sharply,sharpshooters,shat,shatter,shattered,shattering,shave,shaved,shaven,shaver,shaves,shaving,shaw,shawl,shawn,shay,she,shea,shear,shebang,shed,shedding,sheds,sheen,sheeny,sheep,sheepskin,sheer,sheet,sheets,sheila,sheldrake,shelf,shell,shelled,shellfish,shelling,shells,shelly,shelter,sheltered,shelters,shelve,shelves,shenanigans,shepherd,shepherds,sheriff,sherlock,sherry,shes,shh,shield,shielded,shields,shift,shifted,shifter,shifting,shifts,shifty,shill,shillings,shimmer,shimmering,shimmy,shin,shindig,shine,shines,shingle,shingles,shining,shins,shiny,ship,shipment,shipments,shipped,shipping,ships,shipshape,shipwreck,shipwrecked,shipyard,shirking,shirt,shirtless,shirts,shiv,shiva,shiver,shivering,shivers,shoal,shock,shocked,shocker,shocking,shockingly,shocks,shoddy,shoe,shoehorn,shoelace,shoelaces,shoemaker,shoes,shone,shoo,shook,shoot,shooter,shooters,shooting,shootings,shootout,shoots,shop,shopkeeper,shoplifter,shoplifters,shoplifting,shopped,shopper,shopping,shops,shore,shoreline,shores,short,shortage,shortcake,shortcomings,shortcut,shorted,
shorten,shorter,shortest,shorthand,shorthanded,shortly,shortness,shorts,shortsighted,shortstop,shorty,shot,shotgun,shotguns,shots,should,shoulder,shoulders,shout,shouted,shouting,shouts,shove,shoved,shovel,shoveled,shoveling,shovels,shoves,shoving,show,showbiz,showcase,showdown,showed,shower,showered,showering,showers,showgirl,showing,shown,showoff,showroom,shows,showstopper,showy,shrapnel,shred,shredded,shredder,shreds,shrew,shrewd,shriek,shrieking,shrimp,shrine,shrink,shrinkage,shrinking,shrinks,shrivel,shriveled,shroud,shrouds,shrub,shrubbery,shrubs,shrug,shrugged,shrugging,shrugs,shrunk,shrunken,shtick,shuck,shucks,shuffle,shuffled,shuffling,shun,shunned,shunt,shush,shushing,shut,shutdown,shuts,shutters,shutting,shuttle,shuttles,shy,shylock,shyness,si,sibling,siblings,sic,sicced,sick,sicken,sickened,sickening,sickens,sicker,sickest,sickly,sickness,sicko,sickos,side,sidebar,sideboard,sideburns,sidecar,sided,sidekick,sidekicks,sideline,sidelines,sides,sideshow,sidetracked,sidewalk,sidewalks,sideways,sidewinder,siding,sidle,siege,sierra,siesta,sift,sifting,sigh,sighing,sighs,sight,sighted,sighting,sightings,sightless,sights,sigma,sign,signal,signaled,signaling,signals,signature,signatures,signed,significance,significant,significantly,signifies,signify,signifying,signing,signor,signora,signore,signs,sikes,silence,silenced,silences,silent,silicone,silk,silken,silks,silky,sill,silliest,silliness,silly,silva,silver,silverware,silvery,sim,similar,similarities,similarity,similarly,simmer,simony,simp,simple,simpler,simplest,simpleton,simpletons,simplicity,simplify,simplistic,simply,sims,simulate,simulated,simulates,simulation,simulations,simulator,simultaneously,sin,since,sincere,sincerely,sincerest,sincerity,sine,sinful,sing,singe,singed,singer,singers,singing,single,singles,singleton,singling,sings,sinister,sink,sinker,sinking,sinks,sinned,sinner,sinners,sins,sinus,sinuses,sip,siphoning,sipped,sipping,sir,sire,siree,siren,sirens,sirloin,sirree,sirs,sis,sissies,sissy,sister,sisterhood,sisters,sit,sitcom,sitcoms,site,sites,sits,sitter,sitters,sitting,situated,situation,situations,six,sixes,sixpence,sixteen,sixteenth,sixth,sixties,sixty,sizable,size,sizeable,sized,sizes,sizing,sizzle,sizzling,skag,skate,skateboard,skateboards,skated,skater,skaters,skates,skating,skedaddle,skeeters,skeletal,skeleton,skeletons,skeptic,skeptical,skepticism,sketch,sketches,sketching,sketchy,skewed,skewer,skewered,ski,skid,skidded,skids,skied,skier,skies,skiff,skiing,skill,skilled,skillet,skillful,skills,skim,skimmed,skimming,skimp,skimpy,skin,skinned,skinner,skinny,skins,skip,skipped,skipper,skipping,skirmish,skirmishes,skirt,skirts,skis,skit,skittish,skittles,skivvies,skulk,skull,skulls,skunk,sky,skylight,skyrocket,skyscraper,skyscrapers,slack,slacker,slackers,slacks,slam,slammed,slammer,slamming,slams,slander,slanderous,slang,slant,slanted,slap,slapped,slapping,slaps,slash,slashed,slasher,slashing,slate,slated,slater,slaughter,slaughtered,slaughterhouse,slaughtering,slave,slaved,slavery,slaves,slaw,slay,slayed,slayer,slayers,slaying,sleaze,sleazebag,sleazeball,sleazy,sled,sledding,sledge,sledgehammer,sleek,sleep,sleeper,sleepers,sleeping,sleepless,sleepover,sleepovers,sleeps,sleepwalk,sleepwalking,sleepy,sleepyhead,sleet,sleeve,sleeves,sleigh,sleight,slender,slept,sleuth,slew,slice,sliced,slicer,slices,slick,slicker,slid,slide,slider,slides,sliding,slight,slighted,slightest,slightly,slim,slime,slimmer,slimming,slimy,sling,slinging,slings,slingshot,slink,slinking,slinky,slip,slipped,slipper,slippers,slippery,slipping
,slips,slit,slither,slithered,slithering,sliver,slob,slobbering,slogan,slogans,slop,slope,slopes,sloppy,sloshed,slot,slots,slouch,slough,slow,slowed,slower,slowest,slowing,slowly,sludge,slug,slugged,slugger,slugging,sluggish,slugs,slum,slumber,slumlord,slumming,slumped,slung,slur,slurp,slurping,slush,sly,smack,smacked,smackers,smacks,small,smaller,smallest,smart,smartass,smarter,smartest,smarts,smarty,smash,smashed,smashes,smashing,smear,smeared,smearing,smears,smell,smelled,smelling,smells,smelly,smelt,smidgen,smile,smiled,smiles,smiley,smiling,smirk,smirking,smite,smith,smithereens,smithers,smitten,smock,smog,smoke,smoked,smoker,smokers,smokes,smokey,smoking,smoky,smoldering,smooch,smooching,smoochy,smooth,smoother,smoothest,smoothie,smoothly,smorgasbord,smother,smothered,smothering,smudge,smudged,smug,smuggle,smuggled,smuggler,smugglers,smuggling,smugness,snack,snacking,snacks,snag,snagged,snagging,snags,snails,snake,snakebite,snakes,snakeskin,snap,snapped,snapper,snapping,snappy,snaps,snapshot,snapshots,snarky,snarl,snarling,snatch,snatched,snatcher,snatchers,snatches,snatching,snazzy,sneak,sneaked,sneaker,sneakers,sneaking,sneaks,sneaky,sneer,sneeze,sneezed,sneezing,snicker,snickering,snickers,snide,sniff,sniffed,sniffing,sniffles,sniffling,snifter,snip,snipe,sniper,snipers,sniping,snit,snitch,snitches,snivelling,snob,snobby,snook,snoop,snooping,snoopy,snooty,snooze,snore,snores,snoring,snorkel,snorkeling,snort,snorted,snorting,snot,snotty,snout,snow,snowball,snowballed,snowballing,snowballs,snowbank,snowboard,snowed,snowflake,snowflakes,snowing,snowman,snowmen,snowmobile,snowmobiles,snowstorm,snowy,snub,snubbed,snuck,snuff,snug,snuggle,snuggled,snuggles,snuggling,so,soak,soaked,soaking,soap,soapbox,soaps,soapy,soar,soared,soaring,soars,sob,sobbing,sober,sobered,sobering,soberly,sobriety,sobs,soccer,sociable,social,socialism,socialite,socialize,socialized,socializing,socially,society,sociology,sociopath,sociopathic,sock,socked,socket,sockets,socks,sod,soda,sodas,sodding,sodium,sodomy,sofa,sofas,soft,softball,soften,softener,softening,softer,softest,softly,softness,software,softy,soggy,soil,soiled,soiree,sol,solace,solar,solarium,sold,soldier,soldiers,sole,solely,solemn,solenoid,soles,solicit,solicitation,solicited,soliciting,solicitor,solid,solidify,solitaire,solitary,solitude,solo,solution,solutions,solve,solved,solvent,solves,solving,somber,some,somebody,someday,somehow,someone,someplace,somerset,something,sometime,sometimes,someway,somewhat,somewhere,somewheres,son,sonar,sonata,song,songs,songwriter,sonics,sonnet,sonnets,sonny,sonogram,sons,sook,soon,sooner,soonest,soot,soothe,soothes,soothing,soothsayer,sop,sophisticated,sophistication,sophomore,soprano,sopranos,sorbet,sorcerers,sorcery,sordid,sore,sorel,sores,sororities,sorority,sorrel,sorrow,sorrows,sorry,sort,sorted,sorting,sorts,sos,sot,souffle,sought,soul,soulful,soulless,souls,sound,sounded,sounding,soundly,soundproof,sounds,soundstage,soup,soups,soupy,sour,source,sources,sourpuss,sous,souse,south,southbound,southeast,southern,southwest,souvenir,souvenirs,souvlaki,sovereign,sovereignty,soviet,soviets,sow,sowing,sown,sox,soy,soybean,spa,space,spacecraft,spaced,spaces,spaceship,spacey,spacing,spackle,spade,spades,spaghetti,span,spandex,spaniel,spank,spanked,spanking,spans,spar,spare,spared,spareribs,sparing,spark,sparked,sparkle,sparklers,sparkles,sparkling,sparkly,sparks,sparky,sparring,sparrow,spartan,spas,spasm,spasms,spastic,spat,spate,spatial,spatula,spawn,spawned,spaz,speak,speakeasy,speaker,speakerphone,speakers,speaking
,speaks,spear,spears,special,specialise,specialist,specialists,specialize,specialized,specializes,specializing,specially,specials,specialties,specialty,species,specific,specifically,specifications,specifics,specified,specify,specimen,specimens,speck,specs,spectacle,spectacles,spectacular,spectacularly,spectator,spectators,specter,spectra,spectral,spectre,spectrum,speculate,speculating,speculation,speculations,speculative,speech,speeches,speechless,speed,speedboat,speedily,speeding,speedo,speedometer,speedos,speeds,speedway,speedy,spell,spelled,speller,spelling,spells,spelt,spence,spencer,spencers,spend,spender,spending,spends,spent,sperm,spew,spewing,sphere,sphinx,spice,spices,spicy,spider,spiders,spied,spiel,spies,spike,spiked,spikes,spikey,spiking,spiky,spill,spilled,spilling,spills,spin,spinach,spinal,spindly,spine,spineless,spinner,spinning,spins,spinster,spiny,spiral,spiraling,spirals,spirit,spirited,spirits,spiritual,spirituality,spit,spite,spiteful,spitfire,spits,spitting,spitz,splash,splashing,splashy,splat,splatter,spleen,splendid,splendidly,splendor,spliced,splicing,splint,splinter,splinters,split,splits,splitting,splurge,spoil,spoiled,spoiler,spoiling,spoils,spoilsport,spoke,spoken,spokes,spokesman,spokesperson,sponge,sponges,sponsor,sponsored,sponsoring,sponsors,sponsorship,spontaneity,spontaneous,spontaneously,spook,spooked,spooking,spooks,spooky,spool,spoon,spoonful,spooning,spoons,spores,sport,sporting,sports,sportsmanship,sportswear,sporty,spot,spotless,spotlight,spotlights,spots,spotted,spotter,spotters,spotting,spotty,spouse,spouting,sprain,sprained,sprang,sprawled,spray,sprayed,spraying,sprays,spread,spreading,spreads,spreadsheet,spreadsheets,spree,sprightly,spring,springer,springing,springs,springtime,sprinkle,sprinkled,sprinkler,sprinklers,sprinkles,sprint,sprints,sprite,spritzer,sprouted,sprouting,sprouts,spruce,sprung,spry,spud,spun,spunk,spunky,spur,spurred,spurs,spurt,sputnik,spy,spying,squabble,squad,squadron,squads,squall,squalor,squander,squandered,square,squared,squarely,squares,squaring,squash,squashed,squashing,squat,squatter,squatters,squatting,squaw,squawk,squawking,squeak,squeaking,squeaks,squeaky,squeal,squealed,squeegee,squeeze,squeezed,squeezes,squeezing,squid,squiggle,squiggly,squinting,squire,squirm,squirrel,squirrels,squirt,squirts,squish,squished,squishing,sri,stab,stabbed,stabbing,stability,stabilize,stabilized,stabilizing,stable,stables,stack,stacked,stacks,stadium,staff,staffed,staffer,staffers,stag,stage,staged,stages,stagger,staggered,staggering,staggeringly,staging,stagnant,stain,stained,stainless,stains,stair,staircase,stairs,stairway,stairwell,stake,staked,stakeout,stakeouts,stakes,staking,stale,stalemate,stalk,stalked,stalker,stalkers,stalking,stalks,stall,stalled,stalling,stallion,stamina,stammering,stamp,stamped,stampede,stamper,stamps,stance,stand,standard,standardized,standards,standby,standing,standish,standoff,standpoint,stands,standstill,standup,stang,stanza,staple,stapled,stapler,star,starboard,stardom,stardust,stare,stared,stares,staring,stark,starlet,starlets,starling,starred,starring,starry,stars,starship,start,started,starter,starters,starting,startle,startled,startling,starts,startup,starvation,starve,starved,starving,stash,stashed,stasis,stat,state,stated,statehood,stately,statement,statements,stateroom,states,stateside,statesmen,statewide,static,stating,station,stationary,stationed,stationery,stations,statistic,statistical,statistically,statistics,stats,statuary,statue,statues,stature,status,statute,statutes,staunch,stave,stay
,stayed,staying,stays,stead,steadfast,steadily,steady,steak,steaks,steal,stealer,stealing,steals,stealth,stealthy,steam,steamed,steamer,steaming,steamroll,steamroller,steamy,steed,steel,steely,steep,steer,steerage,steered,steering,stein,stella,stellar,stem,stemmed,stems,stench,steno,stenographer,step,stepfather,stepmother,steppe,stepped,stepping,steps,stepson,stereo,stereotype,stereotypes,sterile,sterilize,sterilized,sterling,stern,sterner,steroid,steroids,stethoscope,stew,stewardess,stewardesses,stewards,stewed,stick,sticker,stickers,sticking,stickler,sticks,stickup,sticky,stiff,stiffer,stifle,stifler,stifling,stigma,stigmata,stiles,stiletto,stilettos,still,stillness,stills,stilts,stimulated,stimulating,stimulation,stimuli,stimulus,sting,stinger,stinging,stingray,stings,stingy,stink,stinking,stinks,stinky,stint,stipulate,stipulated,stipulates,stipulation,stir,stirred,stirring,stirs,stitch,stitched,stitches,stitching,stock,stockbroker,stockbrokers,stocked,stockholder,stockholders,stocking,stockings,stockpile,stocks,stocky,stodgy,stoic,stoke,stoked,stokes,stole,stolen,stomach,stomachache,stomachs,stomp,stomped,stomper,stomping,stone,stoned,stoner,stones,stonewalled,stoney,stony,stood,stooge,stool,stoolie,stools,stoop,stooped,stooping,stop,stopped,stopping,stops,stopwatch,storage,store,stored,storeroom,stores,stories,storing,stork,storm,stormed,storming,storms,stormy,story,storybook,stove,stow,stowaway,stowaways,stowed,straddle,straddling,straight,straightaway,straighten,straightened,straightening,straightforward,straights,strain,strained,straining,strains,strait,straitjacket,straits,strand,stranded,strange,strangely,strangeness,stranger,strangers,strangest,strangle,strangled,stranglehold,strangler,strangling,strangulation,strap,strapless,strapped,strapping,straps,strategic,strategically,strategies,strategist,strategize,strategizing,strategy,straw,strawberries,strawberry,straws,stray,streak,streaks,stream,streaming,streamlined,streams,street,streetcar,streetlights,streets,streetwalker,strength,strengthen,strengths,strenuous,strenuously,strep,stress,stressed,stresses,stressful,stressing,stretch,stretched,stretcher,stretches,stretching,strewn,stricken,strict,strictly,stride,strides,strife,strike,strikes,striking,strikingly,string,stringing,strings,strip,stripe,striped,stripes,striping,stripped,stripper,strippers,stripping,strips,striptease,strive,striving,strobe,stroke,strokes,stroll,stroller,strolling,strolls,strong,strongbox,stronger,strongest,stronghold,strongly,struck,structural,structure,structured,struggle,struggled,struggles,struggling,strummer,strumpet,strung,strut,struts,strutting,strychnine,stub,stubbed,stubble,stubborn,stubs,stuck,stud,studded,student,students,studied,studies,studio,studios,studious,studly,studs,study,studying,stuff,stuffed,stuffing,stuffs,stuffy,stumble,stumbled,stumbles,stumbling,stump,stumped,stumper,stumps,stun,stung,stunk,stunned,stunning,stunningly,stunt,stunted,stuntman,stunts,stupendous,stupid,stupider,stupidest,stupidity,stupidly,stupor,sturdy,sturgeon,stutter,stuttering,sty,style,styled,styles,stylings,stylish,stylist,stymied,suave,sub,subatomic,subbasement,subbing,subcommittee,subconscious,subconsciously,subdued,subdural,subject,subjected,subjecting,subjective,subjects,subjugation,sublet,subletting,sublevel,sublimating,sublime,submarine,submarines,submersible,submissive,submit,submitted,submitting,subordinate,subpoena,subpoenaed,subpoenas,subscription,subscriptions,subsequently,subservient,subsided,subsidiary,subsidies,subsidize,subsidy,substance,substance
s,substantial,substantially,substantiate,substantive,substation,substitute,substituting,subterfuge,subterranean,subtext,subtitle,subtitled,subtitles,subtle,subtlety,subtly,suburb,suburban,suburbia,suburbs,subversive,subvert,subway,subways,subzero,succeed,succeeded,succeeds,success,successes,successful,successfully,succession,succinct,succotash,succubus,succulent,succumb,succumbed,such,suck,sucked,sucker,suckered,suckers,sucking,sucks,suction,sudden,suddenly,suds,sue,sued,suede,sues,suffer,suffered,suffering,suffers,suffice,sufficient,sufficiently,suffocate,suffocated,suffocating,suffocation,sugar,sugarcoat,sugarplum,sugars,sugary,suggest,suggested,suggesting,suggestion,suggestions,suggestive,suggests,suicidal,suicide,suicides,suing,suit,suitable,suitcase,suitcases,suite,suited,suites,suitor,suitors,suits,sulfur,sulk,sulking,sullen,sultan,sultry,sum,summarily,summarize,summary,summation,summed,summer,summers,summertime,summit,summon,summoned,summoning,summons,sumo,sump,sumptuous,sums,sun,sunbathing,sunblock,sunburn,sundae,sundaes,sundown,sundress,sunflower,sunflowers,sung,sunglasses,sunk,sunken,sunless,sunlight,sunning,sunny,sunrise,sunroom,sunscreen,sunset,sunsets,sunshine,sunspots,sunstroke,suntan,sup,super,superb,supercollider,superego,superficial,superfluous,superhero,superheroes,superintendent,superior,superiors,superman,supermarket,supermarkets,supermodel,supernatural,supernova,superpower,superpowers,superstar,superstars,superstition,superstitious,supervise,supervised,supervising,supervision,supervisor,superwoman,supper,suppertime,supple,supplement,supplements,supplied,supplier,supplies,supply,support,supported,supporter,supporting,supportive,supports,suppose,supposed,supposedly,supposition,suppress,suppressed,suppression,supremacy,supreme,supremely,sure,surely,surf,surface,surfaced,surfaces,surfboard,surfed,surfer,surfers,surfing,surge,surgeon,surgeons,surgeries,surgery,surgical,surgically,surging,surly,surname,surpass,surpassed,surplus,surprise,surprised,surprises,surprising,surprisingly,surreal,surrender,surrendered,surrendering,surrogate,surround,surrounded,surrounding,surroundings,surrounds,surveillance,survey,survival,survive,survived,survives,surviving,survivor,survivors,susceptible,sushi,suspect,suspected,suspects,suspend,suspended,suspenders,suspending,suspense,suspension,suspicion,suspicions,suspicious,suss,sustain,sustained,sustaining,sustenance,sutra,sutures,swab,swabs,swaddling,swallow,swallowed,swallowing,swallows,swam,swami,swamp,swamped,swamps,swan,swank,swanky,swans,swap,swapped,swapping,swarm,swarming,swastika,swat,swatch,swatches,swatting,sway,swayed,swaying,swear,swearing,swears,sweat,sweater,sweaters,sweating,sweatpants,sweats,sweatshirt,sweatshirts,sweatshop,sweatshops,sweaty,swede,swedes,sweep,sweeper,sweeping,sweeps,sweepstakes,sweet,sweetbreads,sweeter,sweetest,sweetheart,sweethearts,sweetie,sweetly,sweetness,sweets,swell,swelled,swelling,swells,sweltering,swept,swerve,swerving,swift,swig,swill,swilling,swim,swimmer,swimmers,swimming,swimmingly,swims,swimsuit,swimsuits,swindled,swine,swing,swingers,swinging,swings,swipe,swiped,swiping,swirl,swirling,swirly,swiss,switch,switchblade,switched,switcheroo,switches,switching,switchman,swivel,swizzle,swollen,swooning,swoop,swooped,swooping,swoops,sword,swordfish,swords,swore,sworn,swung,sycamore,sycophant,sykes,syllable,syllabus,symbiote,symbiotic,symbol,symbolic,symbolically,symbolism,symbolize,symbolizes,symbols,symmetrical,sympathetic,sympathies,sympathize,sympathy,symphony,symptom,symptomatic,symptoms,synagogue,sy
napses,synaptic,sync,synch,synchronicity,synchronize,synchronized,syndicate,syndicated,syndrome,syne,synergy,synonymous,synthesis,synthesize,synthetic,syphilis,syphon,syringe,syrup,system,systematic,systematically,systemic,systems,systolic,ta,tab,tabby,table,tableau,tablecloth,tablecloths,tables,tablespoon,tablet,tabloid,tabloids,tabs,tach,tachycardia,tacit,tack,tacked,tackle,tackled,tackles,tackling,tacks,tacky,taco,tacos,tact,tactful,tactic,tactical,tactics,tactile,tad,tadpole,tae,taffeta,taffy,tag,tagged,tagging,tags,tail,tailed,tailing,tailor,tailored,tailors,tails,tailspin,taint,tainted,taj,take,takedown,taken,takeoff,takeout,takeover,taker,takers,takes,takin,taking,talcum,tale,talent,talented,talentless,talents,tales,talisman,talk,talkative,talked,talker,talkie,talking,talks,talky,tall,taller,tallest,tallied,tally,talons,tamales,tambourine,tame,tammy,tamper,tampered,tampering,tampon,tampons,tan,tandem,tangent,tangerine,tangible,tangle,tangled,tangling,tango,tank,tanked,tanker,tankers,tanking,tanks,tanned,tanner,tanning,tantric,tantrum,tantrums,tap,tapas,tape,taped,tapered,tapes,tapeworm,taping,tapioca,tapped,tapping,tar,tarantulas,tardiness,tardy,target,targeted,targeting,targets,tarmac,tarnish,tarnished,tarot,tarp,tarragon,tarred,tart,tartar,tarts,tarzan,tas,task,tasks,tassels,taste,tasted,tasteful,tastes,tasting,tasty,tat,tate,tater,tattered,tattle,tattoo,tattooed,tattoos,tau,taught,taunt,taunted,taunting,taunts,taut,tavern,tawdry,tax,taxed,taxes,taxi,taxicab,taxidermist,taxing,taxis,taxpayer,taxpayers,tea,teach,teacher,teachers,teaches,teaching,teacup,teal,team,teamed,teaming,teammate,teammates,teams,teamsters,teamwork,teapot,tear,tearful,tearing,tears,teary,teas,tease,teased,teasing,teaspoon,teaspoons,technical,technicalities,technicality,technically,technician,technicians,technique,techniques,technological,technologically,technologies,technology,ted,teddy,tedious,tee,teed,teeming,teen,teenage,teenaged,teenager,teenagers,teens,teensy,teeny,teenybopper,teetering,teeth,telegram,telegrams,telegraph,telekinesis,telekinetic,telemarketing,telemetry,telepathic,telepathically,telepathy,telephone,telephones,teleport,teleportation,telescope,telescopic,telethon,televised,television,telex,tell,teller,tellers,telling,tells,telly,temp,temper,temperament,temperamental,temperature,temperatures,tempered,tempers,tempest,temple,tempo,temporarily,temporary,temps,tempt,temptation,temptations,tempted,tempting,temptress,ten,tenacious,tenacity,tenant,tenants,tend,tended,tendencies,tendency,tender,tenderness,tending,tendon,tendonitis,tendons,tends,tenement,tenets,tenner,tennis,tenor,tenors,tens,tense,tension,tensions,tent,tentative,tenth,tenths,tents,tenuous,tenure,tepid,tequila,teriyaki,term,terminal,terminate,terminated,terminating,termination,terminator,terminology,terminus,termites,terms,terra,terrace,terraforming,terrain,terrestrial,terrible,terribly,terrific,terrifically,terrified,terrifies,terrify,terrifying,territorial,territories,territory,terror,terrorism,terrorist,terrorists,terrorize,terrorized,terrorizing,terrors,terry,test,testament,tested,testified,testifies,testify,testifying,testimonies,testimony,testing,testosterone,tests,testy,tet,tetanus,tether,tetherball,texas,text,textbook,textbooks,textile,textiles,texts,texture,than,thank,thanked,thankful,thankfully,thanking,thankless,thanks,thanksgiving,thanksgivings,that,thataway,thatcher,thaw,thawed,thawing,the,theater,theaters,theatre,theatres,theatrical,theatrics,thee,theft,thefts,their,theirs,them,theme,themed,themes,themselves,then,theologia
n,theological,theology,theorem,theoretical,theoretically,theories,theory,therapeutic,therapist,therapists,therapy,there,thereafter,thereby,therefore,therein,thereof,theres,thermal,thermometer,thermonuclear,thermos,thermostat,thesaurus,these,theses,thesis,thespian,theta,they,thick,thickening,thickens,thicker,thief,thieves,thigh,thighs,thin,thine,thing,things,think,thinkers,thinking,thinks,thinly,thinner,thinners,thinning,thins,third,thirds,thirst,thirsty,thirteen,thirteenth,thirties,thirtieth,thirty,this,thistle,tho,thong,thongs,thoracic,thoracotomy,thorn,thorns,thorough,thoroughly,thorpe,those,thou,though,thought,thoughtful,thoughtfully,thoughtless,thoughts,thousand,thousands,thrall,thrash,thrashing,thread,threads,thready,threat,threaten,threatened,threatening,threatens,threats,three,threesome,threshold,threw,thrice,thrift,thrill,thrilled,thriller,thrilling,thrills,thrive,thrives,thriving,throat,throats,throbbing,throne,thrones,throttle,through,throughout,throughway,throw,thrower,throwers,throwing,thrown,throws,thru,thrust,thrusters,thrusts,thruway,thug,thugs,thumb,thumbing,thumbprint,thumbs,thump,thumping,thunder,thunderbird,thunderbolt,thundering,thunderstorm,thus,thusly,thy,thyme,thyroid,thyself,ti,tiara,tibia,tic,tick,ticked,ticker,ticket,ticketed,tickets,ticking,tickle,tickles,tickling,ticklish,ticks,tidal,tidbits,tide,tides,tidings,tidy,tidying,tie,tied,tier,ties,tiff,tiffany,tiger,tigers,tight,tighten,tightened,tightening,tightens,tighter,tightly,tightness,tightrope,tights,tiki,til,tile,tiles,till,tiller,tilt,tilted,timber,timbers,timbre,time,timed,timeless,timeline,timely,timer,timers,times,timetable,timid,timing,timothy,timpani,tin,tinfoil,ting,tingle,tingling,tingly,tiniest,tinkered,tinkering,tinkle,tins,tinsel,tinted,tiny,tip,tipped,tipper,tippers,tipping,tippy,tips,tipsy,tiptoe,tiptoeing,tiramisu,tire,tired,tireless,tires,tiresome,tiring,tis,tissue,tissues,titan,titanic,titanium,titans,title,titles,tizzy,to,toad,toast,toasted,toaster,toasting,toasts,toasty,tobacco,toby,tod,today,toddle,toddler,toddlers,toddy,toe,toed,toenail,toenails,toes,toffee,tofu,toga,together,togetherness,toilet,toilets,toiling,toke,token,tokens,told,tolerable,tolerance,tolerant,tolerate,tolerated,toll,toller,tolls,tom,tomahawk,tomato,tomatoes,tomb,tombs,tombstone,tomcat,tome,tomfoolery,tommy,tomorrow,tomorrows,toms,ton,tone,toned,toner,tones,tong,tongs,tongue,tongues,tonic,tonics,tonight,tonnage,tons,tonsil,tonsils,tony,too,took,tool,toolbox,tools,toon,toons,toot,tooth,toothache,toothbrush,toothbrushes,toothed,toothpaste,toothpick,toothpicks,toots,top,topaz,topes,topic,topical,topics,topless,topnotch,topped,topping,toppings,topple,tops,topside,tor,torah,torch,torched,torches,torching,tore,torment,tormented,tormenting,torn,tornado,toro,torpedo,torpedoed,torpedoes,torque,torrent,torrid,torso,tortillas,tortious,tortoise,torture,tortured,tortures,torturing,torturous,tory,tosh,toss,tossed,tosses,tossing,total,totaled,totalled,totally,totals,tote,totem,toting,tots,touch,touchdown,touchdowns,touche,touched,touches,touching,touchstone,touchy,tough,toughen,tougher,toughest,toughness,toupee,tour,toured,touring,tourism,tourist,tourists,tournament,tournaments,tours,tow,toward,towards,towed,towel,towels,tower,towers,town,townhouse,townie,townies,towns,townsfolk,townspeople,toxic,toxicity,toxicology,toxin,toxins,toxoplasmosis,toy,toyed,toying,toys,trace,traced,tracer,traces,tracing,track,tracked,tracker,trackers,tracking,tracks,traction,tractor,tractors,trade,traded,trademark,tradeoff,trader,trades,trading,tradition,tra
ditional,traditionally,traditions,traffic,trafficker,trafficking,tragedies,tragedy,tragic,tragically,trail,trailer,trailers,trailing,trails,train,trained,trainee,trainees,trainer,training,trains,traipse,traipsing,trait,traitor,traitors,traits,trajectory,tram,tramp,trample,trampling,tramps,trance,tranquil,tranquility,tranquilizer,trans,transaction,transactions,transatlantic,transcendent,transcends,transcript,transcripts,transfer,transference,transferred,transferring,transfers,transform,transformation,transformed,transfusion,transfusions,transgenic,transgression,transgressions,transient,transistor,transit,transition,transitional,translate,translated,translating,translation,translator,translators,transmission,transmissions,transmit,transmitted,transmitter,transmitters,transparent,transpired,transplant,transponder,transport,transportation,transported,transporting,transports,transsexual,transvestite,transvestites,trap,trapeze,trapped,trapper,trapping,trappings,traps,trash,trashed,trashes,trashing,trashy,trattoria,trauma,traumas,traumatic,traumatized,traumatizing,travel,traveled,traveler,travelers,traveling,travelled,traveller,travellers,travelling,travels,traverse,travesty,trawler,tray,trays,treacherous,treachery,tread,treadmill,treads,treason,treasure,treasured,treasurer,treasures,treasury,treat,treatable,treated,treaters,treaties,treating,treatment,treatments,treats,treaty,tree,trees,trek,trellis,tremble,trembling,tremendous,tremendously,tremor,tremors,trench,trenches,trend,trends,trendy,trespass,trespassed,trespasser,trespassers,trespassing,trey,triad,trial,trials,triangle,triangular,tribal,tribe,tribulations,tribunal,tribune,tribute,triceps,trick,tricked,trickery,trickier,tricking,trickle,tricks,trickster,tricky,tricycle,trident,tried,tries,trifecta,trifle,trifled,trifling,trig,trigger,triggered,triggering,triggers,trigonometry,trillion,trilogy,trim,trimester,trimmed,trimmers,trimmings,trinity,trinket,trinkets,trio,trip,tripe,triple,tripled,triplets,triplicate,tripped,tripping,trippy,trips,trite,triumph,triumphed,triumphs,trivial,troll,trolling,trollop,trolls,trombone,troop,trooper,troopers,troops,trophies,trophy,tropic,tropical,tropics,trot,trotting,troubadour,trouble,troubled,troublemaker,troublemakers,troubles,troublesome,troubling,troupe,trouper,trouser,trousers,trout,trove,troy,truant,truce,truck,truckers,truckload,trucks,true,truer,truest,truffle,truffles,truly,trump,trumpet,trumpets,trumps,trunk,trunks,truss,trussed,trust,trusted,trustee,trustees,trusting,trusts,trustworthy,trusty,truth,truthful,truthfully,truths,try,trying,tryout,tryouts,tsk,tsunami,tub,tuba,tubby,tube,tubers,tubes,tubing,tubs,tuck,tucked,tucker,tucking,tug,tuition,tularemia,tulip,tulle,tumble,tumbler,tumbling,tummy,tumor,tumors,tuna,tundra,tune,tuned,tunes,tung,tunic,tuning,tunnel,tunnels,turban,turbine,turbo,turbulence,turbulent,turd,turds,turf,turk,turkey,turkeys,turks,turmoil,turn,turnaround,turncoat,turndown,turned,turner,turning,turnips,turnout,turnpike,turns,turquoise,turret,turtle,turtleneck,turtles,tush,tushie,tushy,tusk,tussle,tut,tutor,tutoring,tutors,tutti,tutu,tux,tuxedo,tuxedos,tuxes,twain,twas,tweak,tweaked,tweaking,tweed,tweedle,tween,tweet,tweeze,twelfth,twelve,twenties,twentieth,twenty,twerp,twice,twig,twigs,twilight,twin,twine,twinge,twinkle,twins,twirl,twist,twisted,twister,twisting,twists,twisty,twit,twitch,twitching,twitchy,twits,two,twos,twosome,tycoon,tying,tykes,type,typed,types,typewriter,typhoid,typical,typically,typing,typist,typo,tyranny,tyrant,tyrants,tyre,tyres,ubiquitous,ugh,uglies,ugl
iest,ugliness,ugly,uh,ulcer,ulcers,ulterior,ultimate,ultimately,ultimatum,ultimatums,ultra,ultrasound,um,umbilical,umbrella,umm,ump,umpire,umpteenth,un,unable,unacceptable,unadvisedly,unaffected,unanimous,unanimously,unannounced,unanswered,unappealing,unappreciated,unarmed,unattached,unattainable,unattended,unattractive,unauthorized,unavailable,unavoidable,unaware,unbalanced,unbearable,unbearably,unbeatable,unbecoming,unbeknownst,unbelievable,unbelievably,unblemished,unborn,unbreakable,unbroken,unburden,unbuttoned,uncalled,uncanny,uncaring,uncertain,uncertainty,unchanged,uncharacteristic,uncharacteristically,uncivilized,unclaimed,uncle,unclear,unclench,uncles,uncomfortable,uncommon,uncomplicated,uncompromising,unconcerned,unconditional,unconscionable,unconscious,unconsciously,unconsciousness,unconstitutional,uncontrollable,uncontrollably,uncontrolled,unconventional,uncool,uncooperative,uncover,uncovered,uncovering,uncuff,uncut,undead,undecided,undeniably,under,underage,undercover,undercurrent,undercut,underdeveloped,underdog,underestimate,underestimated,underestimating,undergarment,undergarments,undergo,undergoing,undergone,undergrad,undergraduate,underground,underhanded,underline,underlined,underlings,underlying,undermine,undermined,undermines,undermining,underneath,underpaid,underpants,underpass,underprivileged,undershirt,understand,understandable,understandably,understanding,understands,understatement,understood,understudy,undertake,undertaken,undertaking,undertow,underwater,underway,underwear,underworld,undeserving,undesirable,undetectable,undetected,undies,undisciplined,undisclosed,undiscovered,undisputed,undivided,undo,undoing,undone,undoubtedly,undress,undressed,undressing,undue,unduly,undying,unearth,unearthed,uneasy,uneducated,unemployable,unemployed,unemployment,unencumbered,unending,unequivocally,unethical,uneventful,unexpected,unexpectedly,unexplained,unexplored,unexpressed,unfair,unfairly,unfairness,unfaithful,unfamiliar,unfathomable,unfinished,unfit,unflattering,unfold,unfolding,unfolds,unforeseen,unforgettable,unforgivable,unforgiving,unfortunate,unfortunately,unfounded,unfreeze,unfreezing,unfriendly,unfulfilled,unfunny,ungodly,ungrateful,unguarded,unhand,unhappily,unhappiness,unhappy,unharmed,unhealthy,unheard,unhinged,unholy,unhook,unhooked,unicorn,unicycle,unidentified,unified,uniform,uniformity,uniforms,unimaginative,unimportant,unimpressed,uninformed,uninhibited,uninspired,uninsured,unintelligible,unintentionally,uninteresting,uninterrupted,uninvited,uninvolved,union,unions,unique,unisex,unit,unite,united,units,unity,universal,universally,universe,universes,universities,university,unjustly,unknown,unknowns,unlawful,unleaded,unleash,unleashed,unless,unlikable,unlike,unlikely,unlimited,unlisted,unload,unloading,unlock,unlocked,unlocking,unlocks,unlovable,unloved,unlucky,unmanned,unmarked,unmarried,unmentionable,unmentionables,unmistakable,unmitigated,unnatural,unnecessary,unnerve,unnerved,unofficial,unopened,unorthodox,unpack,unpacked,unpacking,unpaid,unparalleled,unplanned,unpleasant,unpleasantness,unplug,unplugged,unpopular,unprecedented,unpredictable,unprepared,unprincipled,unprofessional,unprotected,unprovoked,unpunished,unqualified,unquenchable,unquote,unravel,unraveled,unraveling,unreachable,unreal,unrealistic,unreasonable,unrelated,unreliable,unresolved,unresponsive,unrest,unruly,uns,unsafe,unsaid,unsanitary,unsatisfactory,unsatisfied,unsatisfying,unsavory,unscathed,unscheduled,unscrupulous,unseal,unsealed,unseasonably,unseat,unseemly,unseen,unselfish,unsettled,unse
ttling,unshakable,unsigned,unsolved,unspeakable,unspeakably,unspoiled,unspoken,unstable,unstoppable,unstuck,unsubstantiated,unsuccessful,unsuitable,unsung,unsupervised,unsure,unsuspecting,untamed,untangle,untapped,untenable,unthinkable,untie,untied,until,untimely,unto,untouchable,untouched,untoward,untraceable,untrained,untreated,untrue,untrustworthy,unusual,unusually,unveil,unveiling,unwanted,unwarranted,unwashed,unwelcome,unwieldy,unwilling,unwind,unwise,unwitting,unwittingly,unworthy,unwrap,unwrapping,unwritten,unzip,up,upbeat,upbringing,upchuck,upcoming,update,updated,updates,updating,upfront,upgrade,upgraded,upgrades,upgrading,upheaval,upheld,uphill,uphold,upholstery,upkeep,uplifting,uplink,upload,uploading,upon,upped,upper,uppers,upping,uppity,upright,uprising,uproar,uproot,uprooted,ups,upscale,upset,upsets,upsetting,upshot,upside,upstage,upstaged,upstairs,upstanding,upstart,upstate,uptake,uptight,uptown,upward,upwards,uranium,urban,urchin,urethra,urge,urged,urgency,urgent,urgently,urges,urging,urinal,urinary,urinate,urinating,urination,urine,urn,urns,urologist,us,usable,use,used,useful,usefulness,useless,user,users,uses,usher,ushering,ushers,using,usual,usually,utensils,uterine,uterus,utilities,utility,utilize,utmost,utopia,utter,utterly,vacancies,vacant,vacate,vacated,vacation,vacationing,vacations,vaccinated,vaccination,vaccine,vacuum,vacuumed,vacuuming,vagabond,vague,vaguely,vaguest,vail,vain,valentine,valentines,valet,valiant,valid,validate,validated,validity,valise,valley,valor,valuable,valuables,value,valued,values,valve,valves,vamoose,vamp,vampire,vampires,vamps,van,vandalism,vandalized,vandals,vanilla,vanish,vanished,vanishes,vanishing,vanity,vanquish,vanquished,vanquishing,vantage,vapid,vaporize,vaporized,variable,variables,variance,variant,variations,varicose,varied,varies,variety,various,varnish,varsity,vary,vascular,vase,vasectomy,vast,vastly,vat,vault,veal,vector,vee,veer,veered,veering,veg,vegan,vegetable,vegetables,vegetarian,vegetarians,vegetative,veggies,vehicle,vehicles,vehicular,veil,veiled,veils,vein,veins,velocity,velvet,vendetta,vendettas,vending,vendor,vendors,veneer,venerable,venereal,venetian,vengeance,vengeful,venison,venom,venomous,vent,vented,ventilate,ventilated,ventilation,ventilator,venting,ventricle,ventricular,ventriloquism,ventriloquist,vents,venture,ventures,venue,venues,vera,veracity,veranda,verbal,verbally,verbatim,verbs,verdict,verge,verger,verification,verified,verify,verily,veritable,veritas,vermeil,vermin,vermouth,vernacular,veronica,versatile,verse,versed,version,versions,versus,vertebrae,vertical,vertically,vertigo,very,vespers,vessel,vessels,vest,vesta,vested,vestibule,vestigial,vests,vet,veteran,veterans,veterinarian,veterinary,veto,vetoed,vets,vetted,via,viable,vial,vials,vibe,vibes,vibrant,vibrating,vibrations,vibrator,vicar,vicariously,vice,viceroy,vicinity,vicious,viciousness,victim,victimless,victims,victor,victoria,victories,victorious,victory,video,videos,videotape,videotaped,videotapes,vie,view,viewed,viewer,viewers,viewing,viewpoint,views,vig,vigil,vigilance,vigilant,vigilante,vigor,vigorously,viking,vile,vill,villa,village,villagers,villages,villain,villains,villas,vindicated,vindication,vindictive,vindictiveness,vine,vinegar,vineyards,vino,vintage,vinyl,viola,violate,violated,violates,violating,violation,violations,violence,violent,violently,violet,violets,violin,violinist,violins,viper,vipers,viral,virgin,virginal,virginity,virgins,virile,virtual,virtually,virtue,virtuoso,virtuous,virulent,virus,viruses,vis,visa,visage,visas,vi
sceral,viscount,viscous,visibility,visible,vision,visions,visit,visitation,visited,visiting,visitor,visitors,visits,visor,vista,visual,visualize,visualizing,visually,visuals,vital,vitality,vitally,vitals,vitamin,vitamins,viva,vivacious,vive,vivid,vixen,vixens,vocabulary,vocal,vocalist,vocals,vocation,vocational,vodka,vogue,voice,voiced,voices,voicing,void,voila,volant,volatile,volcanic,volcano,volcanoes,volition,volleyball,volt,voltage,volts,volume,volumes,voluntarily,voluntary,volunteer,volunteered,volunteering,volunteers,vomit,vomiting,voodoo,vortex,vote,voted,voter,voters,votes,voting,vouch,vouched,vouching,vow,vowed,vowel,vowels,vows,voyage,voyeur,vroom,vulgar,vulnerability,vulnerable,vulture,vultures,vying,wack,wacko,wackos,wacky,wad,waddle,wade,wading,waffle,waffles,wag,wage,wager,wages,wagging,waging,wagon,wagons,wahoo,waif,wail,wailing,waist,waistband,wait,waited,waiter,waiters,waiting,waitress,waitressed,waitresses,waitressing,waits,waive,waived,waiver,wake,waken,wakes,waking,walk,walked,walker,walking,walks,wall,walla,wallaby,walled,wallet,wallets,walling,wallop,wallow,wallowing,wallpaper,walls,wally,walnut,waltz,waltzing,wampum,wan,wand,wander,wandered,wanderer,wandering,wangler,waning,want,wanted,wanting,wanton,wants,war,ward,warden,wardrobe,ware,warehouse,warehouses,wares,warfare,warfarin,warhead,warheads,warlock,warlocks,warlord,warm,warmed,warmer,warmest,warming,warms,warmth,warn,warned,warner,warning,warnings,warp,warpath,warped,warrant,warranted,warrants,warren,warring,warrior,warriors,wars,warsaw,wart,wartime,warts,wary,was,wash,washcloth,washed,washer,washes,washing,washout,washroom,washrooms,washy,wasp,wasps,waste,wastebasket,wasted,wasteful,wastes,wasting,watch,watched,watcher,watchers,watches,watchful,watching,watchman,water,waterbed,watered,waterfall,waterfront,watering,waterloo,watermelon,waterproof,waters,waterworks,watery,watt,watts,wave,waved,wavelength,wavelengths,wavered,waves,waving,wavy,wax,waxed,waxes,waxing,waxy,way,ways,wayside,wayward,we,weak,weaken,weakened,weakening,weaker,weakest,weakling,weakness,weaknesses,wealth,wealthiest,wealthy,wean,weaning,weapon,weapons,wear,wearer,wearing,wears,weary,weasel,weasels,weather,weathered,weatherman,weathers,weave,weaver,web,webbing,weber,webs,webster,wed,wedded,wedding,weddings,wedge,wedges,wedgie,wedgies,wedlock,weds,wee,weed,weeds,week,weekend,weekends,weekly,weeks,weenie,weenies,weensy,weeny,weep,weeping,weepy,weevil,weigh,weighed,weighing,weighs,weight,weighted,weightless,weights,weiner,weir,weird,weirder,weirdest,weirdly,weirdness,weirdo,weirdoes,weirdos,welch,welcome,welcomed,welcomes,welcoming,weld,welded,welder,welding,welfare,well,welling,wellness,wells,welsh,welt,welts,wench,went,wept,were,werewolf,werewolves,west,westbound,western,westerns,westward,wet,wetlands,wets,wetter,wetting,wha,whack,whacked,whacko,whale,whales,wham,whammo,whammy,wharf,wharves,what,whatchamacallit,whatever,whatnot,whats,whatsoever,wheat,whee,wheel,wheelchair,wheelchairs,wheeler,wheeling,wheels,wheeze,wheezing,when,whence,whenever,where,whereabouts,whereas,wherefore,wherein,whereof,wherever,whet,whether,whew,whey,which,whichever,whiff,while,whilst,whim,whimper,whimpering,whims,whimsical,whimsy,whine,whiner,whining,whiny,whip,whiplash,whipped,whipper,whipping,whirl,whirling,whirlwind,whisk,whisked,whisker,whiskey,whisking,whisky,whisper,whispered,whispering,whispers,whistle,whistler,whistles,whistling,whit,white,whiteness,whiter,whites,whitey,whittle,whittled,whittling,whiz,who,whoa,whodunit,whoever,whole,wholeheartedly,wholesale,who
lesaler,wholesome,wholly,whom,whomever,whoop,whoopee,whooping,whoops,whoosh,whopper,whopping,whose,why,whys,wick,wicked,wickedness,wide,widen,widening,wider,widow,widowed,widower,widows,width,wield,wielding,wiener,wieners,wife,wig,wigged,wiggle,wiggled,wiggles,wiggling,wiggly,wiggy,wigs,wild,wildcat,wildcats,wilder,wilderness,wildest,wildfire,wildlife,wildly,will,willed,willful,willfully,willies,willing,willingly,willingness,willow,willows,willpower,wills,willy,wilt,wily,wimp,wimps,wimpy,win,winch,wind,windbag,windbreaker,winded,windfall,winding,windjammer,windmills,window,windows,windowsill,windpipe,winds,windshield,windsurfing,windward,windy,wine,winery,wing,winged,winger,winging,wingman,wings,wining,wink,winking,winkle,winks,winner,winners,winning,winnings,winos,wins,winter,winters,wipe,wiped,wiper,wipers,wipes,wiping,wire,wired,wireless,wires,wiretap,wiretaps,wiring,wisdom,wise,wiseass,wisecracks,wised,wisely,wiser,wisest,wish,wished,wishes,wishful,wishing,wit,witch,witchcraft,witches,witching,witchy,with,withdraw,withdrawal,withdrawals,withdrawing,withdrawn,withdraws,withdrew,wither,withered,withering,withers,withheld,withhold,withholding,within,without,withstand,withstanding,witless,witness,witnessed,witnesses,witnessing,wits,witty,wives,wizard,wo,wobble,woe,woes,wok,woke,woken,wolf,wolfram,wolves,woman,womanhood,womanizer,womanizing,womanly,womb,women,won,wonder,wondered,wonderful,wonderfully,wondering,wonderland,wonders,wondrous,wonky,wont,woo,wood,woodchuck,wooded,wooden,woodland,woodpecker,woods,woodshed,woodsy,woodwork,woody,wooed,woof,wooing,wool,woolly,woops,woozy,word,worded,wording,words,wore,work,workable,workaholic,workday,worked,worker,workers,working,workings,workload,workman,workmen,workout,workplace,works,workshop,workstation,workup,world,worldly,worlds,worldwide,worm,wormed,wormhole,worming,worms,worn,worried,worrier,worries,worrisome,worry,worrying,worrywart,worse,worsened,worship,worshiped,worshipped,worshippers,worshipping,worships,worst,worth,worthless,worthwhile,worthy,would,wound,wounded,wounding,wounds,woven,wow,wowed,wracked,wracking,wraith,wrangle,wrangler,wrap,wrapped,wrapper,wrappers,wrapping,wraps,wrath,wreak,wreaked,wreaking,wreaks,wreaths,wreck,wreckage,wrecked,wrecker,wrecking,wrecks,wrench,wrenched,wrestle,wrestler,wrestling,wretch,wretched,wriggle,wright,wring,wringer,wringing,wrinkle,wrinkled,wrinkles,wrinkly,wrist,wrists,writ,write,writer,writers,writes,writhing,writing,writings,written,wrong,wronged,wrongful,wrongfully,wrongs,wrote,wrought,wrung,wry,wuss,wussies,wussy,wuthering,xerox,ya,yacht,yachts,yah,yahoo,yak,yakking,yam,yammer,yammering,yams,yang,yank,yanked,yanking,yanks,yap,yapping,yard,yards,yardstick,yarn,yaw,yawn,yawning,yay,ye,yea,yeah,year,yearbook,yearbooks,yearned,yearning,yearnings,yearns,years,yeast,yech,yeh,yell,yelled,yeller,yelling,yellow,yells,yen,yep,yes,yesterday,yesterdays,yet,yeti,yield,yielding,yields,yikes,yin,yipe,yippee,yippie,yo,yodel,yoga,yoghurt,yogi,yogurt,yoke,yokel,yokels,yolk,yom,yon,yonkers,yore,you,young,younger,youngest,youngster,youngsters,your,yours,yourself,yourselves,youse,youth,youthful,youths,yuan,yuck,yuk,yuletide,yum,yummy,yup,yuppie,yuppies,zag,zander,zany,zap,zapped,zeal,zebra,zebras,zed,zee,zephyr,zephyrs,zeppelin,zero,zeroed,zeroes,zeros,zest,zesty,zeta,zig,zillion,zillions,zinfandel,zing,zip,zipped,zipper,zipping,zippy,zit,zits,zodiac,zombie,zombies,zone,zoned,zones,zoning,zonked,zoo,zoom,zooming' # 65536 words
english_words_google = 
'the,of,and,to,a,in,for,is,on,that,by,this,with,i,you,it,not,or,be,are,from,at,as,your,all,have,new,more,an,was,we,will,home,can,us,about,if,page,my,has,search,free,but,our,one,other,do,no,information,time,they,site,he,up,may,what,which,their,news,out,use,any,there,see,only,so,his,when,contact,here,business,who,web,also,now,help,get,view,online,c,e,first,am,been,would,how,were,me,s,services,some,these,click,its,like,service,x,than,find,price,date,back,top,people,had,list,name,just,over,state,year,day,into,email,two,health,n,world,re,next,used,go,b,work,last,most,products,music,buy,data,make,them,should,product,system,post,her,city,t,add,policy,number,such,please,available,copyright,support,message,after,best,software,then,jan,good,well,d,where,rights,public,books,high,school,through,m,each,links,she,review,years,order,very,privacy,book,items,company,r,read,group,sex,need,many,user,said,de,does,set,under,general,research,university,january,mail,full,map,reviews,program,life,know,games,way,days,management,p,part,could,great,united,hotel,real,f,item,international,center,must,store,travel,comments,made,development,report,off,member,details,line,terms,before,hotels,did,send,right,type,because,local,those,using,results,office,education,national,car,design,take,posted,internet,address,community,within,states,area,want,phone,shipping,reserved,subject,between,forum,family,l,long,based,w,code,show,o,even,black,check,special,prices,index,being,women,much,sign,file,link,open,today,technology,south,case,project,same,pages,uk,version,section,own,found,sports,house,related,security,both,g,county,american,photo,game,members,power,while,care,network,down,computer,systems,three,total,place,end,following,download,h,him,without,per,access,think,north,resources,current,posts,big,media,law,control,water,history,pictures,size,art,personal,since,including,guide,shop,directory,board,location,change,white,text,small,rating,rate,government,children,during,usa,return,students,v,shopping,account,times,sites,level,digital,profile,previous,form,events,love,old,john,main,call,hours,image,department,title,description,non,k,y,insurance,another,why,shall,property,class,cd,still,money,quality,every,listing,content,country,private,little,visit,save,tools,low,reply,customer,december,compare,movies,include,college,value,article,york,man,card,jobs,provide,j,food,source,author,different,press,u,learn,sale,around,print,course,job,canada,process,teen,room,stock,training,too,credit,point,join,science,men,categories,advanced,west,sales,look,english,left,team,estate,box,conditions,select,windows,gay,thread,week,category,note,live,large,gallery,table,register,however,june,october,november,market,library,really,action,start,series,model,features,air,industry,plan,human,provided,tv,yes,required,second,hot,accessories,cost,movie,march,la,september,better,say,questions,july,going,medical,test,friend,come,dec,study,application,cart,staff,articles,san,again,play,looking,issues,april,never,users,complete,street,topic,comment,financial,things,working,against,standard,tax,person,below,mobile,less,got,party,payment,equipment,login,student,let,programs,offers,legal,above,recent,park,stores,side,act,problem,red,give,memory,performance,social,q,august,quote,language,story,sell,experience,rates,create,key,body,young,america,important,field,few,east,paper,single,ii,age,activities,club,example,girls,additional,password,z,latest,something,road,gift,question,changes,night,ca,hard,texas,oct,pay,four,poker,status,browse,issue,range,building,seller,court,fe
bruary,always,result,light,write,war,nov,offer,blue,groups,al,easy,given,files,event,release,analysis,request,china,making,picture,needs,possible,might,professional,yet,month,major,star,areas,future,space,committee,hand,sun,cards,problems,london,washington,meeting,become,interest,id,child,keep,enter,california,share,similar,garden,schools,million,added,reference,companies,listed,baby,learning,energy,run,delivery,net,popular,term,film,stories,put,computers,journal,reports,co,try,welcome,central,images,president,notice,god,original,head,radio,until,cell,color,self,council,away,includes,track,australia,discussion,archive,once,others,entertainment,agreement,format,least,society,months,log,safety,friends,sure,trade,edition,cars,messages,marketing,tell,further,updated,association,able,having,provides,david,fun,already,green,studies,close,common,drive,specific,several,gold,feb,living,collection,called,short,arts,lot,ask,display,limited,solutions,means,director,daily,beach,past,natural,whether,due,et,five,upon,period,planning,says,official,weather,mar,land,average,done,technical,window,france,pro,region,island,record,direct,conference,environment,records,st,district,calendar,costs,style,front,statement,parts,aug,ever,early,miles,sound,resource,present,applications,either,ago,document,word,works,material,bill,written,talk,federal,rules,final,adult,tickets,thing,centre,requirements,via,cheap,nude,kids,finance,true,minutes,else,mark,third,rock,gifts,europe,reading,topics,bad,individual,tips,plus,auto,cover,usually,edit,together,percent,fast,function,fact,unit,getting,global,meet,far,economic,en,player,projects,lyrics,often,subscribe,submit,germany,amount,watch,included,feel,though,bank,risk,thanks,everything,deals,various,words,jul,production,commercial,james,weight,town,heart,advertising,received,choose,treatment,newsletter,archives,points,knowledge,magazine,error,camera,girl,currently,construction,toys,registered,clear,golf,receive,domain,methods,chapter,makes,protection,policies,loan,wide,beauty,manager,india,position,taken,sort,models,michael,known,half,cases,step,engineering,florida,simple,quick,none,wireless,license,paul,friday,lake,whole,annual,published,later,basic,shows,corporate,church,method,purchase,customers,active,response,practice,hardware,figure,materials,fire,holiday,chat,enough,designed,along,among,death,writing,speed,html,countries,loss,face,brand,discount,higher,effects,created,remember,standards,oil,bit,yellow,political,increase,advertise,kingdom,base,near,thought,stuff,french,storage,oh,japan,doing,loans,shoes,entry,stay,nature,orders,availability,africa,summary,turn,mean,growth,notes,agency,king,monday,european,activity,copy,although,drug,western,income,force,cash,employment,overall,bay,river,commission,ad,package,contents,seen,players,engine,port,album,regional,stop,supplies,started,administration,bar,institute,views,plans,double,dog,build,screen,exchange,types,soon,lines,electronic,continue,across,benefits,needed,season,apply,someone,held,ny,anything,printer,condition,effective,believe,organization,effect,asked,mind,sunday,selection,casino,lost,tour,menu,volume,cross,anyone,mortgage,hope,silver,corporation,wish,inside,solution,mature,role,rather,weeks,addition,came,supply,nothing,certain,executive,running,lower,necessary,union,jewelry,according,dc,clothing,mon,com,particular,fine,names,robert,hour,gas,skills,six,bush,islands,advice,career,military,rental,decision,leave,british,teens,pre,huge,sat,woman,facilities,zip,bid,kind,sellers,middle,move,cable,opportunities,taking,value
s,division,coming,tuesday,object,appropriate,machine,length,actually,nice,score,statistics,client,ok,returns,capital,follow,sample,investment,sent,shown,saturday,christmas,england,culture,band,flash,ms,lead,george,choice,went,starting,registration,fri,thursday,courses,consumer,hi,foreign,artist,outside,furniture,levels,channel,letter,mode,ideas,wednesday,structure,fund,summer,allow,degree,contract,button,releases,wed,homes,super,male,matter,custom,virginia,almost,took,located,multiple,asian,distribution,editor,inn,industrial,cause,potential,song,ltd,los,focus,late,fall,featured,idea,rooms,female,responsible,inc,communications,win,associated,thomas,primary,cancer,numbers,reason,tool,browser,spring,foundation,answer,voice,friendly,schedule,documents,communication,purpose,feature,bed,comes,police,everyone,independent,approach,brown,physical,operating,hill,maps,medicine,deal,hold,chicago,forms,glass,happy,tue,smith,wanted,developed,thank,safe,unique,survey,prior,telephone,sport,ready,feed,animal,sources,mexico,population,pa,regular,secure,navigation,operations,therefore,ass,simply,evidence,station,christian,round,favorite,understand,option,master,valley,recently,probably,sea,built,publications,blood,cut,improve,connection,publisher,hall,larger,networks,earth,parents,impact,transfer,introduction,kitchen,strong,tel,carolina,wedding,properties,hospital,ground,overview,ship,accommodation,owners,disease,excellent,paid,italy,perfect,hair,opportunity,kit,classic,basis,command,cities,william,express,award,distance,tree,peter,assessment,ensure,thus,wall,ie,involved,el,extra,especially,pussy,partners,budget,rated,guides,success,maximum,ma,operation,existing,quite,selected,boy,amazon,patients,restaurants,beautiful,warning,wine,locations,horse,vote,forward,flowers,stars,significant,lists,owner,retail,animals,useful,directly,manufacturer,ways,est,son,providing,rule,mac,housing,takes,iii,bring,catalog,searches,max,trying,mother,authority,considered,told,traffic,programme,joined,strategy,feet,agent,valid,bin,modern,senior,ireland,teaching,door,grand,testing,trial,charge,units,instead,canadian,cool,normal,wrote,enterprise,ships,entire,educational,md,leading,metal,positive,fl,fitness,chinese,opinion,asia,football,abstract,uses,output,funds,mr,greater,likely,develop,employees,artists,alternative,processing,responsibility,resolution,java,guest,seems,publication,pass,relations,trust,van,contains,session,photography,republic,fees,components,vacation,century,academic,assistance,completed,skin,indian,mary,il,expected,ring,grade,dating,pacific,mountain,organizations,pop,filter,mailing,vehicle,longer,consider,int,northern,behind,panel,floor,german,buying,match,proposed,default,require,iraq,boys,outdoor,deep,morning,otherwise,allows,rest,protein,plant,reported,hit,transportation,mm,pool,politics,partner,disclaimer,authors,boards,faculty,parties,fish,membership,mission,eye,string,sense,modified,pack,released,stage,internal,goods,recommended,born,unless,richard,detailed,japanese,race,approved,background,target,except,character,maintenance,ability,maybe,functions,ed,moving,brands,places,pretty,spain,southern,yourself,etc,winter,rape,battery,youth,pressure,submitted,boston,incest,debt,medium,television,interested,core,break,purposes,throughout,sets,dance,wood,itself,defined,papers,playing,awards,fee,studio,reader,virtual,device,established,answers,rent,las,remote,dark,external,apple,le,regarding,instructions,min,offered,theory,enjoy,remove,aid,surface,minimum,visual,host,variety,teachers,martin,manual,block,subjects,agents,
increased,repair,fair,civil,steel,understanding,songs,fixed,wrong,beginning,hands,associates,finally,classes,paris,ohio,gets,sector,capacity,requires,jersey,un,fat,fully,father,electric,saw,instruments,quotes,officer,driver,businesses,dead,respect,unknown,specified,restaurant,mike,trip,worth,mi,procedures,poor,teacher,xxx,eyes,relationship,workers,farm,georgia,peace,traditional,campus,tom,showing,creative,coast,benefit,progress,funding,devices,lord,grant,sub,agree,fiction,hear,sometimes,watches,careers,beyond,goes,families,led,museum,themselves,fan,transport,interesting,wife,accepted,former,ten,hits,zone,complex,th,cat,galleries,references,die,presented,jack,flat,flow,agencies,literature,respective,parent,spanish,michigan,columbia,setting,dr,scale,stand,economy,highest,helpful,monthly,critical,frame,musical,definition,secretary,path,employee,chief,gives,bottom,magazines,packages,detail,francisco,laws,changed,pet,heard,begin,individuals,colorado,royal,clean,switch,russian,largest,african,guy,titles,relevant,guidelines,justice,bible,cup,basket,applied,weekly,vol,installation,described,demand,pp,suite,na,square,chris,attention,advance,skip,diet,army,auction,gear,lee,os,difference,allowed,correct,charles,nation,selling,lots,piece,sheet,firm,seven,older,illinois,regulations,elements,species,jump,cells,resort,facility,random,certificate,minister,motion,looks,fashion,directions,visitors,monitor,trading,forest,calls,whose,couple,giving,chance,vision,ball,ending,clients,actions,listen,discuss,accept,naked,goal,successful,sold,wind,communities,clinical,situation,sciences,markets,lowest,highly,publishing,appear,emergency,lives,currency,leather,determine,temperature,palm,announcements,patient,actual,historical,stone,bob,commerce,perhaps,persons,difficult,scientific,satellite,fit,tests,village,accounts,amateur,ex,met,pain,particularly,factors,coffee,cum,buyer,cultural,steve,easily,oral,ford,poster,edge,functional,root,au,fi,closed,holidays,ice,pink,zealand,balance,graduate,replies,shot,architecture,initial,label,thinking,scott,sec,recommend,canon,league,waste,minute,bus,optional,dictionary,cold,accounting,manufacturing,sections,chair,fishing,effort,phase,fields,bag,fantasy,po,letters,motor,va,professor,context,install,shirt,apparel,generally,continued,foot,mass,crime,count,breast,ibm,johnson,sc,quickly,dollars,religion,claim,driving,permission,surgery,patch,heat,wild,measures,generation,kansas,miss,chemical,doctor,task,reduce,brought,himself,nor,component,enable,exercise,bug,santa,mid,guarantee,leader,diamond,israel,se,processes,soft,alone,meetings,seconds,jones,arizona,interests,flight,congress,fuel,walk,produced,italian,wait,supported,pocket,saint,rose,freedom,argument,competition,creating,jim,drugs,joint,premium,fresh,characters,attorney,di,factor,growing,thousands,km,stream,apartments,pick,hearing,eastern,entries,dates,generated,signed,upper,administrative,serious,prime,limit,began,louis,steps,errors,shops,bondage,del,efforts,informed,ga,ac,thoughts,creek,ft,worked,quantity,urban,practices,sorted,reporting,essential,myself,tours,platform,load,labor,immediately,nursing,defense,machines,tags,heavy,covered,recovery,joe,guys,configuration,cock,merchant,comprehensive,expert,universal,protect,drop,solid,presentation,languages,became,orange,compliance,vehicles,prevent,theme,rich,im,campaign,marine,improvement,vs,guitar,finding,pennsylvania,examples,saying,spirit,ar,claims,challenge,acceptance,mo,seem,affairs,touch,intended,towards,sa,goals,hire,election,suggest,branch,charges,serve,reasons,magic,mount,smar
t,talking,gave,ones,latin,avoid,certified,manage,corner,rank,computing,oregon,element,birth,virus,abuse,requests,separate,quarter,procedure,leadership,tables,define,racing,religious,facts,breakfast,kong,column,plants,faith,chain,identify,avenue,missing,died,approximately,domestic,recommendations,moved,houston,reach,comparison,mental,viewed,moment,extended,sequence,inch,attack,sorry,centers,opening,damage,reserve,recipes,plastic,produce,snow,placed,truth,counter,failure,follows,eu,dollar,camp,ontario,automatically,des,minnesota,films,bridge,native,fill,williams,movement,printing,baseball,owned,approval,draft,chart,played,contacts,cc,jesus,readers,clubs,wa,jackson,equal,adventure,matching,offering,shirts,profit,leaders,posters,institutions,assistant,variable,ave,advertisement,expect,headlines,yesterday,compared,determined,wholesale,workshop,russia,gone,codes,kinds,extension,seattle,statements,golden,completely,teams,fort,cm,wi,lighting,senate,forces,funny,brother,gene,turned,portable,tried,electrical,applicable,disc,returned,pattern,boat,named,theatre,earlier,manufacturers,sponsor,classical,warranty,dedicated,indiana,direction,harry,objects,ends,delete,evening,assembly,nuclear,taxes,mouse,signal,criminal,issued,brain,sexual,wisconsin,powerful,dream,obtained,false,da,cast,flower,felt,personnel,passed,supplied,identified,falls,pic,soul,aids,opinions,promote,stated,professionals,appears,carry,flag,decided,covers,hr,em,advantage,hello,designs,maintain,tourism,priority,newsletters,adults,savings,iv,graphic,atom,payments,estimated,binding,brief,ended,winning,eight,anonymous,iron,straight,script,served,wants,miscellaneous,prepared,void,dining,alert,integration,atlanta,dakota,tag,interview,mix,framework,disk,installed,queen,credits,clearly,fix,handle,sweet,desk,dave,massachusetts,diego,hong,vice,associate,ne,truck,behavior,enlarge,ray,frequently,revenue,measure,changing,votes,du,duty,looked,discussions,bear,gain,festival,laboratory,ocean,flights,experts,signs,lack,depth,iowa,whatever,vintage,train,exactly,dry,explore,maryland,spa,concept,nearly,eligible,reality,forgot,handling,origin,knew,gaming,feeds,billion,destination,scotland,faster,intelligence,dallas,bought,con,ups,nations,route,followed,specifications,broken,frank,alaska,blow,battle,residential,speak,decisions,industries,protocol,query,clip,partnership,editorial,nt,expression,es,equity,provisions,speech,wire,principles,suggestions,rural,shared,sounds,replacement,tape,strategic,judge,economics,acid,cent,forced,compatible,fight,apartment,height,null,zero,speaker,filed,netherlands,obtain,recreation,offices,designer,remain,managed,pr,failed,marriage,roll,korea,banks,fr,participants,secret,bath,kelly,leads,negative,austin,favorites,toronto,theater,springs,missouri,andrew,var,perform,healthy,translation,estimates,font,assets,injury,mt,joseph,ministry,drivers,lawyer,figures,married,protected,proposal,sharing,philadelphia,portal,waiting,birthday,beta,fail,gratis,banking,officials,brian,toward,won,slightly,assist,conduct,contained,legislation,calling,serving,bags,miami,comics,matters,houses,doc,postal,relationships,tennessee,wear,controls,breaking,combined,ultimate,wales,representative,frequency,introduced,minor,finish,departments,residents,noted,displayed,reduced,physics,rare,spent,performed,extreme,samples,davis,daniel,bars,reviewed,row,oz,forecast,removed,helps,administrator,cycle,contain,accuracy,dual,rise,sleep,bird,brazil,creation,static,scene,hunter,addresses,lady,crystal,famous,writer,chairman,violence,fans,oklahoma,speakers,drink,academy,dynam
ic,gender,eat,permanent,agriculture,dell,cleaning,portfolio,practical,delivered,exclusive,seat,concerns,colour,vendor,originally,utilities,philosophy,regulation,officers,reduction,aim,bids,referred,supports,nutrition,recording,regions,junior,toll,les,cape,ann,rings,meaning,tip,secondary,wonderful,mine,ladies,henry,ticket,announced,guess,agreed,prevention,whom,ski,import,posting,presence,instant,mentioned,automatic,viewing,maintained,ch,increasing,majority,connected,christ,dan,dogs,sd,directors,aspects,austria,ahead,moon,participation,scheme,utility,fly,manner,matrix,containing,combination,amendment,despite,strength,guaranteed,turkey,libraries,proper,distributed,degrees,singapore,enterprises,delta,fear,seeking,inches,phoenix,convention,shares,principal,daughter,standing,comfort,colors,wars,ordering,kept,alpha,appeal,cruise,bonus,previously,hey,buildings,beat,disney,household,batteries,adobe,smoking,becomes,drives,arms,alabama,tea,improved,trees,achieve,positions,dress,subscription,dealer,contemporary,sky,utah,nearby,rom,carried,happen,exposure,hide,signature,gambling,refer,miller,provision,outdoors,clothes,caused,luxury,babes,frames,certainly,indeed,newspaper,toy,circuit,layer,printed,slow,removal,easier,liability,trademark,hip,printers,nine,adding,kentucky,mostly,eric,spot,taylor,prints,spend,factory,interior,grow,americans,optical,promotion,relative,amazing,clock,dot,hiv,identity,suites,conversion,feeling,hidden,reasonable,victoria,serial,relief,revision,influence,ratio,importance,rain,onto,planet,copies,recipe,zum,permit,seeing,proof,tennis,bass,prescription,bedroom,empty,instance,hole,pets,ride,licensed,orlando,specifically,tim,bureau,maine,represent,conservation,pair,ideal,recorded,don,pieces,finished,parks,dinner,lawyers,sydney,stress,cream,runs,trends,discover,ap,patterns,boxes,louisiana,hills,fourth,nm,advisor,mn,marketplace,nd,evil,aware,wilson,shape,evolution,irish,certificates,objectives,stations,suggested,op,remains,greatest,firms,concerned,operator,structures,generic,usage,cap,ink,charts,continuing,mixed,census,peak,competitive,exist,wheel,transit,dick,salt,compact,poetry,lights,tracking,angel,bell,keeping,preparation,attempt,receiving,matches,accordance,width,noise,engines,forget,array,discussed,accurate,stephen,elizabeth,climate,reservations,pin,alcohol,greek,instruction,managing,sister,raw,differences,walking,explain,smaller,newest,establish,happened,expressed,jeff,extent,sharp,ben,lane,paragraph,kill,mathematics,compensation,ce,export,managers,aircraft,sweden,conflict,conducted,versions,employer,occur,percentage,knows,mississippi,describe,concern,requested,citizens,connecticut,heritage,immediate,holding,trouble,spread,coach,agricultural,expand,supporting,audience,assigned,jordan,collections,ages,participate,plug,specialist,cook,affect,virgin,experienced,investigation,raised,hat,institution,directed,dealers,searching,sporting,helping,affected,lib,totally,plate,expenses,indicate,blonde,ab,proceedings,favourite,transmission,anderson,characteristics,der,lose,organic,seek,experiences,cheats,extremely,contracts,guests,diseases,concerning,equivalent,chemistry,tony,neighborhood,nevada,thailand,anyway,continues,tracks,advisory,cam,curriculum,logic,prince,circle,soil,grants,anywhere,psychology,responses,atlantic,wet,circumstances,edward,identification,ram,leaving,appliances,matt,cooking,speaking,fox,respond,sizes,plain,exit,entered,iran,arm,keys,launch,wave,checking,costa,belgium,holy,acts,guidance,mesh,trail,enforcement,symbol,crafts,highway,buddy,observed,dean,poll,glossary,fiscal,c
elebrity,styles,denver,unix,filled,bond,channels,appendix,notify,blues,chocolate,pub,portion,scope,hampshire,cables,cotton,controlled,requirement,authorities,biology,dental,killed,border,ancient,debate,representatives,starts,pregnancy,causes,arkansas,biography,leisure,attractions,learned,transactions,notebook,explorer,historic,attached,opened,husband,disabled,authorized,crazy,britain,concert,retirement,financing,efficiency,sp,comedy,adopted,efficient,linear,commitment,specialty,bears,jean,hop,carrier,edited,constant,visa,mouth,jewish,meter,linked,portland,interviews,concepts,gun,reflect,pure,deliver,wonder,hell,lessons,fruit,begins,qualified,reform,lens,treated,discovery,draw,classified,relating,assume,confidence,alliance,fm,confirm,warm,neither,lewis,howard,leaves,engineer,consistent,replace,clearance,connections,inventory,suck,organisation,babe,checks,reached,becoming,objective,indicated,sugar,crew,legs,sam,stick,securities,allen,relation,enabled,genre,slide,montana,volunteer,tested,rear,democratic,enhance,switzerland,exact,bound,formal,dimensions,contribute,lock,storm,colleges,mile,showed,challenges,editors,mens,threads,bowl,supreme,brothers,recognition,presents,ref,tank,submission,dolls,estimate,encourage,navy,kid,inspection,consumers,cancel,limits,territory,transaction,manchester,weapons,paint,delay,pilot,outlet,contributions,continuous,czech,resulting,cambridge,initiative,novel,pan,execution,disability,increases,ultra,winner,idaho,contractor,episode,examination,potter,dish,plays,bulletin,ia,pt,indicates,modify,oxford,adam,truly,painting,committed,extensive,universe,candidate,patent,slot,outstanding,ha,eating,perspective,planned,watching,lodge,messenger,mirror,tournament,consideration,sterling,sessions,kernel,stocks,buyers,journals,gray,catalogue,ea,antonio,charged,broad,taiwan,und,chosen,greece,swiss,sarah,clark,labour,hate,terminal,publishers,nights,behalf,caribbean,liquid,rice,nebraska,loop,salary,reservation,foods,guard,properly,orleans,saving,remaining,empire,resume,twenty,newly,raise,prepare,gary,depending,illegal,expansion,vary,hundreds,rome,arab,lincoln,helped,premier,tomorrow,purchased,milk,decide,consent,drama,visiting,performing,downtown,keyboard,contest,collected,nw,bands,boot,suitable,ff,absolutely,millions,lunch,audit,push,chamber,guinea,findings,muscle,iso,implement,clicking,scheduled,polls,typical,tower,yours,sum,significantly,chicken,temporary,attend,shower,alan,sending,jason,tonight,dear,sufficient,shell,province,catholic,oak,vat,vancouver,governor,beer,seemed,contribution,measurement,swimming,formula,constitution,solar,jose,catch,jane,pakistan,ps,reliable,consultation,northwest,sir,doubt,earn,finder,unable,periods,classroom,tasks,democracy,attacks,kim,merchandise,const,resistance,doors,symptoms,resorts,memorial,visitor,twin,forth,insert,baltimore,gateway,ky,dont,drawing,candidates,charlotte,ordered,biological,fighting,transition,happens,preferences,spy,romance,instrument,bruce,split,themes,powers,heaven,br,bits,pregnant,twice,classification,focused,egypt,physician,bargain,cellular,norway,vermont,asking,blocks,normally,lo,spiritual,hunting,suit,shift,chip,res,sit,bodies,photographs,cutting,simon,writers,marks,flexible,loved,favourites,numerous,relatively,birds,satisfaction,represents,char,pittsburgh,superior,preferred,saved,paying,cartoon,shots,intellectual,moore,granted,choices,carbon,spending,comfortable,magnetic,interaction,listening,effectively,registry,crisis,outlook,massive,denmark,employed,bright,treat,header,cs,poverty,formed,piano,echo,que,sheets,patrick,expe
rimental,puerto,revolution,consolidation,displays,allowing,earnings,mystery,landscape,dependent,mechanical,journey,delaware,bidding,risks,banner,applicant,charter,fig,barbara,cooperation,counties,acquisition,ports,directories,recognized,dreams,notification,licensing,stands,teach,occurred,rapid,pull,hairy,diversity,cleveland,ut,reverse,deposit,investments,wheels,specify,dutch,sensitive,formats,depends,boots,holds,si,editing,poland,completion,pulse,universities,technique,contractors,voting,courts,notices,subscriptions,calculate,detroit,alexander,broadcast,converted,anniversary,improvements,strip,specification,pearl,accident,nick,accessible,accessory,resident,plot,possibly,typically,representation,regard,pump,exists,arrangements,smooth,conferences,strike,consumption,birmingham,flashing,narrow,afternoon,threat,surveys,sitting,putting,controller,ownership,committees,penis,legislative,vietnam,trailer,anne,castle,gardens,missed,malaysia,antique,labels,willing,molecular,acting,heads,stored,residence,attorneys,antiques,density,hundred,ryan,operators,strange,philippines,statistical,beds,breasts,mention,innovation,employers,grey,parallel,amended,operate,bills,bold,bathroom,stable,opera,definitions,von,doctors,lesson,asset,scan,elections,drinking,reaction,blank,enhanced,entitled,severe,generate,stainless,newspapers,hospitals,vi,humor,aged,exception,lived,duration,bulk,successfully,indonesia,pursuant,fabric,visits,primarily,tight,domains,capabilities,contrast,recommendation,flying,sin,berlin,cute,organized,ba,para,adoption,improving,cr,expensive,meant,capture,pounds,buffalo,organisations,plane,pg,explained,seed,programmes,desire,mechanism,camping,ee,jewellery,meets,welfare,peer,caught,eventually,marked,driven,measured,bottle,agreements,considering,marshall,massage,rubber,conclusion,closing,thousand,meat,legend,grace,susan,ing,adams,monster,alex,bang,villa,bone,columns,disorders,bugs,collaboration,hamilton,detection,ftp,cookies,inner,formation,med,engineers,entity,gate,holder,proposals,sw,settlement,portugal,lawrence,roman,duties,valuable,erotic,tone,ethics,forever,dragon,busy,captain,fantastic,imagine,brings,heating,leg,neck,hd,wing,governments,purchasing,appointed,taste,dealing,commit,tiny,rail,liberal,jay,trips,gap,sides,tube,turns,corresponding,descriptions,cache,belt,jacket,determination,animation,oracle,er,matthew,lease,productions,aviation,proud,excess,disaster,console,commands,jr,instructor,giant,achieved,injuries,shipped,seats,approaches,alarm,anthony,usual,loading,stamps,appeared,franklin,angle,rob,mining,melbourne,worst,betting,scientists,liberty,wyoming,argentina,era,convert,possibility,commissioner,dangerous,garage,exciting,thongs,unfortunately,respectively,volunteers,attachment,finland,morgan,derived,pleasure,honor,asp,eagle,pants,columbus,nurse,prayer,appointment,workshops,hurricane,quiet,luck,postage,producer,represented,mortgages,dial,responsibilities,cheese,comic,carefully,jet,productivity,investors,crown,par,underground,diagnosis,maker,crack,principle,picks,vacations,gang,calculated,fetish,appearance,smoke,apache,incorporated,craft,cake,apart,fellow,blind,lounge,mad,coins,andy,gross,strongly,cafe,valentine,hilton,ken,horror,su,familiar,capable,douglas,till,involving,pen,investing,christopher,admission,shoe,elected,carrying,victory,sand,madison,joy,editions,mainly,ethnic,ran,parliament,actor,finds,seal,situations,fifth,citizen,vertical,corrections,structural,municipal,describes,prize,sr,occurs,jon,absolute,disabilities,consists,substance,prohibited,addressed,lies,pipe,soldiers,guardian,
lecture,simulation,ill,concentration,classics,lbs,lay,interpretation,horses,dirty,deck,wayne,donate,taught,bankruptcy,worker,alive,temple,substances,prove,discovered,wings,breaks,restrictions,participating,waters,promise,thin,exhibition,prefer,ridge,cabinet,harris,bringing,sick,dose,tiffany,tropical,collect,bet,composition,streets,definitely,shaved,turning,buffer,purple,existence,commentary,larry,developments,def,immigration,lets,mutual,necessarily,syntax,li,attribute,prison,skill,chairs,nl,everyday,apparently,surrounding,mountains,moves,popularity,inquiry,checked,exhibit,throw,trend,sierra,visible,cats,desert,ya,oldest,rhode,obviously,mercury,steven,handbook,greg,navigate,worse,summit,victims,spaces,fundamental,burning,escape,somewhat,receiver,substantial,tr,progressive,boats,glance,scottish,championship,arcade,richmond,sacramento,impossible,russell,tells,obvious,fiber,depression,graph,covering,platinum,judgment,bedrooms,talks,filing,foster,passing,awarded,testimonials,trials,tissue,nz,clinton,masters,bonds,cartridge,explanation,folk,commons,cincinnati,subsection,fraud,electricity,permitted,spectrum,arrival,pottery,emphasis,roger,aspect,awesome,mexican,confirmed,counts,priced,hist,crash,lift,desired,inter,closer,assumes,heights,shadow,riding,infection,lisa,expense,grove,venture,korean,healing,princess,mall,entering,packet,spray,studios,dad,buttons,observations,thompson,winners,extend,roads,subsequent,pat,dublin,rolling,fell,yard,disclosure,establishment,memories,nelson,te,arrived,creates,faces,tourist,cocks,av,mayor,murder,sean,adequate,senator,yield,grades,cartoons,pour,digest,reg,lodging,tion,dust,hence,entirely,replaced,rescue,undergraduate,losses,combat,reducing,stopped,occupation,lakes,butt,donations,associations,closely,radiation,diary,seriously,kings,shooting,kent,adds,ear,flags,baker,launched,elsewhere,pollution,conservative,shock,effectiveness,walls,abroad,ebony,tie,ward,drawn,arthur,ian,visited,roof,walker,demonstrate,atmosphere,suggests,kiss,beast,ra,operated,experiment,targets,overseas,purchases,dodge,counsel,federation,invited,yards,assignment,chemicals,gordon,mod,farmers,queries,rush,ukraine,absence,nearest,cluster,vendors,whereas,yoga,serves,woods,surprise,lamp,rico,partial,phil,everybody,couples,nashville,ranking,jokes,http,simpson,sublime,palace,acceptable,satisfied,glad,wins,measurements,verify,globe,trusted,copper,milwaukee,rack,warehouse,ec,rep,kerry,receipt,supposed,ordinary,nobody,ghost,violation,stability,mit,applying,southwest,boss,pride,expectations,independence,knowing,reporter,keith,champion,cloudy,linda,ross,personally,chile,anna,plenty,solo,sentence,throat,ignore,maria,uniform,excellence,wealth,tall,somewhere,vacuum,dancing,attributes,recognize,brass,writes,plaza,survival,quest,publish,screening,toe,trans,jonathan,whenever,nova,lifetime,pioneer,booty,forgotten,plates,acres,venue,athletic,essays,behaviour,vital,telling,fairly,coastal,cf,charity,intelligent,edinburgh,vt,excel,modes,obligation,campbell,wake,stupid,harbor,hungary,traveler,segment,realize,lan,enemy,puzzle,rising,aluminum,wells,opens,insight,restricted,republican,secrets,lucky,latter,merchants,thick,repeat,philips,attendance,penalty,drum,glasses,enables,nec,builder,vista,jessica,chips,terry,flood,ease,arguments,amsterdam,orgy,arena,adventures,pupils,stewart,announcement,outcome,xx,appreciate,expanded,casual,grown,polish,lovely,extras,centres,jerry,clause,smile,lands,ri,troops,indoor,bulgaria,armed,broker,charger,regularly,believed,pine,cooling,tend,gulf,rick,trucks,cp,mechanisms,divorce,laura,tokyo,p
artly,tradition,candy,pills,tiger,donald,folks,exposed,hunt,angels,deputy,sealed,physicians,loaded,fred,complaint,scenes,experiments,balls,afghanistan,scholarship,governance,mill,founded,chronic,moral,den,finger,keeps,pound,locate,pl,trained,burn,roses,ourselves,bread,tobacco,wooden,motors,tough,roberts,incident,gonna,lie,conversation,decrease,chest,pension,billy,revenues,emerging,worship,capability,ak,fe,craig,herself,producing,churches,precision,damages,reserves,contributed,solve,reproduction,minority,diverse,ingredients,sb,ah,johnny,sole,franchise,recorder,complaints,facing,nancy,promotions,tones,passion,rehabilitation,maintaining,sight,laid,clay,defence,patches,weak,refund,towns,divided,reception,wise,cyprus,odds,correctly,consequences,makers,hearts,geography,appearing,integrity,worry,discrimination,eve,carter,legacy,marc,pleased,danger,widely,phrase,genuine,raising,implications,paradise,hybrid,reads,roles,emotional,sons,leaf,pad,glory,platforms,ja,bigger,versus,combine,overnight,geographic,exceed,rod,saudi,fault,cuba,hrs,preliminary,districts,introduce,silk,kate,babies,bi,karen,compiled,romantic,revealed,specialists,generator,albert,examine,jimmy,graham,suspension,bristol,margaret,sad,correction,wolf,slowly,communicate,rugby,supplement,cal,portions,infant,promoting,samuel,fluid,grounds,fits,kick,regards,meal,ta,hurt,machinery,unlike,equation,baskets,probability,pot,dimension,wright,barry,proven,admissions,warren,slip,studied,reviewer,involves,quarterly,profits,devil,grass,comply,marie,illustrated,cherry,continental,alternate,deutsch,achievement,limitations,kenya,cuts,funeral,earrings,enjoyed,chapters,charlie,quebec,passenger,convenient,dennis,mars,francis,sized,noticed,socket,silent,literary,egg,signals,caps,pill,theft,childhood,swing,symbols,lat,meta,humans,facial,choosing,talent,dated,flexibility,seeker,wisdom,shoot,boundary,mint,offset,philip,elite,gi,spin,holders,believes,swedish,poems,jurisdiction,displaying,witness,collins,equipped,stages,encouraged,sur,winds,powder,broadway,acquired,wash,cartridges,stones,entrance,roots,declaration,losing,attempts,noble,glasgow,rev,gospel,advantages,shore,loves,induced,ll,knight,preparing,loose,aims,recipient,linking,extensions,appeals,earned,illness,islamic,athletics,southeast,ho,alternatives,pending,parker,determining,lebanon,kennedy,sh,soap,ae,triple,cooper,vincent,jam,secured,unusual,answered,destruction,increasingly,migration,disorder,routine,rocks,conventional,titans,applicants,wearing,axis,sought,mounted,habitat,median,guns,herein,animated,horny,judicial,rio,adjustment,hero,bachelor,attitude,engaged,falling,montreal,carpet,lenses,binary,attended,difficulty,collective,coalition,pi,dropped,duke,walter,ai,pace,besides,wage,producers,ot,collector,arc,hosts,moments,atlas,strings,dawn,representing,observation,feels,torture,carl,coat,mitchell,mrs,restoration,convenience,returning,ralph,opposition,container,yr,defendant,warner,confirmation,app,embedded,supervisor,wizard,corps,actors,liver,liable,morris,petition,recall,picked,assumed,departure,minneapolis,belief,killing,memphis,shoulder,texts,brokers,roy,ion,diameter,ottawa,doll,ic,tit,seasons,peru,refine,bidder,singer,evans,herald,literacy,fails,aging,intervention,fed,attraction,diving,invite,modification,alice,suppose,reed,involve,moderate,terror,younger,thirty,mice,opposite,understood,rapidly,ban,mercedes,assurance,clerk,happening,vast,mills,outline,amendments,holland,receives,metropolitan,compilation,verification,ent,odd,wrap,refers,mood,favor,veterans,gr,attractive,occasion,jefferson,victim,d
emands,sleeping,careful,beam,gardening,obligations,arrive,orchestra,sunset,tracked,moreover,lottery,tops,framed,aside,licence,essay,discipline,amy,dialogue,identifying,alphabetical,camps,declared,dispatched,aaron,trace,disposal,shut,packs,ge,switches,romania,voluntary,thou,consult,greatly,mask,midnight,ng,commonly,pe,photographer,inform,turkish,coal,cry,quantum,murray,intent,tt,zoo,largely,pleasant,announce,constructed,additions,requiring,spoke,arrow,engagement,rough,weird,tee,lion,inspired,holes,weddings,blade,suddenly,oxygen,meals,canyon,meters,merely,arrangement,conclusions,passes,bibliography,pointer,stretch,durham,furthermore,permits,cooperative,muslim,xl,neil,sleeve,cleaner,cricket,beef,feeding,stroke,township,cad,hats,robin,robinson,jacksonville,strap,headquarters,sharon,crowd,transfers,surf,olympic,transformation,remained,attachments,dir,entities,customs,administrators,personality,rainbow,hook,roulette,decline,gloves,cord,cloud,facilitate,subscriber,valve,val,explains,proceed,feelings,knife,jamaica,shelf,liked,adopt,denied,incredible,donation,outer,crop,deaths,rivers,commonwealth,manhattan,tales,katrina,islam,tu,fy,thumbs,seeds,cited,lite,hub,realized,twelve,founder,decade,dispute,portuguese,tired,adverse,everywhere,eng,steam,discharge,ef,drinks,ace,voices,acute,climbing,stood,sing,tons,perfume,carol,honest,albany,hazardous,restore,stack,somebody,sue,ep,reputation,democrats,hang,curve,creator,amber,qualifications,museums,variation,passage,transferred,trunk,lb,damn,pierre,photograph,oakland,colombia,waves,camel,lamps,underlying,hood,wrestling,suicide,chi,arabia,gathering,projection,juice,chase,mathematical,logical,sauce,fame,extract,specialized,panama,indianapolis,af,payable,corporations,courtesy,criticism,automobile,confidential,statutory,accommodations,athens,northeast,judges,retired,remarks,detected,decades,paintings,walked,arising,bracelet,ins,eggs,juvenile,injection,yorkshire,populations,protective,afraid,railway,indicator,pointed,causing,mistake,norton,locked,eliminate,fusion,mineral,ruby,steering,beads,fortune,preference,canvas,threshold,parish,claimed,screens,cemetery,croatia,flows,venezuela,exploration,fewer,nurses,stem,proxy,astronomy,lanka,edwards,drew,contests,translate,announces,costume,berkeley,voted,killer,gates,adjusted,rap,tune,bishop,pulled,corn,shaped,compression,seasonal,establishing,farmer,counters,puts,constitutional,grew,perfectly,tin,slave,instantly,cultures,norfolk,coaching,examined,trek,encoding,litigation,heroes,painted,ir,horizontal,resulted,portrait,ethical,carriers,mobility,floral,builders,ties,struggle,schemes,suffering,neutral,fisher,rat,spears,prospective,bedding,ultimately,joining,heading,equally,artificial,bearing,spectacular,seniors,worlds,guilty,affiliated,naturally,haven,tablet,jury,dos,tail,subscribers,charm,lawn,violent,underwear,basin,soup,potentially,ranch,crossing,inclusive,cottage,drunk,considerable,crimes,resolved,byte,nose,branches,delhi,holdings,alien,selecting,processors,broke,nepal,zimbabwe,difficulties,juan,complexity,constantly,browsing,resolve,barcelona,presidential,documentary,cod,territories,melissa,moscow,thesis,thru,jews,discs,rocky,bargains,frequent,nigeria,ceiling,ensuring,legislature,hospitality,gen,anybody,diamonds,fleet,bunch,singing,theoretical,afford,exercises,surveillance,quit,distinct,lung,substitute,inclusion,hopefully,brilliant,turner,sucking,cents,ti,todd,spoken,stayed,civic,manuals,sees,termination,watched,thereof,households,redeem,rogers,grain,authentic,regime,wishes,bull,montgomery,architectural,louisville,depend,
differ,movements,ranging,monica,repairs,breath,amenities,virtually,cole,mart,candle,hanging,colored,authorization,tale,verified,lynn,formerly,bp,situated,comparative,seeks,loving,strictly,routing,docs,stanley,psychological,surprised,elegant,gains,renewal,genealogy,opposed,deemed,scoring,expenditure,brooklyn,liverpool,sisters,critics,spots,oo,hacker,madrid,similarly,margin,coin,solely,fake,salon,norman,excluding,headed,voters,cure,madonna,commander,arch,ni,murphy,thinks,suggestion,soldier,phillips,aimed,justin,bomb,harm,interval,mirrors,tricks,brush,investigate,thy,panels,repeated,assault,spare,deer,tongue,bowling,tri,pal,monkey,proportion,filename,skirt,florence,invest,honey,um,analyses,drawings,significance,ye,lovers,atomic,arabic,gauge,essentials,junction,protecting,faced,mat,rachel,solving,transmitted,produces,oven,ted,intensive,chains,kingston,sixth,engage,noon,switching,quoted,correspondence,farms,imports,supervision,cheat,bronze,expenditures,sandy,separation,testimony,suspect,celebrities,sender,boundaries,crucial,celebration,adjacent,filtering,tuition,spouse,exotic,threats,luxembourg,puzzles,reaching,vb,damaged,laugh,joel,surgical,destroy,citation,pitch,yo,premises,perry,proved,offensive,imperial,dozen,benjamin,teeth,cloth,studying,colleagues,stamp,lotus,salmon,olympus,separated,cargo,tan,salem,mate,likes,butter,pepper,weapon,luggage,burden,chef,zones,races,isle,stylish,slim,maple,luke,grocery,offshore,depot,kenneth,comp,alt,pie,blend,harrison,julie,occasionally,attending,emission,pete,finest,janet,bow,penn,recruiting,apparent,autumn,traveling,probe,midi,toilet,ranked,jackets,routes,packed,excited,helen,mounting,recover,tied,balanced,prescribed,catherine,timely,talked,delayed,chuck,reproduced,hon,dale,explicit,calculation,villas,ebook,consolidated,occasions,brooks,newton,oils,sept,exceptional,anxiety,whilst,unto,prompt,precious,minds,annually,considerations,pays,cox,fingers,sunny,ebooks,delivers,je,queensland,necklace,musicians,leeds,composite,cedar,arranged,lang,theaters,advocacy,raleigh,stud,fold,essentially,designing,threaded,uv,qualify,fingering,blair,hopes,mason,diagram,burns,pumps,slut,ejaculation,sg,vic,peoples,victor,mario,pos,attach,licenses,removing,advised,brunswick,spider,ranges,pairs,trails,preservation,hudson,isolated,interim,assisted,divine,streaming,approve,chose,compound,intensity,technological,syndicate,abortion,venues,blast,calcium,newport,addressing,pole,discounted,indians,shield,harvest,membrane,prague,bangladesh,constitute,locally,concluded,desperate,mothers,iceland,demonstration,governmental,manufactured,candles,graduation,bend,sailing,variations,sacred,morocco,tommy,springfield,refused,brake,exterior,greeting,oliver,congo,glen,delays,synthesis,olive,undefined,unemployment,scored,newcastle,velocity,relay,composed,tears,performances,oasis,cab,angry,fa,societies,brazilian,identical,petroleum,compete,ist,norwegian,lover,belong,honolulu,lips,escort,retention,exchanges,pond,rolls,thomson,barnes,wondering,malta,daddy,ferry,rabbit,profession,seating,dam,separately,physiology,collecting,das,exports,omaha,tire,dominican,chad,loads,friendship,heather,passport,unions,treasury,warrant,frozen,occupied,josh,royalty,scales,rally,observer,sunshine,strain,drag,ceremony,somehow,arrested,expanding,provincial,investigations,ripe,rely,hebrew,gained,rochester,dying,laundry,stuck,solomon,placing,stops,adjust,assessed,enabling,filling,sophisticated,imposed,silence,soviet,possession,cu,laboratories,treaty,vocal,trainer,organ,stronger,volumes,advances,vegetables,lemon,darkness,nuts,nail,
vienna,implied,span,stanford,stockings,joke,respondent,packing,statute,rejected,satisfy,destroyed,shelter,chapel,manufacture,layers,guided,accredited,appliance,compressed,bahamas,powell,mixture,bench,tub,rider,radius,perspectives,mortality,logging,hampton,christians,borders,pads,butts,inns,bobby,impressive,sheep,accordingly,architect,railroad,lectures,challenging,wines,nursery,harder,cups,ash,microwave,cheapest,accidents,stuart,contributors,salvador,ali,salad,monroe,tender,violations,foam,temperatures,paste,clouds,discretion,tanzania,preserve,poem,unsigned,staying,easter,theories,repository,praise,jeremy,venice,jo,christianity,veteran,streams,landing,signing,executed,katie,negotiations,realistic,integral,asks,relax,namibia,generating,christina,congressional,synopsis,hardly,prairie,reunion,composer,bean,sword,absent,photographic,sells,ecuador,hoping,accessed,spirits,modifications,coral,float,colin,bias,imported,paths,bubble,por,acquire,contrary,millennium,tribune,vessel,acids,cheaper,admitted,dairy,admit,mem,fancy,equality,samoa,achieving,tap,fisheries,exceptions,reactions,beliefs,ci,companion,squad,analyze,ashley,scroll,relate,divisions,swim,wages,suffer,forests,fellowship,invalid,concerts,martial,males,victorian,retain,colours,execute,tunnel,genres,cambodia,patents,yn,chaos,lithuania,wheat,chronicles,obtaining,beaver,distribute,readings,decorative,confused,compiler,enlargement,eagles,bases,vii,accused,bee,campaigns,unity,loud,bride,rats,defines,airports,instances,indigenous,begun,brunette,packets,anchor,socks,parade,corruption,stat,trigger,incentives,gathered,essex,notified,differential,beaches,dramatic,surfaces,terrible,cruz,pendant,dresses,baptist,scientist,hiring,clocks,females,wallace,nevertheless,reflects,taxation,fever,cuisine,surely,practitioners,transcript,inflation,thee,ruth,pray,compounds,pope,drums,contracting,arnold,reasonably,chicks,bare,hung,cattle,radical,graduates,rover,recommends,controlling,treasure,flame,tanks,assuming,monetary,elderly,pit,arlington,floating,extraordinary,tile,indicating,bolivia,spell,hottest,stevens,kuwait,exclusively,emily,alleged,limitation,compile,webster,struck,illustration,plymouth,warnings,construct,inquiries,bridal,annex,mag,inspiration,tribal,curious,affecting,freight,eclipse,sudan,downloading,shuttle,aggregate,stunning,cycles,affects,detect,actively,knee,prep,pb,complicated,fastest,butler,injured,decorating,expressions,ton,courier,shakespeare,hints,collapse,unlikely,oe,gif,pros,conflicts,beverage,tribute,wired,immune,travelers,forestry,barriers,cant,rarely,infected,offerings,martha,genesis,barrier,argue,incorrect,trains,metals,bicycle,furnishings,letting,arise,guatemala,celtic,thereby,jamie,particle,perception,minerals,advise,humidity,bottles,boxing,wy,renaissance,pathology,sara,bra,ordinance,hughes,bitch,jeffrey,chess,operates,survive,oscar,festivals,menus,joan,possibilities,duck,reveal,canal,phi,contributing,herbs,cow,manitoba,analytical,missions,watson,lying,costumes,strict,dive,circulation,drill,offense,bryan,cet,protest,assumption,jerusalem,hobby,tries,invention,nickname,fiji,enquiries,washing,exploring,trick,enquiry,raid,timber,intense,showers,supporters,ruling,steady,dirt,statutes,withdrawal,myers,drops,predicted,wider,saskatchewan,enrolled,screw,ministers,publicly,hourly,blame,geneva,veterinary,handed,suffered,informal,incentive,butterfly,mechanics,heavily,fifty,mistakes,numerical,ons,uncle,defining,counting,reflection,sink,accompanied,assure,invitation,devoted,princeton,jacob,sodium,randy,spirituality,meanwhile,proprietary,timothy,bric
k,grip,naval,medieval,porcelain,bridges,captured,watt,decent,casting,dayton,translated,shortly,cameron,pins,carlos,reno,donna,andreas,warrior,diploma,cabin,innocent,scanning,consensus,polo,copying,delivering,patricia,horn,eddie,uganda,fired,journalism,perth,frog,grammar,intention,syria,disagree,klein,harvey,tires,logs,undertaken,hazard,leo,gregory,episodes,circular,anger,mainland,illustrations,suits,chances,snap,happiness,arg,substantially,bizarre,glenn,ur,auckland,fruits,geo,ribbon,calculations,doe,conducting,trinidad,kissing,wal,handy,swap,exempt,crops,reduces,accomplished,geometry,impression,guild,correlation,gorgeous,capitol,sim,dishes,barbados,nervous,refuse,extends,fragrance,mcdonald,replica,brussels,tribe,neighbors,trades,superb,buzz,transparent,rid,trinity,charleston,handled,legends,boom,calm,champions,floors,selections,inappropriate,exhaust,comparing,shanghai,speaks,burton,vocational,davidson,copied,scotia,farming,gibson,fork,troy,roller,batch,organize,appreciated,alter,ghana,edges,mixing,handles,skilled,fitted,albuquerque,harmony,distinguished,projected,assumptions,shareholders,twins,rip,triangle,amend,anticipated,oriental,reward,windsor,zambia,completing,hydrogen,comparable,chick,advocate,sims,confusion,copyrighted,tray,warranties,escorts,thong,medal,coaches,vessels,harbour,walks,sucks,sol,sage,knives,vulnerable,arrange,artistic,bat,honors,booth,reflected,unified,bones,breed,ignored,polar,fallen,precise,sussex,respiratory,invoice,lip,sap,gather,maternity,backed,alfred,colonial,carey,forming,embassy,cave,journalists,danny,rebecca,slight,proceeds,indirect,amongst,wool,foundations,arrest,horizon,nu,deeply,marina,liabilities,prizes,bosnia,decreased,patio,tolerance,lloyd,describing,optics,pursue,lightning,overcome,eyed,ou,quotations,grab,inspector,attract,brighton,beans,bookmarks,ellis,disable,snake,succeed,leonard,lending,reminder,xi,searched,riverside,plains,raymond,abilities,initiated,sullivan,za,trap,lonely,fool,ve,lancaster,suspended,observe,julia,attitudes,karl,berry,collar,simultaneously,racial,bermuda,amanda,sociology,exhibitions,confident,retrieved,exhibits,officially,dies,terrace,bacteria,replied,novels,recipients,ought,delicious,traditions,jail,safely,finite,kidney,periodically,fixes,sends,durable,allied,throws,moisture,hungarian,referring,spencer,uruguay,transform,tablets,tuning,gotten,educators,tyler,futures,vegetable,verse,humanities,independently,wanting,custody,scratch,launches,henderson,bk,britannica,ellen,competitors,rocket,bullet,towers,racks,lace,nasty,latitude,consciousness,ste,tumor,ugly,deposits,beverly,mistress,encounter,trustees,watts,duncan,hart,bernard,resolutions,ment,forty,tubes,attempted,col,priest,floyd,ronald,queue,trance,nicholas,yu,bundle,hammer,invasion,witnesses,runner,rows,administered,notion,sq,skins,mailed,spelling,arctic,rewards,beneath,strengthen,defend,frederick,seventh,gods,une,welsh,belly,aggressive,advertisements,quarters,stolen,soonest,haiti,disturbed,determines,sculpture,ears,fist,fitting,fixtures,mere,agrees,passengers,quantities,petersburg,consistently,cons,elder,cheers,dig,taxi,punishment,appreciation,subsequently,om,nat,gravity,providence,thumb,restriction,incorporate,backgrounds,treasurer,essence,flooring,ethiopia,mighty,athletes,humanity,transcription,holmes,complications,scholars,remembered,galaxy,chester,loc,worn,synthetic,shaw,vp,segments,testament,twist,stomach,partially,buried,minimize,darwin,ranks,wilderness,debut,generations,tournaments,bradley,deny,anatomy,judy,fraction,trio,proceeding,cube,defects,uncertainty,breakdown,milt
on,reconstruction,subsidiary,clarity,rugs,sandra,adelaide,encouraging,furnished,monaco,settled,folding,comparisons,beneficial,belize,fate,promised,penny,robust,threatened,republicans,discusses,porter,gras,jungle,ver,responded,rim,zen,ivory,alpine,dis,prediction,fabulous,alias,individually,battlefield,literally,newer,kay,spice,oval,implies,soma,ser,cooler,consisting,periodic,submitting,overhead,ascii,prospect,shipment,breeding,citations,geographical,donor,mozambique,tension,trash,shapes,tier,earl,manor,envelope,diane,disclaimers,excluded,andrea,breeds,rapids,sheffield,bailey,aus,finishing,emotions,wellington,incoming,prospects,bulgarian,eternal,cite,aboriginal,remarkable,rotation,nam,productive,boulevard,eugene,ix,gdp,pig,metric,minus,penalties,bennett,imagination,joshua,armenia,varied,grande,closest,actress,mess,assign,armstrong,politicians,lit,accommodate,tigers,aurora,una,slides,milan,premiere,lender,villages,shade,chorus,christine,rhythm,digit,argued,dietary,symphony,clarke,sudden,accepting,precipitation,lions,ada,pools,tb,lyric,claire,isolation,speeds,sustained,matched,approximate,rope,carroll,rational,fighters,chambers,dump,greetings,inherited,warming,incomplete,chronicle,fountain,chubby,grave,legitimate,biographies,burner,investigator,plaintiff,finnish,gentle,prisoners,deeper,muslims,hose,mediterranean,worthy,reveals,architects,saints,carries,sig,duo,excessive,devon,helena,saves,regarded,valuation,unexpected,cigarette,fog,characteristic,marion,lobby,egyptian,tunisia,outlined,consequently,treating,punch,appointments,gotta,cowboy,narrative,enormous,karma,consist,betty,queens,quantitative,lucas,subdivision,tribes,defeat,distinction,honduras,naughty,hazards,insured,harper,livestock,exemption,tenant,cabinets,tattoo,shake,algebra,shadows,holly,silly,yea,mercy,hartford,freely,marcus,sunrise,wrapping,mild,fur,nicaragua,tar,belongs,readily,soc,fence,infinite,diana,relatives,lindsay,clan,legally,shame,satisfactory,revolutionary,bracelets,civilian,mesa,fatal,remedy,breathing,briefly,thickness,adjustments,genius,discussing,fighter,flesh,retreat,adapted,barely,wherever,estates,rug,democrat,borough,maintains,failing,ka,retained,pamela,andrews,marble,extending,jesse,hull,surrey,dem,blackberry,highland,meditation,macedonia,combining,brandon,instrumental,giants,organizing,shed,balloon,winston,ham,solved,tide,hawaiian,partition,invisible,consoles,funk,magnet,translations,jaguar,reel,sheer,commodity,posing,wang,kilometers,bind,thanksgiving,rand,hopkins,urgent,guarantees,infants,gothic,cylinder,witch,buck,indication,eh,congratulations,cohen,sie,puppy,acre,cigarettes,revenge,expires,enemies,aqua,chen,emma,finances,accepts,enjoying,conventions,eva,patrol,smell,pest,coordinates,carnival,roughly,promises,responding,reef,physically,divide,consecutive,satin,bon,deserve,attempting,representations,chan,worried,tunes,garbage,competing,combines,mas,beth,bradford,len,phrases,kai,peninsula,chelsea,boring,reynolds,dom,jill,accurately,speeches,reaches,considers,sofa,ministries,vacancies,parliamentary,prefix,lucia,savannah,barrel,typing,nerve,dans,planets,deficit,boulder,pointing,renew,coupled,viii,harold,circuits,texture,jar,somerset,acknowledge,thoroughly,antigua,nottingham,thunder,tent,caution,identifies,qualification,locks,modelling,namely,miniature,hack,dare,interstate,pirates,aerial,hawk,consequence,rebel,systematic,perceived,origins,hired,textile,lamb,madagascar,nathan,tobago,presenting,cos,centuries,magnitude,richardson,hindu,vocabulary,licking,earthquake,fundraising,weights,albania,geological,lasting,wicked,in
troduces,kills,pushed,ro,participated,junk,wax,lucy,answering,hans,impressed,slope,failures,poet,conspiracy,surname,theology,nails,evident,epic,saturn,organizer,nut,sake,twisted,combinations,preceding,merit,cumulative,planes,edmonton,tackle,disks,arbitrary,prominent,retrieve,lexington,vernon,sans,irs,fairy,builds,shaft,lean,bye,occasional,leslie,deutsche,ana,innovations,kitty,drain,monte,fires,algeria,blessed,luis,reviewing,cardiff,cornwall,favors,potato,panic,explicitly,sticks,leone,ez,citizenship,excuse,reforms,basement,onion,strand,sandwich,uw,lawsuit,alto,cheque,hierarchy,influenced,banners,reject,eau,abandoned,bd,circles,italic,merry,mil,gore,complement,cult,dash,passive,mauritius,valued,cage,requesting,courage,verde,extraction,elevation,coleman,hugh,lap,utilization,beverages,jake,efficiently,textbook,dried,entertaining,luther,frontier,settle,stopping,refugees,knights,hypothesis,palmer,medicines,flux,derby,peaceful,altered,doctrine,scenic,intersection,sewing,consistency,collectors,conclude,recognised,munich,oman,propose,azerbaijan,lighter,rage,uh,prix,astrology,pavilion,tactics,trusts,occurring,supplemental,travelling,talented,annie,pillow,induction,derek,precisely,shorter,harley,spreading,provinces,relying,paraguay,steal,parcel,refined,bo,fifteen,widespread,incidence,fears,predict,boutique,rolled,avon,incidents,peterson,rays,shannon,enhancing,flavor,alike,walt,homeless,horrible,hungry,metallic,blocked,interference,warriors,palestine,undo,atmospheric,wm,dana,halo,curtis,parental,strikes,lesser,publicity,marathon,ant,proposition,pressing,gasoline,apt,dressed,scout,belfast,dealt,niagara,inf,eos,charms,trader,bucks,allowance,denial,uri,designation,thrown,raises,gem,duplicate,criterion,badge,wrist,civilization,analyzed,heath,tremendous,ballot,varying,remedies,validity,trustee,weighted,angola,performs,realm,corrected,jenny,helmet,salaries,elephant,yemen,encountered,scholar,nickel,surrounded,geology,creatures,coating,commented,wallet,cleared,accomplish,boating,drainage,corners,broader,vegetarian,rouge,yeast,yale,newfoundland,sn,pas,clearing,investigated,ambassador,coated,intend,stephanie,contacting,vegetation,doom,louise,kenny,specially,owen,hitting,yukon,beings,bite,aquatic,reliance,habits,striking,myth,infectious,singh,gig,gilbert,continuity,brook,fu,phenomenon,ensemble,assured,biblical,weed,conscious,accent,eleven,wives,utilize,mileage,auburn,unlock,pledge,vampire,angela,relates,nitrogen,dice,dock,differently,framing,organised,musician,blocking,sorts,limiting,dispatch,revisions,papua,restored,hint,armor,riders,chargers,remark,dozens,varies,reasoning,rendered,picking,charitable,guards,annotated,convinced,openings,buys,replacing,watershed,councils,occupations,acknowledged,nudity,pockets,granny,pork,zu,equilibrium,inquire,pipes,characterized,laden,cottages,merge,privilege,edgar,develops,qualifying,estimation,barn,pushing,fleece,fare,pierce,allan,dressing,sperm,bald,frost,leon,institutes,mold,dame,fo,sally,yacht,tracy,prefers,drilling,herb,ate,breach,whale,traveller,appropriations,suspected,tomatoes,beginners,instructors,bedford,stationery,idle,mustang,unauthorized,clusters,competent,momentum,fin,io,pastor,mud,calvin,uni,shark,contributor,demonstrates,phases,grateful,emerald,gradually,laughing,grows,cliff,desirable,tract,ballet,ol,journalist,abraham,bumper,afterwards,religions,garlic,shine,senegal,explosion,banned,briefs,signatures,cove,casa,mu,daughters,conversations,radios,tariff,opponent,simplified,muscles,wrapped,swift,vagina,eden,distant,champagne,ala,decimal,deviation,superintendent,dip
,hostel,housewives,employ,mongolia,penguin,magical,influences,irrigation,miracle,reprint,reid,hydraulic,centered,robertson,yearly,penetration,wound,belle,rosa,conviction,hash,omissions,writings,hamburg,lazy,qualities,fathers,charging,cas,marvel,lined,cio,dow,prototype,petite,apparatus,terrain,pens,explaining,yen,strips,gossip,rangers,nomination,empirical,rotary,worm,dependence,beginner,boxed,lid,cubic,deaf,commitments,suggesting,sapphire,skirts,mats,remainder,crawford,labeled,privileges,marking,commodities,serbia,sheriff,griffin,declined,guyana,spies,neighbor,elect,highways,concentrate,intimate,reproductive,preston,deadly,molecules,rounds,refrigerator,intervals,sentences,exclusion,holocaust,keen,peas,receivers,disposition,variance,navigator,investigators,cameroon,baking,computed,needle,baths,cathedral,brakes,og,nirvana,ko,owns,til,sticky,destiny,generous,madness,climb,blowing,fascinating,landscapes,heated,lafayette,wto,computation,hay,salvation,dover,adrian,predictions,accompanying,vatican,brutal,selective,arbitration,token,editorials,zinc,sacrifice,seekers,isa,removable,yields,gibraltar,levy,suited,anthropology,skating,aberdeen,emperor,grad,bras,belts,blacks,educated,reporters,burke,proudly,necessity,rendering,inserted,pulling,curves,suburban,touring,clara,tomato,waterproof,expired,travels,flush,pale,hayes,humanitarian,invitations,functioning,delight,survivor,garcia,economies,alexandria,moses,counted,undertake,declare,continuously,johns,valves,gaps,impaired,achievements,donors,tear,jewel,teddy,convertible,teaches,ventures,nil,stranger,tragedy,julian,nest,painful,velvet,tribunal,ruled,pensions,prayers,nowhere,cop,paragraphs,gale,joins,adolescent,nominations,wesley,dim,lately,cancelled,mattress,likewise,banana,introductory,cakes,stan,reservoir,occurrence,idol,bloody,remind,worcester,charming,mai,tooth,disciplinary,annoying,respected,stays,disclose,affair,drove,upset,restrict,beside,mines,portraits,rebound,logan,mentor,interpreted,fought,baghdad,elimination,metres,hypothetical,immigrants,complimentary,pencil,freeze,performer,abu,titled,commissions,sphere,moss,concord,graduated,endorsed,ty,surprising,walnut,lance,ladder,italia,unnecessary,dramatically,liberia,sherman,cork,hansen,senators,mali,yugoslavia,bleeding,characterization,colon,likelihood,lanes,purse,fundamentals,contamination,endangered,compromise,masturbation,stating,dome,caroline,expiration,bless,engaging,negotiation,crest,opponents,triumph,nominated,electoral,welding,deferred,alternatively,heel,alloy,plots,polished,yang,gently,locking,casey,controversial,draws,blanket,bloom,lou,elliott,recovered,fraser,justify,blades,loops,surge,aw,tahoe,advert,possess,demanding,defensive,sip,forbidden,vanilla,deutschland,picnic,souls,arrivals,practitioner,dumb,smithsonian,hollow,vault,securely,examining,groove,revelation,pursuit,delegation,wires,dictionaries,mails,backing,greenhouse,sleeps,blake,transparency,dee,travis,endless,orbit,niger,bacon,survivors,colony,cannon,circus,forbes,mae,mel,descending,spine,trout,enclosed,feat,cooked,transmit,fatty,gerald,pressed,scanned,reflections,hunger,sic,municipality,joyce,detective,surgeon,cement,experiencing,fireplace,endorsement,disputes,textiles,missile,closes,seq,persistent,deborah,marco,assists,summaries,glow,gabriel,auditor,violin,prophet,bracket,isaac,oxide,oaks,magnificent,erik,colleague,naples,promptly,adaptation,hu,harmful,sexually,enclosure,dividend,newark,kw,paso,phantom,westminster,turtle,distances,absorption,treasures,warned,ware,fossil,mia,badly,apollo,wan,disappointed,persian,continually,commu
nist,greene,grenada,creations,jade,scoop,acquisitions,foul,earning,excitement,somalia,verbal,blink,presently,seas,carlo,mysterious,novelty,bryant,tiles,librarian,switched,stockholm,pose,grams,richards,promising,relaxation,goat,render,carmen,ira,sen,thereafter,hardwood,temporal,sail,forge,commissioners,dense,brave,forwarding,awful,nightmare,reductions,southampton,impose,organisms,telescope,asbestos,portsmouth,meyer,enters,pod,savage,advancement,wu,willow,resumes,bolt,gage,throwing,existed,whore,generators,lu,wagon,dat,favour,knock,urge,generates,potatoes,thorough,inexpensive,kurt,peers,roland,quilt,huntington,creature,ours,mounts,syracuse,lone,refresh,aluminium,michel,subtle,notre,shipments,stripes,antarctica,cope,shepherd,cradle,chancellor,lime,kirk,flour,controversy,legendary,sympathy,choir,avoiding,beautifully,blond,expects,fabrics,hygiene,wit,poultry,virtue,burst,examinations,surgeons,bouquet,promotes,mandate,departmental,ind,corpus,johnston,terminology,gentleman,fibre,reproduce,shades,jets,qui,threatening,spokesman,frankfurt,prisoner,daisy,halifax,encourages,assembled,earliest,donated,insects,terminals,crude,morrison,maiden,sufficiently,examines,viking,myrtle,bored,yarn,knit,conditional,mug,bother,budapest,knitting,attacked,mating,compute,arrives,translator,automobiles,allah,continent,ob,fares,longitude,resist,challenged,hoped,pike,insertion,hugo,wagner,constraint,touched,strengthening,cologne,wishing,ranger,smallest,insulation,newman,marsh,scared,infringement,bent,laos,subjective,monsters,asylum,robbie,stake,cocktail,outlets,varieties,arbor,poison,dominated,costly,derivatives,prevents,stitch,rifle,severity,notable,warfare,judiciary,embroidery,mama,inland,greenland,interpret,accord,modest,countryside,sorting,liaison,unused,bulbs,consuming,tourists,sandals,seconded,waist,attributed,seychelles,fatigue,owl,patriot,sewer,crystals,kathleen,bosch,forthcoming,num,treats,marino,detention,carson,exceeds,complementary,gallon,coil,battles,traders,carlton,bitter,memorandum,burned,cardinal,dragons,converting,romeo,din,incredibly,delegates,turks,roma,balancing,att,vet,sided,claiming,courtyard,presidents,offenders,depart,cuban,tenants,expressly,distinctive,lily,brackets,unofficial,oversight,privately,minded,resin,allies,twilight,preserved,crossed,kensington,monterey,linen,rita,ascending,seals,nominal,alicia,decay,weaknesses,quartz,registers,eighth,usher,herbert,authorised,improves,advocates,phenomena,buffet,deciding,skate,joey,hackers,tilt,granite,repeatedly,lynch,masses,transformed,athlete,franc,bead,enforce,similarity,landlord,leak,timor,assorted,implements,adviser,flats,compelling,vouchers,expecting,heels,voter,urine,capri,towel,ginger,suburbs,imagery,sears,als,flies,competence,inadequate,crying,matthews,amateurs,crane,defendants,deployed,governed,considerably,investigating,rotten,habit,bulb,scattered,honour,useless,protects,northwestern,audiences,iris,coupe,hal,benin,bach,manages,erosion,abundance,carpenter,khan,insufficient,highlands,peters,fertility,clever,primer,che,lords,bu,tends,enjoyable,crescent,freshman,playground,negotiate,sixty,exploit,orgies,permanently,concentrated,distinguish,ei,projections,spark,illustrate,lin,patience,securing,pathway,shallow,stir,spike,plated,jacques,drawer,ingredient,togo,lifting,judith,curtain,disclosed,davies,tactical,pilots,copenhagen,expedition,pile,operative,humour,maturity,caller,distortion,prosecution,het,tonga,imprint,natalie,receipts,assisting,shirley,sanctions,goodbye,emerged,defect,poorly,goddess,backs,observers,magnets,formulas,spacious,shoulders,nas,a
rgues,wade,soils,chapman,organs,det,loyalty,beloved,sometime,ballard,beating,faithful,libya,offence,invested,whatsoever,numbered,terminated,expands,sedan,pony,comprises,leap,bolton,founding,swan,covenant,dropping,archaeology,sailor,fittings,lining,banquet,cares,sanctuary,flora,statue,hilary,quotation,equals,hardy,caravan,diagrams,harness,manipulation,bells,vascular,alongside,impressions,yankees,forwarded,gal,transmitter,dorothy,freeman,andre,ems,puppies,relaxing,delphi,trophy,emotion,nets,sights,uniforms,disasters,asterisk,versatile,liquor,kindergarten,profitable,wounded,clayton,derivative,suffolk,necklaces,tot,occupancy,doses,educate,baked,glove,prejudice,herzegovina,probable,baldwin,incorporation,rem,evolutionary,arriving,decoration,trojan,assistants,counselor,spinal,eliminated,sooner,struggling,enacted,tenure,plush,weber,unstable,elk,nelly,fulfill,urged,reflecting,brent,gaining,definitive,appropriately,shifts,inactive,lansing,traveled,adapt,extracted,accession,patterson,carriage,therein,terminate,rex,fuels,traditionally,withdraw,soy,brett,anchorage,paula,landmark,greens,neat,naming,stern,bentley,bud,slaves,dentist,utilizing,mis,burkina,tutor,idiot,comprised,winnipeg,charities,mickey,sebastian,aliens,domino,raven,defeated,strains,dwelling,slice,tanning,gambia,aspen,lacking,symbolic,cest,objectionable,angles,pressures,webb,mediation,venus,bump,cowboys,flames,primitive,auf,stocking,esp,balloons,malcolm,georgetown,norwich,halls,decorations,pause,simplicity,postscript,dividends,relaxed,periodicals,pearson,demon,welcomed,infinity,gabon,notation,chandler,aunt,interviewed,crow,dia,discontinued,concurrent,decides,caption,bargaining,complain,pulmonary,adhesive,toledo,asses,altitude,compass,closet,couch,evolved,downs,exceeding,rogue,unfair,electronically,augusta,infantry,renowned,corridor,philosophical,scripture,celebrating,sahara,justification,rebuild,vacant,manuscript,fixing,gram,hiding,methodist,dye,sits,alphabet,shelves,toes,cleaned,honored,optic,hannah,telephones,insect,frances,diaries,chili,grief,leicester,sweat,dolphin,pendants,wonders,ventilation,masks,bust,lateral,quake,alley,gardner,sanders,pathways,telegraph,pertaining,memorable,professors,monument,formally,twain,ile,nevis,dew,lavender,justified,withdrawn,breeze,debates,gems,outgoing,mann,yankee,outs,deficiency,gum,progression,adv,saddle,malaria,loyal,torrent,odyssey,spite,nero,capita,imply,inaccuracies,tendency,caledonia,wholly,chill,utilized,embrace,ein,liner,manila,auxiliary,initiate,ua,elevated,purely,fry,lifts,vivid,allegations,stationary,corresponds,foil,whitney,celebrated,alarms,hunters,roi,allison,stairs,kt,acted,byron,critique,honestly,skull,continuation,carnegie,servant,falcon,jointly,canadians,avoided,comprising,tick,terrier,listened,explanations,renewed,incorporating,variant,riley,equatorial,critic,sediment,translators,squares,deg,bot,lea,vans,od,honeymoon,percussion,glue,cone,margins,sands,survived,spinning,adequately,spectral,prevalence,dominica,contaminated,fragment,finishes,lecturer,embroidered,bucket,steak,commits,cobra,threw,sutton,djibouti,authorize,decorated,credited,cherokee,apo,ao,recruit,simmons,gals,hoc,wherein,appearances,performers,dessert,dissertation,walsh,nos,marry,blankets,enthusiasm,confusing,celebrations,approaching,bounce,ivan,spiral,governors,weakness,wills,katherine,atoms,jacobs,mauritania,tissues,reminded,drake,cynthia,roosevelt,practicing,schmidt,nicely,surprisingly,expressing,della,laurel,carolyn,rails,fried,cairo,ambulance,practically,traded,signaling,vivo,domination,shrimp,chords,molecule,dedicatio
n,desires,woody,dismissed,cried,psychic,cracks,analyzing,sincerely,beaten,piercing,antilles,establishments,marginal,visions,efficacy,prestige,cocaine,accelerated,pinnacle,tucker,recognizes,plugs,responsive,supra,omitted,molly,proximity,ku,belonging,unbiased,pear,chiefs,franz,collision,supplementary,clue,scandal,lodges,dangers,lys,travellers,gia,scream,discrepancies,pirate,senses,repeats,willie,rival,slower,simulated,culinary,fairfax,beck,huh,accountant,propaganda,offender,waterloo,warwick,rounded,boarding,vanity,mitigation,tome,prof,homer,daylight,macdonald,gases,dependency,dioxide,fireworks,genus,approached,catching,cutter,connects,ont,liberals,aperture,roofing,dixon,elastic,melody,sins,cousin,hath,recalls,consultations,debts,phillip,burial,balcony,prescriptions,prop,avril,willis,myths,camden,coupling,knees,neglect,emerge,winchester,clutch,shy,poets,auditorium,pedro,maid,sid,carrie,towels,canterbury,trent,barber,intuitive,rigid,sta,degradation,ret,orthodox,erin,ferguson,fragments,mariana,qualitative,claude,minorities,blown,diffusion,baton,polynesia,barton,umbrella,rods,stimulation,abbey,pigs,olivia,refugee,straps,maya,discourse,lancashire,headache,stained,marital,socialist,bruno,attracted,undertaking,slavery,notwithstanding,feasible,romans,credibility,shores,fest,thames,flowing,montenegro,deed,whirlpool,perfumes,sustain,mechanic,bauer,eliminating,rejection,bowls,dissemination,cardinals,cosmic,dawson,defective,lengths,beacon,hoover,politically,elective,forensic,botanical,quartet,suspense,drafting,cruel,observing,advertised,commencement,southwestern,conform,helmets,firing,eager,denise,touching,vacancy,papa,settlements,strawberry,chang,gloria,elevator,pupil,feast,maggie,redemption,profound,canton,nina,registering,seth,warn,conservatives,bonnie,laying,provisional,compiling,strive,releasing,martinique,shells,painter,ankle,peso,leagues,monkeys,historically,transitions,prevented,digits,err,banker,sup,easiest,borrow,bamboo,lv,denotes,communicating,ki,decks,vibration,stepped,vent,blunt,protector,aux,react,understands,rises,issuing,accents,insane,buddha,voyage,een,colonel,transitional,mozart,acceleration,sketch,hoffman,balances,firearms,nightly,pitt,deduction,dancer,coats,pol,capsules,hyde,firmly,doo,dots,pursuing,aston,mugs,washed,resonance,mosaic,rhodes,fiesta,vase,forcing,fairs,flute,durability,meadows,hindi,harsh,outfit,substitution,burma,cease,deserves,aboard,irving,perfection,joints,overwhelming,poles,bounds,lyon,santiago,vera,advising,altogether,devils,dignity,europa,wondered,cheshire,boyd,sliding,accumulation,descriptive,inst,feasibility,negotiating,homo,pier,sioux,cote,premiums,lutheran,fellows,valencia,superman,perkins,ideally,splash,equip,saga,probation,ast,gran,commissioned,hedge,ke,fender,violet,dancers,mutation,envelopes,alle,compulsory,favorable,rue,preparations,maxwell,illustrates,inheritance,curry,oblique,pearls,worms,satisfying,succeeded,apples,elf,dewey,surviving,pouch,advent,proposes,hooks,ces,exploitation,singers,mayo,tasmania,mansion,cha,surrender,schneider,accumulated,arsenal,dub,screws,pyramid,enjoys,hacking,stripe,averages,peaks,tai,como,lisp,limousine,churchill,affirmative,keynote,planted,petitioner,spoon,bombs,niche,fortunately,cigar,vis,calculating,erie,berkshire,proportional,credentials,deprecated,municipalities,chin,locker,jenkins,squash,expectation,severely,spotted,curse,ajax,coconut,interrupt,conductor,wont,liberation,grandfather,removes,luxurious,titan,booked,anita,indirectly,nile,blessing,lumber,pillows,portals,illustrator,asleep,potassium,prompted,shout,presidency,
abnormal,delicate,convince,whoever,straw,lifted,mankind,uncertain,paramount,upright,breakfasts,inspectors,emergencies,ernest,shocked,alcoholic,bakery,lieutenant,orchid,histories,loses,atkins,variability,observatory,soda,waited,preventive,peach,calculus,stefan,breathe,dunn,smiling,ounces,economically,uncut,intact,noting,shifting,samurai,moines,ivy,delegate,lightly,negotiated,herman,congestion,runners,stove,accidental,talents,nixon,refuge,brady,guadeloupe,walton,carved,ark,freak,obstacles,preferably,bluff,jasper,sed,newborn,sadly,laughed,avail,emerson,regulate,orchard,mythology,trousers,hatch,replaces,tomb,regina,stein,shortage,privileged,spill,goodness,drift,extracts,professions,explored,mysteries,fuller,decreases,crisp,cor,keeper,reinforced,johannesburg,spells,specifying,buddhist,inevitable,etiquette,environ,nic,coloured,births,kr,cubs,wheeler,ritual,miguel,pulp,onset,interpreter,specimens,initiation,assay,reconciliation,pots,recognizing,leigh,slam,respects,tents,plaque,accounted,deposited,lowe,beavers,crib,defending,pulls,autonomous,granting,motoring,appropriation,condensed,philippine,theological,quietly,scenery,drying,assemblies,collateral,learner,welcomes,swallow,tara,transplant,usenet,marines,lighthouse,proves,crab,jen,brightness,maurice,brooke,consumed,maxim,bore,depreciation,technically,enjoyment,cows,austrian,correspond,slate,suzanne,confined,inhabitants,straightforward,delighted,morton,peel,cue,jupiter,simultaneous,monopoly,debris,han,intentions,pagan,widow,sac,peg,randall,benson,sleeves,troubled,footnote,vibrant,evolving,sweater,approximation,skies,barrett,burners,alison,fitzgerald,kicks,disappeared,canoe,sovereign,reminds,organism,corrupt,violated,correspondent,drought,bake,hurricanes,symptom,laughter,propagation,ignorance,explosive,inventor,scaling,juicy,moody,fashioned,grains,vicinity,thyroid,purification,heal,southeastern,wizards,horoscope,prosperity,rainfall,mum,launching,pedal,plantation,storing,asa,tote,jumped,seemingly,tuned,passionate,staples,mayer,backward,sour,combustion,scrap,administer,bilateral,bella,blondes,disposable,williamson,sock,gentlemen,terra,literal,questioned,guiding,charcoal,vapor,beware,aloud,glorious,overlap,handsome,grounded,bail,goose,fn,judgement,cruiser,cumberland,gifted,esteem,cascade,endorse,strokes,shelby,hen,ancestry,dolphins,adopting,landed,nucleus,detached,scouts,warsaw,ib,mist,verb,chic,objection,phosphate,noisy,abide,sentinel,birthdays,preserving,vest,neal,economist,meridian,marriages,regret,stakes,rotating,brigade,movable,doubles,bliss,humiliation,tens,litter,reflective,abbreviations,executing,greenwich,flooding,rugged,jelly,grandmother,renovation,puma,appoint,panthers,perceptions,greenwood,ignition,humble,petrol,midway,mania,edwin,ax,clare,recognise,hostile,aphrodite,establishes,whites,rant,trapped,bolts,diplomatic,fringe,linguistic,internally,planetary,laurent,ego,manuel,gaza,influenza,gill,rude,sang,steele,citing,viewpoint,nay,servants,meanings,conception,unemployed,heavenly,exeter,amusement,middlesex,curl,albanian,overflow,hastings,subsidies,thirds,willingness,implicit,patriotic,simplify,darling,schwartz,satan,ornaments,oppose,terrific,definite,congregation,regiment,cheer,everett,reviewers,misleading,marty,vine,vale,whereby,deceased,sparks,simpler,captures,capitalism,hancock,falkland,cur,mammals,grape,russ,peppers,deeds,lively,inequality,educator,premature,tripod,immigrant,demonstrations,obsolete,rust,lon,interfere,traps,shuffle,wardrobe,vin,successes,racer,fabrication,guilt,sweep,nash,exploited,bladder,inflammatory,iss,immunity,bets,doyl
e,ducks,paints,neighbourhood,cheating,carr,fade,tastes,storms,smiled,jurisdictions,scrutiny,regeneration,lunar,differentiation,shields,nonsense,invented,elaine,posed,subjected,tasting,gwen,mob,expose,borrowing,arises,imf,precautions,branded,manning,lisbon,forks,monk,boxer,shining,weigh,clerical,voyager,hobart,moose,dorset,buenos,conscience,crush,mystic,solicitation,rectangular,fischer,pooh,enthusiast,positively,shaping,ich,afghan,inspire,paulo,torn,meantime,pumping,patented,revival,disappear,lever,redundant,regency,tasty,gag,mccarthy,heck,civilians,bark,carts,wasted,cocoa,invites,cushion,reversed,lynx,goa,specimen,ancestors,panther,mixes,graves,branding,examiner,vineyard,meadow,feeder,mercer,roms,goodman,listener,chloride,awaiting,kane,becker,bulls,orion,councillor,hurry,clarkson,beneficiary,hanson,offspring,panorama,roth,odor,demanded,wastes,clash,fidelity,sis,castro,flew,holden,ale,sem,rhapsody,trumpet,solitaire,decreasing,freezing,kaiser,wallis,criminals,retire,rumors,accomplishments,emergence,theatres,apex,crimson,compassion,needing,twentieth,pronounced,extensively,stain,conrad,wished,transient,kicked,coloring,curb,reign,trivial,coke,clauses,baron,sensible,unlawful,bates,webs,swinging,accountable,thrust,proving,opposing,novice,hewitt,dei,delightful,cane,cruising,fury,personalities,stiff,todo,noah,wore,christchurch,traces,rabbi,puffy,weston,headings,enthusiasts,ridiculous,scattering,secretaries,contracted,elbow,fights,scholarly,detailing,stark,roberto,strongest,hammond,padded,circa,revise,contributes,surroundings,proficiency,uranium,honours,consolidate,daniels,billions,hut,stafford,labrador,refusal,lima,suppression,weaver,readiness,secular,majesty,fishery,teresa,distributing,estimating,outdated,dues,pewter,distress,pumpkin,notably,intends,trevor,homosexual,garment,supplying,secondly,razor,cough,cerebral,grandma,oceans,displacement,backwards,arrows,volunteering,presumably,plea,constructive,bundles,tibet,pres,isles,stretching,ovens,garrett,esther,abundant,deductible,priests,accompany,compares,hesitate,inspiring,prey,deposition,laurie,tas,zodiac,pavement,keller,pedestrian,fencing,artery,inlet,rub,violate,stimulate,realise,fluids,conveniently,lick,gov,stealth,ter,ness,repayment,canopy,gloss,whip,porch,pertinent,lifelong,promoter,collegiate,construed,interchange,remotely,fletcher,concise,fibers,handful,brains,curtains,eaten,indigo,retaining,kelley,autobiography,conditioned,prohibition,motions,emphasize,excite,rebels,believing,hilarious,salisbury,gu,quoting,sinks,steep,dynasty,creed,nan,raiders,spreads,elegance,volatile,pointers,sensory,throne,chartered,slopes,socially,unfortunate,seized,territorial,leases,consisted,randolph,memoirs,alkaline,expire,och,midst,borne,forgive,competitor,mansfield,neighbours,marvin,conversions,usable,tempo,mutations,readable,almanac,conway,ay,gail,responds,denote,slayer,payne,purchaser,relies,inserting,tibetan,prepares,concludes,waterford,rodney,cylinders,mus,selects,fulton,directing,nationality,torch,zurich,stretched,depressed,encounters,haunted,spares,symmetry,bout,salons,olympia,hank,negligence,screened,helper,carlisle,rancho,transferring,stepping,hacks,attic,appetite,sensation,piper,morality,honorable,wealthy,handicap,skinny,sewage,endowment,demonstrating,avec,sonoma,esta,defender,amos,wretch,sunlight,stems,wo,ventura,convey,ang,evergreen,bearings,govern,feather,fond,sore,fiat,sixteen,blinds,traits,tightly,graded,successor,intrusion,sickness,guiana,underneath,prohibit,noel,cans,abused,avery,brushes,tenth,anthology,prosecutor,smiles,merged,auditors,grandchildren
,desks,capsule,aided,suspend,eternity,introductions,weighing,currents,aide,kindly,nes,protests,sharks,notch,minors,dances,revealing,reprinted,fernando,mapped,resurrection,lieu,decree,tor,discovering,tuberculosis,lacks,horizons,daytime,elaborate,contour,gamble,fra,descent,gravel,analyse,disturbing,judged,shutter,illusion,ambitious,ole,notorious,ibid,residue,reds,enlarged,stephens,transforming,stripping,bart,assert,fluctuations,bowie,archaeological,inspect,thrice,babylon,edison,casualty,musings,poses,noir,eli,evan,mushroom,designate,scent,sequel,gymnastics,titanic,knob,wolves,exquisite,upward,sentenced,dundee,principe,acquiring,judging,unchanged,kicking,meg,fines,grasp,streak,ounce,thirteen,tragic,theodore,buena,irrelevant,professionally,liberties,sounding,milano,toast,happily,hooked,shrink,knox,unesco,mutually,beaded,remembering,boca,exodus,compartment,brittany,dove,testified,iis,cunningham,derive,affinity,presbyterian,pretend,buddhism,amnesty,borrower,gloucester,warrants,owens,fairness,needles,coll,quota,discreet,versa,imp,oi,mack,pu,sung,lowell,whichever,starr,elliot,uae,chooses,tuscany,crowded,tickling,wee,unreal,wounds,advisers,manufactures,physiological,addison,charters,generalized,unprecedented,flint,dummy,financially,awake,sanitation,swivel,ally,dissolved,cleanliness,kung,collectively,inhibition,burnt,solidarity,frustrated,muhammad,alma,ger,hanover,inverse,clifton,holt,isis,verdict,nominee,medals,dickinson,christi,lister,recurring,studs,rhetoric,modifying,incubus,impulse,surveyed,creditors,dull,tis,cabins,commenced,ballroom,employing,satellites,ignoring,stevenson,coherent,beetle,converts,majestic,bicycles,omni,clifford,critically,cy,composers,localities,owe,reciprocal,accelerate,hatred,questioning,manifest,indications,petty,permitting,som,behave,bees,zeppelin,felix,shiny,carmel,encore,smash,angelina,braun,destructive,sockets,claimant,psa,ample,countless,energies,repealed,listeners,abusive,merits,scarf,strangers,garland,voor,riviera,apprentice,obscure,napoleon,glamour,hated,sigh,trolley,principals,sidney,spicy,frankly,chronological,itinerary,fools,beard,discoveries,economical,miniatures,wedge,adjusting,mock,peggy,bats,patriots,ruins,sheila,dependencies,benton,chateau,denis,homestead,changer,sergeant,melt,syrian,ned,cypress,courtney,cites,prospectus,protectors,interiors,encouragement,disadvantages,abbott,tailor,chocolates,faux,supervised,interpreting,pascal,tha,serenity,ore,pant,sheridan,gallons,attainment,sanitary,cooperate,dreaming,fortunate,mushrooms,interpretations,geoffrey,faults,silva,grease,diablo,cairns,premise,epidemic,prima,rite,cinnamon,lac,discharged,alba,underworld,variants,palms,lawsuits,seated,lattice,realization,absorbed,sirius,chord,vous,turf,asphalt,improper,dilemma,rebuilding,livingston,commenting,shifted,tangible,smoked,hawks,irons,comet,berg,baltic,corrective,competency,muse,probing,teachings,tyne,fowler,xv,youngest,contingent,refreshing,syrup,xii,warmth,hawkins,lust,correlated,augustine,dominion,verses,astronomical,solvent,luna,amplitude,aesthetic,commercially,dion,wolfgang,completeness,irregular,barker,solids,capturing,certify,consulted,realised,jude,eighteen,singular,jennings,demons,unacceptable,redistribute,coping,baxter,outbreak,abdominal,deficiencies,curved,milestone,erase,lien,nip,bites,prose,marx,incidental,toni,arguing,vein,hale,swear,bel,clown,spontaneous,summers,taboo,equestrian,malicious,consume,amazed,fourteen,legislators,volcano,capacities,skeleton,tsp,suspects,displaced,sounded,honesty,dwarf,bis,northeastern,shocks,rewarding,battalion,candid,schooling
,thornton,schoolgirl,caesar,pines,stellar,davenport,locating,monogram,philippe,aix,ornament,urges,sophie,attacking,microscope,threaten,bait,badges,kitten,brides,dent,stealing,bullets,emphasized,glossy,informations,haired,alterations,pablo,biographical,confirms,cavity,molded,vladimir,ida,probate,terrestrial,completes,beams,props,incense,formulated,dough,stool,towing,welch,rosemary,millionaire,turquoise,exposures,boone,substituted,horde,paperwork,nanny,suburb,hutchinson,cohort,succession,alliances,sums,averaged,glacier,pueblo,rigorous,relieve,clarion,override,angus,enthusiastic,lame,squeeze,sar,burgundy,struggles,farewell,soho,ashes,vanguard,natal,locus,evenings,misses,troubles,elton,purity,shaking,witnessed,cellar,friction,prone,valerie,enclosures,mer,equitable,fuse,lobster,judaism,atlantis,amid,onions,corinthians,crosses,uncomfortable,sylvia,furnace,poisoning,doubled,clues,inflammation,rabbits,icc,transported,crews,goodwill,anxious,tariffs,norris,ly,baptism,cutlery,overlooking,knot,rad,gut,staffordshire,factories,swords,advancing,timed,evolve,yuan,esa,suspicious,leased,subscribed,tate,dartmouth,brewing,coop,blossom,scare,confessions,bergen,lowered,thief,prisons,pictured,feminine,grabbed,rocking,nichols,blackwell,fulfilled,sweets,nautical,imprisonment,employs,gutenberg,bubbles,ashton,pitcher,judgments,muscular,motif,illnesses,plum,saloon,prophecy,loft,historian,elm,facsimile,hurts,folded,sofia,comprise,lump,disposed,chestnut,engraved,halt,alta,pastoral,unpaid,ghosts,doubts,locality,substantive,bulletins,worries,hug,rejects,spear,nigel,referee,transporter,jolie,broadly,ethereal,crossroads,aero,constructing,smoothly,parsons,bury,blanc,autonomy,bounded,insist,birch,slash,exercised,detecting,howell,digestive,entertain,cinderella,sesame,duct,touches,joanne,housewife,pursued,lend,corvette,yachts,stacy,christie,unrelated,lois,levi,stimulating,mont,misuse,cosmos,speculation,dixie,pans,enforced,legion,fulfillment,assertion,shook,lincolnshire,dismissal,mah,shocking,overland,prolonged,isaiah,backbone,unanimously,sausage,neighboring,uncommon,centralized,stratford,heidi,objections,unpublished,ames,slaughter,enlightenment,pistol,juniors,rockets,seymour,arithmetic,supposedly,bombay,originals,enrichment,milford,buckle,bartlett,fetch,kitchens,wat,rey,divers,townsend,blackburn,founders,sundays,upside,admiral,patron,sandwiches,sinclair,boiler,anticipate,induce,annapolis,padding,diagonal,unite,cracked,debtor,polk,mets,shear,mortal,sovereignty,franchises,rams,cleansing,gown,ponds,archery,excludes,sabbath,ruin,trump,nate,escaped,precursor,mates,stella,passages,vu,cereal,comprehension,sy,tow,resolving,drills,alexandra,champ,agreeing,rented,deductions,harrisburg,brushed,augmentation,otto,annuity,assortment,credible,ik,cultured,importing,deliberately,openly,crawl,theo,sparkling,bindings,convincing,flaws,este,tracing,deviations,incomes,fragile,jeremiah,sapiens,nyt,olsen,serbian,hai,restoring,sanchez,rushing,behold,amherst,alteration,murdered,hazel,ledger,scarlet,crushed,laughs,connie,referendum,modulation,statues,depths,spices,communion,uncertainties,colonies,followers,caldwell,squadron,bei,rupee,subsidy,demolition,irene,felony,lungs,monuments,veronica,filtered,growers,vinci,adj,haul,acknowledgement,duly,roasted,tenders,inviting,rig,ov,mick,mustard,strait,masterpiece,obey,donkey,jacks,conceived,boasts,praying,oss,multiply,intercourse,radial,mare,instructed,stole,kirby,armour,summarized,avalanche,northampton,manuscripts,cary,exhibited,disciples,shaving,bishops,kite,destroying,humorous,faa,corona,heap,griffith,erection
,quasi,energetic,disturbance,saunders,ribbons,jew,exile,bilder,reside,cashier,jaw,butterflies,eats,knots,flea,offences,anton,pals,celebrates,hail,armenian,longitudinal,historians,realities,mentions,samson,jumps,fleming,optimistic,wasting,acclaimed,seldom,morrow,glitter,giovanni,lasted,awhile,scaled,contingency,wiltshire,vague,wraps,constituents,herd,handicapped,exported,lag,warns,harmless,sting,bravo,believers,dispersion,curiosity,resting,missiles,persistence,coarse,continents,carpets,recovering,submarine,blessings,prevailing,originated,axe,sculptures,intrinsic,thoughtful,nicht,archer,hertfordshire,warmer,calf,basil,grouped,dominate,orient,contra,damaging,populated,renee,boiling,journeys,parsing,splitting,derbyshire,abandon,rave,ej,dy,cigars,nicolas,inference,ras,recalled,transformer,weiss,declarations,rib,chattanooga,giles,drafts,excursions,jerk,shack,marrow,tavern,bathing,lambert,epilepsy,allowances,goggles,ses,unhappy,foregoing,certainty,sleek,gerard,antarctic,ord,successive,neglected,ariel,monty,cafes,classmates,hitch,fracture,ama,foremost,nineteenth,chesapeake,mahogany,actresses,clarence,ernst,buster,moderated,mal,nassau,flap,ignorant,allowable,compositions,sings,marcos,sorrow,carte,canned,collects,treaties,endurance,teaspoon,insulated,dupont,harriet,philosopher,rectangle,woo,queer,pains,decatur,wrapper,ahmed,buchanan,drummer,sobre,ceremonies,satisfies,appellate,comma,conformity,avant,supper,fulfilling,hooded,instability,seminary,presenter,offenses,emulation,lengthy,sonata,fortress,contiguous,perez,inaccurate,explanatory,settlers,stools,ministerial,xavier,torah,fao,publishes,stacks,owning,andersen,sermon,facilitating,complained,ferdinand,taps,thrill,lagoon,undoubtedly,withheld,insisted,reluctant,headaches,ramsey,oath,pigeon,rivals,freed,constrained,parrot,magnum,invoked,invaluable,keystone,inclined,gala,cheek,traction,utterly,gavin,illuminated,lasts,gloucestershire,psychologist,dane,claudia,perpetual,solicitor,clustering,glimpse,verbatim,innocence,quicker,grandparents,cardboard,attributable,sketches,angelo,tertiary,exhausted,smarter,shelters,attain,dora,inconvenience,tang,vaccination,farther,chats,riot,fats,mandarin,dungeon,germans,lilly,shire,mosquito,kashmir,lyons,putnam,corpse,speedy,ming,lush,barrels,transformations,analogue,werner,clyde,honorary,irwin,brewer,exchanged,adhere,fran,rafael,ccc,enquire,toilets,mains,whales,lindsey,parity,partitions,grim,hubbard,prism,chasing,flop,aggregation,shelley,batting,borrowed,rests,toss,depicted,grapes,proposing,winding,ripped,cobalt,pity,downward,catalogues,aspire,harvesting,garfield,groom,jewels,saturated,georges,quincy,doughty,weeds,stripped,clive,fixture,canary,steadily,imagined,darby,woke,fills,proportions,grips,clergy,solicitors,moderately,altar,salvage,stanton,creators,kilometres,cuff,repeating,empires,oyster,sturdy,massacre,undergo,risen,blended,imperative,beg,digging,lantern,catches,evangelical,eaton,ruler,henri,tokens,piping,swept,staring,seventy,troop,arose,decomposition,chatham,becky,elders,interpreters,supporter,klaus,conquest,repairing,assemble,whistle,dresden,diversified,fertilizer,analytic,predominantly,amethyst,woodward,rewritten,concerto,adorable,ambition,torres,apologize,restraint,eddy,condemned,berger,parole,corey,kendall,slips,trays,stewardship,esq,kisses,kerr,regulating,flock,exporting,arabian,bending,boris,ammunition,vega,pleasures,shortest,denying,shave,sexe,disruption,galway,colt,artillery,furnish,precedence,grinding,rubbish,missionary,knocked,swamp,pitching,bordeaux,manifold,wf,tornado,possessed,upstairs,turtles,vauxha
ll,welcoming,learns,manipulate,dividing,hickory,renovated,inmates,slices,cody,lawson,quo,damned,beethoven,faint,rebuilt,proceeded,lei,tentative,peterborough,fierce,jars,authenticity,hips,rene,gland,wigs,resignation,striped,zion,blends,garments,fraternity,tapestry,originating,stu,chap,blows,inevitably,converse,gardener,winnie,ita,higgins,warwickshire,penguins,attracting,jeeves,harp,wes,denton,anthem,tack,whitman,nowadays,woodstock,sack,inferior,abuses,inspected,deb,jockey,indicative,incumbent,ithaca,edmund,upholstery,aggression,practiced,ella,casualties,monarch,housed,administering,temptation,havana,roe,nasal,restrictive,costing,ranged,hier,spruce,paradox,billings,jeanne,oxidation,marin,halfway,amending,conflicting,georgian,compensate,recherche,loser,claus,braves,cracking,sued,shoots,interrupted,hemisphere,miranda,clover,kindness,porto,directs,jolly,snakes,swelling,spanning,politician,femme,unanimous,railways,approves,scriptures,misconduct,lester,resides,wording,obliged,perceive,rockies,siege,exercising,voluntarily,atkinson,nord,truths,grouping,wolfe,thereto,authorizing,enamel,toby,radiant,virgins,firstly,martini,butte,reeves,suspicion,disadvantage,bastard,spends,hicks,pratt,pedigree,fraudulent,sherwood,forgiveness,almond,har,petitions,francais,trenton,chalk,omar,alexis,axle,puppet,cultivation,surveying,grazing,pillar,mirage,questionable,seaside,precinct,renamed,cobb,unbelievable,soluble,piracy,rowing,siding,hardest,forrest,reminders,negro,blanca,equivalents,johann,pineapple,wrath,opal,simplest,patrons,peculiar,toon,europeans,commence,descendants,redmond,safeguard,lars,obsession,grind,albeit,billiards,clint,bankers,righteous,eo,redistribution,freaks,tra,sincere,intentionally,blitz,tended,censorship,cactus,viva,attained,blew,howe,nap,splendid,janice,lava,leonardo,sucked,scissors,cooks,sharply,granada,laurence,rebellion,rainy,tho,regent,evelyn,vinegar,vie,pluto,gil,vail,fisherman,misery,undergoing,limerick,envy,sweeping,healthier,ussr,preface,jameson,grievance,unread,sentiment,pencils,galloway,forged,viola,disclosures,provence,computerized,rustic,rumor,dillon,shah,eleanor,deception,conducts,divorced,rushed,weighs,magnolia,diver,disappointment,castles,notions,plateau,dexter,palette,blaze,wreck,threatens,strengthened,sammy,wakefield,devastating,centro,arabs,bild,robbery,eine,jasmine,crochet,brock,crowds,hoops,macon,stamped,increment,ju,ideals,chloe,ape,gee,apologies,malignant,dismiss,preceded,lawful,stag,crosby,rash,gateways,collapsed,horns,diversion,fantasies,beginnings,reversal,lex,presses,ordination,oxfordshire,yves,tandem,boil,deliberate,gagged,surprises,abe,roc,barley,potent,vo,amusing,mastering,nerves,retains,chimney,naomi,proverbs,risky,mistaken,carving,miracles,clair,slipped,realism,crete,fractions,bloodhound,sherry,desperately,indies,tulip,madame,remedial,vain,bert,dalton,bologna,departing,maze,barefoot,remuneration,bohemian,imposing,damon,tivoli,rode,amen,marching,evacuation,owing,warp,catholics,imo,faculties,denies,reinforce,inception,draper,bowman,subversion,benny,spires,barney,homosexuality,declares,masonry,medicinal,accrued,temples,realizing,annum,cemeteries,indoors,telescopes,magellan,champs,averaging,salads,addicted,flashlight,disappointing,eighty,unlocked,scarce,roche,ropes,spiders,obedience,plague,diluted,canine,gladly,brewery,lineage,mehr,brew,vaughan,kern,julius,coup,cannes,morse,dominance,piston,itu,cords,revisited,cass,sealing,topped,rag,despair,fore,absorb,injected,alps,commodore,enlisted,prophets,supernatural,overlooked,ditch,feared,prelude,rowe,slick,limestone,commentari
es,manpower,lec,chunk,reels,lob,slept,gregg,drafted,chalet,hopper,sus,specialization,abstraction,ludwig,scandinavian,detained,luncheon,zenith,browns,waits,tenor,softly,plenary,scrub,wilkinson,limb,intestinal,poe,refusing,suffers,occupy,gan,bethlehem,caves,authoritative,celestial,immense,audrey,merlin,aiming,seizure,stuttgart,diplomacy,differing,foreigners,limp,capitalist,mute,prescott,protestant,metre,tricky,ordinances,koch,topaz,ans,imaginary,albion,sutherland,dar,dart,wrought,robe,theresa,heidelberg,multitude,tutors,ezra,housekeeping,captive,kettle,visitation,chr,gibbs,baggage,dusty,patty,serena,satire,tortured,pioneers,crate,episcopal,moonlight,mast,unfinished,goth,cared,affection,sworn,bowen,vicious,educating,kin,cozy,mackenzie,slippers,earthquakes,hayward,wandering,comb,liquids,beech,vineyards,amer,zur,frogs,consequential,unreasonable,osborne,stimulus,economists,miners,agnes,constituency,rocker,acknowledges,alas,sawyer,maori,tense,predicting,filipino,cooled,prudential,basel,migrant,devotion,invoke,arte,leaning,paddle,watkins,oxley,anterior,chop,rooted,onyx,benches,illumination,freedoms,foolish,finale,weaker,foley,fir,stirling,moran,compose,nausea,comfortably,hoop,temps,clearer,floods,fritz,mover,modeled,erica,malaga,sustaining,repaired,diocese,francois,obituary,painters,thistle,tem,sleepy,footnotes,rupert,shrine,purified,striving,dire,attendant,gull,jour,mir,northumberland,memoir,betsy,meredith,fauna,cliffs,hayden,roadside,smells,dispose,waking,feathers,reflex,falcons,spurs,sion,crashed,travelled,urgency,gould,brit,eliza,graduating,rims,harmonic,darts,shin,intriguing,flaw,tails,emulator,discarded,bibles,hangs,joanna,synonyms,stranded,horton,dolce,hercules,pane,browning,angular,veins,folds,sneak,incorrectly,avoidance,sauces,conquer,probabilities,immortal,mariners,endeavor,creole,mateo,teas,settling,badger,mohammed,saturdays,partisan,pri,gratitude,impress,willy,anon,eminent,ribs,communicated,exceptionally,quilts,splits,subscribing,companions,cheques,edith,screwed,magna,sectional,fashionable,polly,tidal,ballots,hog,testify,poole,boycott,vitality,clerks,crust,bothered,traverse,vengeance,dolly,garrison,sal,barb,huns,miner,fashions,barr,analogy,insomnia,constituent,aura,cecil,sponge,sect,diner,anticipation,enduring,scarborough,regis,winters,nous,explosives,mound,xiv,backgammon,ox,snatch,mole,obs,owed,ethan,kissed,buff,butcher,psalms,rum,chefs,engraving,constituted,hamlet,clad,excursion,inverness,orb,grange,resigned,fled,enriched,harrington,brandy,swings,scion,elle,reptiles,vortex,swallowing,purses,bodily,xiii,awe,beaumont,australasia,mandy,hoods,fireplaces,requisite,retrospective,emphasizes,lizard,hawthorne,bouquets,wears,shropshire,baja,regal,safeguards,cabbage,cub,spectator,arrests,circumstance,numbering,sliced,reproductions,byrd,sidewalk,prob,breaker,curly,alberto,asserted,jealous,refinement,durban,learnt,hound,squirrel,concealed,wharf,rhythms,departures,shotgun,stimulated,chickens,langley,briggs,cheyenne,lug,surveyor,maize,extinction,unaware,discretionary,ry,psalm,scented,gowns,spying,nicholson,lied,ek,bloc,recurrent,talbot,leaks,tam,swell,obstacle,ville,mantle,chico,driveway,irony,gesture,fairbanks,parfum,armies,hy,hugs,greenfield,santos,owls,cutters,acquires,ceased,merging,plaques,breadth,mammoth,convictions,intentional,sophia,prohibits,innings,reorganization,pronunciation,concession,measurable,ami,parcels,pastry,manners,phosphorus,viper,hid,volcanic,gypsy,thieves,preaching,repeal,uncovered,hemp,eileen,proficient,pelican,apocalypse,cousins,discharges,giorgio,admire,nk,poured,usefulness
,unsolicited,binds,unveiled,burt,titus,suffix,installment,spindle,heavens,wink,mister,rounding,inorganic,flare,scholastic,wight,withholding,foliage,nod,ocr,fife,generals,crank,goats,autographs,stub,fundamentally,creamy,exposition,rains,buckley,middleton,organise,tort,brace,novelties,gigantic,abdul,sheldon,ryder,octave,struts,ud,suppress,harding,dams,deserved,violates,rutherford,separates,proofs,precedent,confirming,garth,nolan,mach,facilitated,paolo,metaphor,bridget,infusion,jessie,organising,argus,mango,spur,jubilee,landmarks,polite,sith,thigh,paving,cyclone,perennial,jacqueline,seventeen,meats,wie,bulldog,cleavage,analysed,uma,gradual,brethren,embodiment,violating,recruited,toilette,trailing,pact,honourable,lulu,windy,punished,chronology,mastery,thermometer,cranberry,kan,downhill,vita,steer,nesting,vogue,aired,outward,whisper,ipswich,compromised,confession,deprived,benedict,vodka,molding,zaire,bricks,communism,leopard,flowering,wig,jingle,bounty,arcadia,fishes,ringing,knobs,taurus,whiskey,absurd,tolerant,stoves,enactment,embryo,ska,nora,salts,marietta,furious,iteration,vida,ceilings,dispenser,respecting,approving,unsafe,separating,soups,residing,richie,markings,moist,trina,drained,mule,cummings,cessation,append,motive,pests,seasoned,sunflower,duel,bernardino,stocked,bethel,entre,sunderland,doris,motives,reinforcement,dwight,provost,guessing,tal,mead,harlem,throttle,gong,ber,sympathetic,fridays,isolate,unconscious,bays,faulty,affidavit,messiah,infamous,pleasing,seizures,appealed,surveyors,tenacious,waterfall,sensual,persecution,petit,burgess,gaze,chlorine,freshly,saxon,cabo,rye,isabella,monies,assassination,remarkably,pointe,stall,deere,entirety,destined,marcel,lad,hulk,ora,bal,flores,olivier,portage,dwellings,informing,yellowstone,characterize,ricardo,yourselves,rotterdam,hostage,cracker,anglican,monks,compliment,camino,storey,scotch,sermons,remembers,freddie,contention,juliet,adjunct,guernsey,bangor,persia,axes,stirring,wil,haze,pits,utter,bottled,ants,gastric,influencing,rents,christy,theirs,mattresses,donovan,lax,colts,rehearsal,strauss,reputable,wei,tuck,rei,slab,lure,ren,archbishop,ling,incompatible,emblem,roadway,overlapping,walters,dunes,murders,miserable,unsuccessful,decorate,appleton,bottoms,revocation,vomiting,chesterfield,exposing,pea,tubs,simulate,medina,thankful,alaskan,friedrich,elephants,pinch,flynn,braces,calhoun,deficient,annotations,filth,moderation,worrying,outrageous,kraft,blackboard,nitrate,skates,comstock,hers,grin,footprint,tunnels,crises,trillion,comforter,cashmere,heavier,meteorological,spit,labelled,darker,salomon,globes,dissent,daly,choral,unrestricted,happenings,leicestershire,neu,contempt,socialism,hem,edible,anarchy,arden,clicked,ineffective,drawers,byrne,acme,leakage,shady,chemist,evenly,reclamation,rove,lionel,praised,rhymes,blizzard,erect,refining,concessions,commandments,malone,confront,vests,lydia,coyote,breeder,electrode,pollen,drunken,mot,avis,valet,cheng,shrubs,watering,barrow,eliot,jung,transporting,rifles,posterior,aria,elgin,excise,poetic,mortar,blamed,rae,recommending,inmate,dirk,posture,thereon,valleys,declaring,commencing,armada,wrench,thanked,arranging,thrilled,bas,amelia,jonah,discomfort,scar,indictment,apology,collars,andover,pudding,plato,examiners,salzburg,rot,possesses,squared,needless,pies,palma,barnett,ther,heterogeneous,aspirations,fences,excavation,luckily,rutland,lighted,pneumonia,monastery,erected,expresses,migrate,carton,lorraine,councillors,hague,transforms,ammonia,roxy,outlaw,saws,bovine,dislike,systematically,ogden,interruption,d
emi,imminent,madam,tights,compelled,criticized,hypertext,electra,communal,landlords,emu,libby,seite,dynamite,tease,motley,aroma,pierced,translates,mais,cognition,cain,verona,syn,delegated,chatting,punish,fishermen,conforming,causal,stringent,rowan,assigning,dwell,hacked,inaugural,awkward,weaving,metropolis,psychologists,diligence,stair,dine,enforcing,struggled,lookout,arterial,injustice,mystical,ironing,commanded,woodlands,guardians,manifesto,slap,jaws,finn,pedestal,widening,underwood,saline,sonny,longevity,paw,isabel,sterile,botany,dissolution,pauline,quart,bison,suppressed,allegro,materially,cit,amor,xvi,fungi,phyllis,bengal,scrolls,awakening,fairies,prescribe,greed,nominate,sparkle,autograph,migrating,refrain,lastly,overcoming,wander,kona,relieved,luc,elena,intermittent,ante,vols,revolving,bundled,covert,crater,leah,favored,bred,fractional,fostering,thence,birthplace,bleed,reverend,transmitting,serie,neptune,caucasian,goblet,inventions,dea,practicable,fronts,ancestor,russians,incur,canonical,nodded,confronted,believer,australians,declines,peacock,utmost,yates,leroy,helpers,elapsed,academies,tout,gre,imitation,harvested,dab,hopeful,furnishing,negatively,residences,spinach,liquidation,predecessor,cheeks,hare,beasts,philanthropy,peanuts,discovers,discard,cavalry,breakers,quorum,forwards,prevalent,plat,exploits,dukes,offended,trimmed,py,worcestershire,bonn,prostitution,mosque,horseback,vested,terribly,earnest,homme,clancy,tory,rossi,oldham,gonzales,vor,confederate,presumed,annette,climax,blending,weave,postponed,philosophers,speeding,creditor,exits,pardon,oder,abby,teller,mandates,siena,veil,peck,custodian,dante,lange,quarry,seneca,oceanic,tres,helm,burbank,festive,rosen,alla,preserves,ingram,jess,secretion,insult,scraps,waived,cured,buggy,kennel,drilled,souvenirs,prescribing,slack,gin,differentiate,jays,pilgrim,vines,susceptibility,ambiguous,disputed,scouting,royale,instinct,gorge,righteousness,carrot,opaque,bullying,saul,flaming,apis,marian,liens,caterpillar,remington,chew,benefited,prevail,musik,undermine,omission,boyle,mio,diminished,jonas,locke,cages,jolla,capitals,correctness,implication,pap,banjo,shaker,natives,tive,stout,rewarded,athena,deepest,matthias,duane,sane,climbed,corrupted,relays,hanna,husbands,fading,colchester,persuade,roaming,determinations,weighed,ashamed,concierge,gorilla,gatherings,endure,nom,cheltenham,dickens,juniper,repetition,siberian,preparatory,fielding,dune,hee,adler,yosemite,cursed,youths,migrants,massey,tumble,stare,unlocking,missy,meade,contradiction,helium,wonderfully,dug,congenital,trojans,insanity,embraced,finely,authenticated,reformed,tolerate,lest,adhesion,tic,noticeable,cette,aesthetics,smoker,benign,hypotheses,afforded,aisle,dunno,blur,evidently,limbs,unforgettable,punt,tanned,altering,bunker,multiplication,paved,fabricated,pasture,richest,cruelty,mormon,scots,genuinely,neighbouring,plugged,tyson,souvenir,mifflin,cucumber,occurrences,marshal,anders,seize,decisive,spawn,blanks,dungeons,sailors,stony,fayette,shelving,annals,sadness,periodical,moe,dime,losers,punta,flavour,crypt,accomplishment,onwards,bogus,carp,prompts,witches,skinner,dusk,nouveau,customary,vertically,crashing,cautious,possessions,urging,passions,faded,counterpart,utensils,secretly,tying,lent,magician,indulgence,johan,melted,lund,fam,nel,extremes,puff,galileo,bloomfield,obsessed,flavored,groceries,motto,singled,alton,staple,pathetic,craftsman,irritation,rulers,collisions,militia,eis,conservatory,bananas,adherence,defended,grille,elisabeth,claw,pushes,alain,flagship,kittens,illegally,dete
r,tyre,furry,cubes,transcribed,bouncing,wand,cavalier,ish,rinse,outfits,charlton,respectfully,ulster,tides,chu,weld,venom,writ,patagonia,dispensing,puppets,tapping,immersion,explode,toulouse,escapes,berries,happier,mummy,punjab,stacked,brighter,cries,speciality,warranted,ruined,damp,sanity,ether,suction,crusade,rumble,correcting,shattered,heroic,retreats,formulate,sheds,anomalies,homogeneous,humphrey,spheres,belonged,assigns,sofas,croix,cushions,fern,defenders,odessa,lore,whipped,vox,dinners,rosie,genealogical,terre,selfish,eventual,nach,mitigate,jamestown,elisa,shelton,boiled,neville,natasha,endeavour,roswell,haute,herring,unfamiliar,expectancy,deterioration,proclaimed,arid,coincidence,idiots,mona,muddy,nuevo,hitchcock,cid,neighbour,raspberry,illusions,spikes,enumeration,suche,permissible,yielded,nuisance,siam,latent,marcia,drowning,spun,shalt,ric,loch,commanding,sparrow,poorest,hector,brotherhood,milling,sinking,sulphur,wicker,balm,figs,browne,nephew,confess,chit,chaotic,alexandre,lays,principally,visor,mundo,jarvis,drip,traced,outright,melodies,myriad,stains,sandal,rubbing,naive,wien,skeptical,remembrance,detects,dragged,foreman,allegiance,conduit,dependable,echoes,ladders,prudent,glowing,alchemy,linden,sven,geographically,alternating,tristan,audible,folio,presiding,mans,waterways,aff,fractures,apprenticeship,childbirth,dumped,barre,rama,johannes,fiery,convex,richer,mop,urn,soleil,connor,northamptonshire,biscuits,disclaims,sich,restless,unanswered,paired,vaults,ahmad,tossed,caucus,cooke,pillars,katy,zoe,overwhelmed,salute,parody,compensated,lacked,circulated,soo,maltese,acorn,bosses,pint,ascension,ply,mornings,mentioning,flagstaff,pretoria,thrive,rightly,paragon,basal,persist,wilde,indispensable,illicit,liar,pledged,pictorial,curling,ares,smoky,opus,aromatic,flirt,slang,emporium,princes,restricting,promoters,soothing,freshmen,departed,aristotle,finch,inherently,krishna,forefront,largo,amazingly,plural,dominic,skipped,hereinafter,nur,extracting,analogous,hebrews,tally,unpleasant,uno,tempted,blindness,creep,staining,shaded,cot,plaster,novo,hearted,obstruction,agility,complying,otis,overture,newcomers,noteworthy,agile,sacks,ionic,stray,runaway,slowing,watchers,supplemented,poppy,monmouth,frenzy,jargon,kangaroo,sleeper,elemental,unnamed,doncaster,particulars,jerking,bungalow,bazaar,predicate,recurrence,recruits,sharper,tablespoons,supervise,termed,frauen,stamping,coolest,reilly,basque,ire,pegasus,silhouette,dorado,daring,realms,maestro,turin,gus,forte,tipping,holster,fiddle,crunch,leipzig,bard,kellogg,reap,exemplary,caliber,apostle,playful,icelandic,multiplied,enchanted,belgrade,styled,commanders,thor,waive,bethany,vance,soprano,polishing,marquis,wen,translating,frontiers,adjoining,greet,acclaim,hardship,hast,miriam,cavaliers,rollers,carleton,pumped,differentiated,sonia,verifying,almighty,vel,intuition,revoked,openness,circulating,bryce,ilo,latch,verbs,drank,darlington,slippery,galerie,outpost,seville,mira,chatter,santo,lettuce,raging,tidy,jong,oppression,bows,yielding,torso,occult,expeditions,nok,hooker,lorenzo,beau,subordinate,lilies,articulate,ecstasy,sweetheart,fulfil,calcutta,hobbs,mediator,tad,cultivated,rang,disconnected,consulate,wilkes,disagreement,strands,sicily,compost,adjourned,familiarity,erroneous,pulses,theses,stuffing,jeux,wilton,flooded,reverted,crackers,greyhound,corsair,ironic,wards,unsupported,hinge,ultima,cockpit,venetian,sew,carrots,faire,laps,memorials,resumed,conversely,emory,stunt,excuses,vitae,hustle,stimuli,upwards,witty,transcend,loosely,anchors,hun,atheist,capped
,oro,liking,preacher,complied,intangible,compassionate,substitutes,flown,frau,dubbed,silky,vows,macy,distorted,nathaniel,attracts,bern,qualifies,grizzly,micah,hurting,homicide,await,sparse,corridors,sont,mcdowell,fossils,victories,chemically,compliments,cider,crooked,gangs,segregation,nemo,overcast,inverted,lenny,achieves,forehead,skye,percy,scratches,conan,lilac,intellect,charmed,denny,harman,hears,wilhelm,nationalism,pervasive,auch,enfield,nie,clears,knowingly,pivot,undergraduates,digestion,mixtures,soaring,dragging,virtues,flushing,deprivation,delights,foreword,glide,transverse,engagements,withstand,newbury,authorizes,blooms,soar,uniformly,todos,piedmont,empowered,asi,lena,outlying,slogan,subdivisions,deducted,ezekiel,totaling,elijah,compton,vigorous,flee,biscuit,creme,submits,woes,waltz,menace,emerges,classify,paige,downstairs,statesman,cheerful,blush,leaflet,monde,weymouth,spherical,favourable,informs,dramas,cher,billiard,aut,malay,unseen,optimism,silica,kara,unusually,widest,impotence,medley,cadet,redskins,temper,asserts,stew,hereafter,retiring,smashing,accumulate,tahiti,mariner,collier,hush,whispered,generosity,vibrating,lama,artisan,akin,raphael,lola,embarrassing,aqueous,pembroke,stockholders,lillian,splinter,ibn,preferable,juices,ironically,morale,morales,solder,trench,persuasion,practise,lodged,revolt,renders,pristine,francaise,shines,catalan,auditory,applause,trait,popped,busted,basins,farmhouse,pounding,picturesque,ottoman,eater,utopia,insists,willard,lettering,marlborough,pouring,concentrating,soak,buckingham,hides,goodwin,manure,savior,dade,secrecy,wesleyan,duplicated,dreamed,fertile,hinges,plausible,creepy,narrator,augustus,fahrenheit,hillside,standpoint,nationalist,piazza,denoted,oneself,royalties,abbreviation,blanco,critiques,stroll,anomaly,thighs,boa,expressive,infect,pers,dotted,frontal,havoc,ubiquitous,arsenic,synonym,yer,doomed,francs,ballad,sling,contraction,devised,explorers,billie,ravens,underline,obscene,mes,hymn,continual,slowed,aladdin,tolerated,quay,outing,instruct,wilcox,overhaul,peruvian,indemnity,lev,imaginative,weir,remarked,portrayed,clarendon,ferris,julio,spelled,epoch,mourning,phelps,aft,plaid,fable,rescued,exploded,padres,scars,whisky,tes,uptown,susie,batter,reyes,vivian,nuggets,silently,pesos,shakes,dram,impartial,punctuation,initials,spans,pallet,pistols,mara,tanner,avenues,dun,compress,apostles,sober,tread,legitimacy,zoology,steals,unwilling,lis,paddy,plunge,pearce,vos,sinister,burr,arteries,formations,vantage,texans,diffuse,boredom,norma,crosse,mondo,helpless,wyatt,spades,slug,visionary,coffin,otter,navajo,earns,amplified,recess,dispersed,shouted,shilling,resemble,carbonate,mimi,discriminate,stared,crocodile,ratification,vases,advises,sind,coward,inequalities,garde,dyes,viz,turbulence,yell,fins,ritchie,dresser,rake,ornamental,riches,resign,injunction,intervene,poised,barking,josephine,dread,dag,handwriting,serpent,tapped,articulated,pitched,wisely,accustomed,bremen,steaks,playhouse,superficial,suns,josef,casts,bunk,stab,sanction,dyer,effected,tubular,moi,ode,avoids,richter,evidenced,heinz,argos,dit,larvae,dyke,cassidy,kernels,mobilization,amt,wilkins,manipulated,alleviate,seam,riddle,comedies,fainter,respectful,cabaret,recession,awaited,nozzle,externally,needy,wheeled,booksellers,darn,diners,greeks,reich,armored,weary,solitary,photographed,tweed,snowy,pianist,emmanuel,acapulco,surrounds,knocking,cosmopolitan,magistrate,everlasting,pigment,faction,tous,argentine,scandinavia,minnie,genie,linn,handel,microscopic,clarified,coherence,sensations,orphan,conf
erred,acp,disturbances,chandelier,embryonic,carver,paterson,delle,graceful,intercept,shouts,ascertain,veto,exhaustive,annoyed,bureaucracy,paz,stalls,fined,bien,inward,reflector,greeted,hartley,defenses,meaningless,clam,francesco,hes,georg,negligible,starch,melinda,godfather,apron,guts,ros,pragmatic,tyranny,warehouses,regimen,axel,antony,hahn,fluffy,marianne,slender,hereford,aides,forma,absorbing,cherries,gaelic,gomez,alec,distinguishing,glazed,judd,dashed,libyan,dickson,distressed,shouting,bullock,villagers,acknowledgments,ethiopian,mermaid,buds,sexes,wilder,sire,centred,confinement,islanders,ding,uncover,contested,coma,husky,conserve,bland,abatement,originator,whipping,skipping,routed,rudolph,abigail,missionaries,householder,plotting,yan,succeeding,elmer,sails,schuster,overlook,robes,sham,fungus,astonishing,graveyard,chunks,bourne,revert,ignores,popping,captains,loaf,pandora,gabrielle,stad,abel,enigma,glands,militant,jug,inferno,torrents,outset,confuse,yvonne,attaching,adept,doubtful,ratified,insecure,explosions,trunks,gareth,versatility,lothian,fem,intricate,strata,depository,hubert,proclamation,beauties,hybrids,gillian,darrell,irrespective,imposition,ensured,kidnapped,sai,cereals,outrage,poop,scrubs,orchestral,bellingham,dripping,afterward,devote,facets,musique,frightened,noises,ambiguity,booths,discourage,elusive,speculative,madeira,intimacy,hallway,whey,ripping,mei,hob,reloaded,garry,ester,annan,thriving,hampers,bragg,gracious,snail,curt,demise,theoretically,grooves,sutra,conveyed,swine,typographical,ellison,ado,trophies,quicken,werden,heron,graft,moth,crossings,derrick,mash,germ,envoy,breckenridge,pug,antoine,domingo,resembles,doorway,grandson,tat,catalina,redding,accompaniment,derivation,warden,voir,tug,margarita,clans,instituted,notary,thi,sociological,offending,forgetting,macedonian,votre,reservoirs,barlow,tyrone,halle,edged,encompass,spade,hermes,glare,metaphysical,insignificant,exchanging,pledges,mentality,turbulent,pip,pup,fortunes,sultan,masked,casing,plotted,haley,generously,amounted,icy,repression,reaper,honoring,facto,climatic,broaden,begging,wharton,sui,freddy,bushes,contend,restraints,truncated,gibbons,nitric,atop,glover,railroads,unicorn,normandy,floats,justices,orderly,wafer,puck,roofs,reefs,hover,quarantine,detrimental,molds,elias,hou,subsistence,chilled,foe,citadel,topography,leaflets,wrinkle,contemplated,adolescence,nun,harmon,indulge,bernhard,hearth,edna,embarrassed,aggressively,coincide,maynard,genoa,enlightened,clippings,radicals,penetrate,stride,catastrophe,greatness,archie,parasites,entertained,inventors,ferret,louisa,agony,marseille,taller,doubling,stupidity,moor,stephenson,enrich,foreground,revelations,replying,incapable,parte,acknowledgment,labyrinth,africans,sway,undergone,lacey,preach,triangular,disabling,cones,inversion,thankfully,taxed,presumption,excitation,salesman,hatfield,constantine,confederation,petals,imprisoned,heller,docks,landowners,sul,juno,deux,defiance,bully,valiant,constructions,youngsters,toad,breasted,banging,vertigo,unsatisfactory,fluent,rhyme,eros,aan,mcintosh,suffice,convened,nah,accusations,debated,stallion,equipments,necessities,camelot,deserted,keepers,logically,caravans,oranges,bum,presse,olga,contends,snort,occupants,organiser,vim,luminous,crowe,unparalleled,anyhow,waterfalls,obtains,antwerp,ulrich,hardened,primal,straits,upheld,wir,malt,sinai,endowed,cameo,attire,blaine,typewriter,pomona,goddard,fanny,plagiarism,milky,combs,upland,unconstitutional,adopts,macao,snaps,defends,depicts,pilgrimage,elevators,ohne,narrowed,eighteenth,hurs
t,inscription,ascent,pisa,tedious,pods,universally,chewing,accommodated,tendencies,rowland,welded,conforms,reggie,refreshments,depict,coils,callers,navel,arbitrator,prolific,nurseries,footsteps,indefinitely,sucker,bumps,frightening,wildly,sable,retarded,neatly,singleton,spaniel,somerville,worthless,git,spool,jeopardy,rovers,voiced,annoy,clap,aspiring,dazzling,cornelius,scientifically,grandpa,cornish,guessed,kennels,sera,axiom,stamina,hardness,abound,curing,socrates,aztec,confer,vents,mater,oneida,aiken,crowned,sandstone,adapting,cranes,rooster,proctor,prehistoric,balkans,dictate,joker,wiped,contours,abdomen,baden,tudor,paws,villains,poke,prayed,inefficient,heirs,parasite,shortcomings,cures,concentrates,preclude,fasting,loudly,horseshoe,zeus,constellation,recital,utrecht,freud,bedtime,thinkers,hume,reminiscent,rapport,ephesians,dope,truss,kiln,peaches,depressing,strangely,narratives,sud,skipper,gy,drains,maxima,unification,sous,testimonial,khaki,distributes,navigating,slough,prodigy,embossed,mould,jock,blasts,poorer,anglia,dyed,dissatisfied,bourbon,staggering,bismarck,hoe,rubbed,wasp,bookseller,fuss,muir,uterus,chimes,webber,aggregated,pico,exhibiting,gimme,nee,beaufort,radically,terminating,platter,chamberlain,steamboat,brewster,inferred,croft,ism,uplifting,penal,exclusions,pageant,henley,purchasers,pitchers,tracts,morally,hosiery,yt,reptile,overdue,cowan,mohawk,riots,hassan,schwarz,persuaded,teasing,rejecting,emphasizing,unbound,quentin,shepard,sacrifices,delinquent,contrasting,nestle,correspondents,guthrie,imperfect,disguise,eleventh,embassies,lapse,wally,phenomenal,civilizations,friendships,marjorie,shrub,kindred,reconsider,sanctioned,parfums,condemn,renegade,awaits,hue,augmented,amends,fullest,shafts,finer,ys,burdens,invocation,gillespie,brooch,motifs,nineteen,griffiths,invaders,edmond,volunteered,swollen,liste,grasses,scatter,steward,ito,cherished,smack,incidentally,sine,depleted,holiness,divinity,campaigning,tougher,sherlock,comprehend,cloak,pamphlet,clipper,umbrellas,priceless,mig,assassin,exploiting,cynical,toro,etched,bray,choke,underwent,comforts,appoints,keene,rachael,swallowed,imperialism,mouths,halter,ley,ike,pumpkins,shrinking,roar,novelist,potomac,arroyo,tipped,amidst,insurgents,wanda,etching,discouraged,gall,oblivion,gravy,inherit,sprinkle,stitching,advisable,loi,meme,gladstone,jugs,congregations,handing,payer,ze,beforehand,laborer,watcher,vibrations,apes,strawberries,abbas,moods,dobson,ives,soaked,abridged,palate,thierry,masculine,realizes,kahn,petitioners,constable,sayings,unconditional,vue,progressively,topping,baird,chilling,translucent,glaze,newcomer,branching,unmarried,unexpectedly,funniest,bona,scorpion,mirrored,sel,anatomical,misdemeanor,tobias,salle,infra,strasbourg,commemorative,implicitly,ewing,austen,assurances,comedian,rascal,nid,roberta,dizzy,outbreaks,annuities,slit,whitening,occupying,depicting,ordnance,verge,ransom,nomad,dagger,thorn,preamble,mor,spins,solicit,provoking,orchids,buckets,spoil,blazing,palermo,snapped,alligator,detectives,rochelle,nomenclature,abdullah,invade,regulates,rendezvous,strives,trapping,gardeners,clemens,deuteronomy,diminish,britannia,manifestations,tak,stitches,promulgated,mediocre,passports,ayrshire,invent,eagerly,damascus,reformation,hypocrisy,parishes,trooper,bun,compendium,disappears,hymns,monotone,palsy,propositions,locomotive,debating,cuffs,prosperous,famine,orally,elliptical,grabbing,jogging,stipulated,persuasive,horrors,bearer,pastors,acquainted,dependents,dizziness,ture,brilliance,nicky,originate,respectable,horace,prohibiti
ng,disappearance,morals,invaded,spoiled,monet,pickle,quaker,haunting,manipulating,tangent,tempest,petra,dominique,waving,dai,uneven,plata,plurality,warrington,adventurous,luigi,bayou,accueil,confluence,blossoms,succeeds,orphans,louder,boilers,reunions,yelling,trough,leaned,quadrant,discrepancy,slid,antioch,tonic,magnus,harrow,jig,reckless,raining,peasant,vader,qua,figuring,crushing,thorpe,ordained,hodges,saucer,chinook,passover,byzantine,tomas,triangles,curvature,rites,sideways,devious,dreamer,acknowledging,estuary,burglary,pouches,thrilling,spectacle,sentiments,ditto,nana,waiter,oddly,suchen,raft,cul,nutshell,arrogant,hermann,induces,thrift,sae,admired,stunts,iaea,youthful,stumbled,emitted,sufficiency,tempered,slipping,solitude,cylindrical,destroyer,fide,undesirable,mongolian,weakly,parsley,undue,stunned,smiths,magyar,hostility,groves,pursuits,reflux,adaptations,jurisprudence,invariably,lecturers,progressed,brow,elves,kearney,graeme,kimball,chant,turnkey,sprays,tighten,revolver,crowns,intermediary,matted,apricot,tufts,cuckold,unreliable,rosewood,parry,existent,tongues,dictator,jehovah,fanatics,coeur,perpendicular,fay,hedgehog,raves,mamma,entails,folly,wheeling,sharpe,hawthorn,mural,bankrupt,wager,purge,interpolation,adjournment,pitfalls,stationed,ambrose,nightmares,aggravated,deem,melville,cavern,ene,sumner,descended,disgusting,flax,weakened,imposes,withdrew,tart,guerrilla,spoons,persona,poser,tram,distinctions,peabody,alia,iced,faulkner,scarcely,excused,fused,madeleine,roaring,witchcraft,stopper,fibres,cullen,crested,stump,scalp,gunn,erwin,conductors,criticisms,hadley,diplomat,sylvester,melon,tablespoon,manganese,siren,clasp,olives,nino,summons,lucrative,porous,shrewsbury,bile,siegel,cara,ese,ils,hinduism,elevations,thirst,endeavors,sportsman,scratching,iodine,phoebe,wipes,fro,krone,urgently,exposes,natures,liberalism,meer,derry,suisse,frankenstein,parc,heir,phy,successors,eccentric,yarmouth,transports,amour,illustrative,prosecuted,sailed,craving,advocating,titel,leaking,escaping,possessing,suicidal,cruisers,masonic,forage,loco,hellenic,kwh,ethel,distinctly,assertions,baba,pebble,staffs,ets,hoo,denomination,patched,patriotism,battling,tickle,bandit,acquaintance,lambs,loom,blouse,heightened,chests,ambitions,feline,grub,ulcer,slew,menstrual,canals,negatives,threading,duet,intolerance,ammonium,zephyr,tearing,muffins,naar,autor,fannie,foothills,atrium,thine,superiority,gestures,nemesis,engel,confessional,cardigan,taunton,evaporation,devise,abolished,sorrento,blanchard,uns,toying,parma,wreath,plight,opium,irrational,arches,naturalist,encompassing,penetrating,destroys,prussia,lowers,cookery,nal,beatrice,policeman,cartilage,turnpike,migratory,jurors,mea,enumerated,sheltered,doctrines,seams,pleaded,pca,elasticity,cel,gutter,ulcers,sloppy,flannel,volcanoes,ridden,contradictory,misunderstood,steamer,cong,barometer,exclaimed,diem,barge,spartan,nea,crystalline,rumours,famed,brandt,riga,bengali,respite,grimm,shetland,provocative,guido,tasted,licked,banged,rufus,hopeless,henrik,safest,daphne,ame,pollock,meteor,granville,veneer,anonymously,manageable,slant,disciplined,pollard,comme,chops,broom,plainly,ibrahim,snare,shank,uphold,revising,insignia,nurture,leash,hunts,faber,plantations,factions,falmouth,humility,commentators,impeachment,acton,engages,carbide,pullman,characterised,kinder,deems,outsiders,dodd,dissolve,adrienne,deduct,crawling,modifier,muck,colombo,hoax,cohesion,reconnaissance,antagonists,bachelors,observes,corporal,ligne,wary,locust,condenser,articulation,villain,tre,oft,secures,leviticus,im
pending,rejoice,pickering,poisson,bursts,versailles,hurdles,lucie,geese,condemnation,candies,sidewalks,formidable,pun,autres,mecca,rested,paused,macbeth,abandonment,nada,bertrand,broth,wentworth,seduction,fertilizers,maison,contrasts,giuseppe,tae,improperly,nebula,crows,blooming,mace,seminole,taper,synagogue,sugars,burnham,allure,intestine,ambassadors,reclaim,isla,kingdoms,richness,converge,pianos,dol,workings,penelope,extinct,ponder,revue,lunches,fooled,smear,rigging,derives,praises,detachment,luca,caracas,lids,pore,ey,radiance,oily,quitting,ina,grover,screams,masking,patchwork,heinrich,breton,assures,joys,involuntary,allegation,infinitely,dorchester,serge,morphine,gymnasium,waldo,diese,chiefly,judah,conjecture,mich,restitution,indicted,blasting,confronting,mastered,powders,debtors,grit,slain,nearer,ancestral,mujeres,faithfully,revolutions,sei,quail,tanker,administrations,sho,rector,ballast,immature,recognises,taxing,icing,substituting,executes,originality,pinned,gables,discontinue,bantam,bianca,zimmer,earthly,conceive,forfeiture,disastrous,gladiator,poplar,ence,recourse,martian,equinox,hinder,fredericksburg,presume,weil,armchair,cecilia,strut,kari,pavel,appropriateness,tame,solstice,oats,italien,wolff,plume,sparta,calypso,pantry,etienne,italics,reversing,murderer,courteous,wilt,smoothing,billet,pretending,hammock,receptions,revoke,intruder,wagons,jennie,platte,plank,paddling,ting,interrogation,neue,longing,irresistible,pilgrims,disappearing,sau,enact,inertia,misunderstanding,deity,pruning,agra,mandolin,rolf,swiftly,claws,brightly,manly,emit,shortened,fearful,potency,ifc,flawless,peril,alessandro,breaches,resultant,nestled,hairs,dumfries,drastic,guarded,celery,reconcile,grammatical,collin,ven,admiration,zanzibar,offend,severance,somali,combating,numb,retina,maids,tempting,bureaus,voyages,galatians,flo,planters,rocco,sheath,louie,chaplain,benefiting,dubious,occupies,mammal,shielded,degeneration,listens,swirl,emery,twists,scot,intrigue,blanche,dialect,nominating,fanatic,upton,pave,coverings,danced,slightest,libre,bromley,revive,corolla,predominant,abode,savoy,vogel,insecurity,trustworthy,uniformity,conquered,alarming,dur,amused,horizontally,knitted,exploding,narrowly,campo,rampant,suitcase,embarrassment,spectators,coronado,retaliation,inquirer,dreadful,metaphysics,drifting,ritter,attends,nicer,mellow,boast,gents,respiration,absentee,duplicates,dubois,corollary,tighter,predetermined,asparagus,airy,progresses,canister,stiffness,thrifty,canning,workmanship,complexities,shan,wrinkles,illustrating,perch,craven,divergence,homage,atrocities,londonderry,hops,emmy,chez,admittedly,ruiz,angst,liturgy,nativity,surety,tranquil,disseminated,staircase,cutler,cradles,electorate,airs,reconstructed,resent,opposes,silvia,distraction,dominates,kimberley,despatch,fugitive,tucked,jericho,turmoil,gilles,dietrich,haines,unjust,markedly,fascinated,disturb,terminates,exempted,bounced,rankin,brightest,saddles,scotsman,fitzpatrick,gushing,distracted,secluded,criticize,bog,livelihood,godfrey,minerva,superseded,iceberg,caleb,christening,jealousy,plumber,hagen,squeezed,judas,valle,dole,wick,gertrude,communists,owes,scents,bertha,levied,sag,barns,covenants,peat,proprietor,lizzie,raids,solos,compartments,maj,foi,importation,mss,planter,ici,metz,immaculate,pur,reindeer,telegram,ruben,shaken,wares,rivalry,verve,charley,carpenters,spree,sunk,morley,bespoke,inflicted,abbreviated,drowned,escorted,brute,barracks,kidneys,warbler,onward,kidnapping,inducing,lancet,antelope,terminus,castings,flanders,pellets,enclosing,starred,deacon,
kabul,sweeps,butch,mercure,bookcase,assembling,diaphragm,questo,chores,consignment,yarns,liv,seedlings,fortified,reconsideration,barnard,profoundly,bartender,mayfair,jag,maneuver,ridder,vanished,lair,enclose,sinners,lille,calves,defer,desmond,liars,els,sod,lacy,pharaoh,advocated,itching,alles,devotional,taft,comparatively,spartans,tourney,reasoned,lawton,degli,saith,astral,ach,parallels,yelled,wren,terence,hamper,balkan,blurred,smuggling,instincts,hutton,masquerade,deans,duality,sensational,kites,smoother,expulsion,withhold,romano,grievances,betrayed,dumps,buckles,joyful,generalization,hin,pancakes,crave,cordova,focussed,ripple,claimants,consolidating,goldsmith,inclination,measles,arcs,portman,baptized,expelled,rupees,betrayal,flourish,heed,mein,graf,hawking,divides,composing,handicrafts,healed,burmese,boon,valor,pedestrians,gathers,pawn,stitched,camille,ceases,dorsal,collie,hereditary,exaggerated,buccaneers,spleen,allotment,jeu,multiplying,empress,orbits,whence,bois,trusting,sabre,stigma,abduction,attaches,tartan,twisting,tore,eth,mimic,shielding,stormy,vulgar,pathological,hodge,trimming,emanuel,serene,obligatory,corrugated,queenstown,forbid,unhealthy,felicity,ticks,fascination,sono,experimenting,splendor,vigil,robbed,rebirth,winona,progressing,fragrant,defeating,hotter,instantaneous,operatives,carmichael,bulky,exponent,desperation,parlor,setter,monumental,olaf,fer,stirred,toughest,fil,facade,frankfort,monograph,booze,widen,adjective,disciple,cipher,arrears,rhythmic,unaffected,starving,vide,lennox,sil,hearty,triton,deus,devine,adore,entertainer,colds,dependant,thicker,weeping,chandeliers,moneys,infancy,dips,honoured,yachting,cleanse,chilly,digs,bolivar,womb,irritating,monarchy,corset,hinged,attendants,cummins,robins,booming,artikel,scandals,screamed,cramps,enid,herrera,digger,espionage,pups,avenged,norte,glade,pendulum,bounces,nehemiah,thinner,noch,licks,soto,caste,jus,daft,sampson,psyche,rudolf,angling,stubborn,diplomats,physicist,tagalog,coo,requiem,bleu,redeemed,sighed,lures,bavaria,devastation,heroine,bingham,achilles,flaps,indifferent,cadence,frosted,schubert,rhine,manifested,denominations,interrupts,rattle,insults,oatmeal,marta,distilled,stricken,unrest,cascades,druid,dunbar,outsider,ris,abstinence,nag,poodle,wunder,stefano,sitter,colder,laborers,whispers,swarm,elise,ledge,winthrop,historia,peasants,nectar,anecdotes,gilt,masterpieces,symbolism,monsoon,drown,strife,esprit,attaining,consular,treason,reckon,gaston,prosper,napier,supremacy,capillary,germain,islington,anchored,yong,vers,mulberry,sinful,cheeses,bradshaw,mythical,abyss,whitehall,malachi,ble,clipping,niece,irresponsible,pleas,softer,paralysis,devastated,tarzan,shutters,flask,arisen,femmes,relentless,ribbed,omnibus,stables,inhabited,hereof,untold,observable,gretchen,lanterns,tulips,vigorously,interfering,idols,designating,nugget,reminding,gusts,xviii,magistrates,procession,spiritually,attentive,rupture,trad,assimilation,lyrical,concorde,angelica,braided,wooded,intensely,propelled,artisans,bastards,bassett,aspiration,appended,slammed,aviator,implicated,seriousness,conformation,intimidation,paladin,ihr,nests,civilized,marched,cassandra,cath,sighted,hopping,destin,rosary,platoon,andres,loneliness,pulley,alleging,synonymous,confectionery,regrets,consciously,cours,footprints,priscilla,stimulates,darkest,implying,conducive,uncontrolled,ballads,mathew,hugely,sevilla,hostages,rosario,fruitful,franks,indemnify,satisfactorily,thinker,contestants,sia,influx,convoy,sled,pyramids,depended,conveyance,tortoise,milo,cultivate,crocker,dialogu
es,abolition,coax,padre,lees,mari,quattro,foresight,peppermint,tod,castillo,remnants,nailed,alum,frantic,zachary,comrades,cocoon,doth,gladys,bowers,strengthens,qual,dictatorship,breezy,plow,mundane,douglass,barclay,foes,cloths,clowns,lombard,barren,histoire,plead,behaved,embargo,condensation,yokohama,vow,claudio,blot,primera,commentator,patterned,sheen,specter,imam,assent,hove,shading,scrubbed,warts,roundabout,harmed,paternity,conceal,starvation,appointing,seine,flowed,sewn,zulu,rin,barnet,rift,saviour,lapel,turk,cupboard,archipelago,peep,deceptive,undertakings,tinted,congratulate,constance,vanishing,legislator,notifying,aches,kitchener,leaked,genera,idioms,gardiner,gli,poisonous,chime,spence,mischief,argent,delinquency,cou,sentimental,unsuitable,mildly,forging,pew,waitress,caribou,merced,expansive,footing,manu,sligo,remit,bonnet,stumble,undertook,promenade,exhaustion,unborn,wendell,hammers,coasts,emitting,concur,exert,madeline,sanskrit,torre,worldly,wedges,corded,heirloom,pleasantly,portray,pero,esoteric,luxe,messengers,landings,graphically,shameless,communicates,bourgeois,yeh,napkins,unloading,bakers,selma,pears,heats,lucid,lobe,canaan,oppressed,infer,prosecute,thatcher,bret,hauling,inconsistencies,indebtedness,scramble,adversary,elsa,quaint,oswald,dipping,revere,troopers,domaine,olde,guerra,solemn,eruption,celeste,gentry,enchanting,preached,mica,cadets,lads,endured,ensuite,fermentation,careless,chemists,inca,fad,julien,dandy,narcotic,moulin,paine,incompetent,ain,predecessors,lancer,sorcerer,fishers,invoking,muffin,motherhood,wexford,ihre,dressings,partridge,synod,noticing,inte,newmarket,amigo,discerning,caddy,burrows,furnaces,zee,occupant,livingstone,juggling,wildfire,seductive,scala,pamphlets,rambling,kidd,bedside,lausanne,legality,arbitrarily,heb,luz,regulars,robson,mysticism,accompanies,summed,chopin,torches,dominating,joiner,viejo,explorations,guaranty,procure,stillwater,sunsets,cropping,anastasia,arrogance,diverted,forgiven,bleak,christophe,wenn,drudge,dolores,tramp,saliva,chichester,artemis,lessen,weller,syringe,diversions,admiralty,powdered,granger,prevailed,glacial,alleges,shredded,antiquity,zeal,valparaiso,blaming,embark,manned,porte,johanna,granular,sant,orkney,bah,vero,oscillations,sphinx,spiegel,mujer,ceremonial,sonnet,constituencies,sprung,hedges,inflated,crooks,prospecting,quilted,walled,immensely,trafalgar,relapse,descend,jakob,bolster,nietzsche,fol,rocked,rancid,disparity,malice,vom,knapp,swimmers,syllable,painfully,sweating,demolished,catholicism,trident,lemonade,absences,andes,ciudad,josie,persists,propeller,dents,anarchist,submerged,entrusted,essen,calming,intending,cromwell,drummond,dissertations,highlander,solicitations,lar,punto,survives,darcy,funnel,moons,gent,thirsty,freshness,lathe,shabby,punched,petri,virgil,gaa,marbles,cottonwood,mildred,deletions,cleopatra,undecided,startling,inductive,inadvertently,bursting,wird,halves,moulding,melancholy,observance,leaps,halen,galvanized,hoy,teapot,conveys,lends,squire,ache,counterfeit,waller,duval,yoke,resonant,mak,outskirts,expedite,grayson,sweetness,crook,rearing,davison,tins,deliberations,indifference,xix,invading,dives,loot,coyotes,stale,cosmo,levers,cog,incarnation,strained,putty,reacted,admissible,sunless,puzzled,unexplained,patsy,thermometers,fourteenth,compounded,chippewa,eldest,terrifying,climbs,uprising,gasp,swans,tories,hap,remnant,immoral,sacrificed,unequal,weaken,braxton,categorical,cupid,stalking,sturgeon,jap,piers,ensuing,mitigating,tint,dykes,revived,joachim,eet,earle,hosea,sua,haste,flakes,alfalfa,corfu,arg
yll,emil,joking,rhetorical,simmer,vert,smallpox,overwhelmingly,waterway,migrated,reacts,bain,norbert,complication,aubrey,adaptable,sainte,bitte,fleur,muy,berth,uninterrupted,lint,chalmers,crabs,tuscan,lingo,einer,budding,roam,resemblance,hackney,toto,hebron,saber,cataract,midday,fait,innate,medallion,prominently,kant,nazareth,nadia,glanced,calais,rapture,sunbeam,abruptly,beetles,caspian,impair,stun,shepherds,susanna,philosophies,lager,projecting,goblin,bluffs,parrots,anthems,terrified,nocturnal,nueva,emulate,accuse,hunted,diminishing,lew,ridley,produits,zipped,intrepid,babel,clustered,primate,eyebrows,compromising,willingly,harlequin,revisit,insulting,prominence,cuckoo,parrish,inspires,acacia,fang,netting,contemplating,erasmus,sop,recalling,practising,hermitage,starlight,foyer,palaces,brood,azure,compel,contradictions,festivities,trenches,sabine,doorstep,sniff,dangling,negligent,gliding,woe,meditations,tranquility,halted,liza,drawback,smyrna,hostess,weep,posse,mosquitoes,commun,weldon,frying,hesitation,imprinted,bereavement,surrendered,iam,bestand,westward,converged,leopold,recognizable,ludlow,sprague,saba,embraces,gustav,waxing,gael,sinner,auspices,coles,ergo,dissenting,melee,radcliffe,countess,pleading,crafty,llama,montague,troubling,vowel,reuben,cob,fearing,coronation,isabelle,reluctance,inconsistency,apostolic,summoned,treble,galley,shovel,kam,entail,mashed,aire,pacing,moan,opec,jimmie,henson,unfolding,tottenham,deserts,milking,wilbur,suitably,enormously,aber,cicero,scribe,nellie,sleigh,formulae,fen,sank,frontage,blister,ration,humid,portrayal,guile,lacquer,unfold,hammered,tutti,mined,caucasus,intervening,bale,astronomers,thrills,therefor,sores,fel,pastures,unattended,playwright,carthage,zechariah,selves,naturalization,whispering,dissipation,sprite,keel,leighton,atheism,gripping,cellars,tainted,remission,praxis,affirmation,perturbation,wandered,reeds,angler,astounding,cosy,resend,augment,flares,shedding,glastonbury,funerals,eucalyptus,conservatism,questa,bumped,fortuna,cripple,lofty,proclaim,cropped,merton,ere,richly,ravi,dogma,priori,vaguely,yam,ple,siberia,melons,farley,seer,evils,spontaneously,unavoidable,ruthless,almonds,ecclesiastes,aptitude,vial,chao,sharpening,seniority,prompting,objected,equator,guilds,blatant,favoured,ridges,oysters,gust,cate,receptacle,mendoza,haus,puberty,shorten,shawl,samaritan,bends,grimes,unison,tabular,amir,dormant,nell,restrained,tropics,concerted,avenir,refrigerated,crouch,pence,formulating,lamentations,napkin,emile,contagious,inaccessible,administers,crockett,conspicuous,barbarian,soaking,reforming,gar,intrusive,thyme,parasitic,abusing,receptive,capt,uwe,xvii,vulcan,musk,lucille,executions,refreshed,guarding,atwood,windmill,lice,garter,footed,dedicate,libros,renewing,burroughs,ioc,skim,touche,welt,veal,perpetrators,embarked,quickest,euclid,tremendously,anglais,smashed,oscillation,thunderstorm,retrospect,jog,hailed,bahia,miraculous,hounds,tightening,draining,paroles,sensibility,rags,punching,distinguishes,poi,dazzle,dangle,eaters,exceedingly,inauguration,inquired,repentance,unprotected,merle,savory,evacuated,reclaimed,prefecture,accented,crawley,baum,racket,hannibal,sickle,violently,attest,untouched,comforting,creeping,kerosene,appraised,restorative,chet,peacefully,stature,sentry,pel,assaults,berwick,vices,amo,tolls,degrading,forster,fireman,maniac,antics,deze,formative,recognising,wordsworth,wrongly,cree,physicists,falsely,abbot,officio,consul,plagued,lahore,aiding,kunst,suckers,swallows,patronage,canoes,matilda,fodder,impetus,peeled,whining,arson,hirs
ch,tapestries,transatlantic,jak,freeing,kilkenny,redress,settles,seaman,skulls,cayenne,treatise,defeats,testimonies,kali,weitere,itch,withdrawing,solicited,jai,gard,brilliantly,deja,mccann,spalding,dill,reopen,potts,erased,resisting,congregational,antiquities,dunham,monsieur,inhaled,fuses,britt,blinded,madras,sacrificing,faiths,tinker,sonora,echoed,elisha,gazing,skepticism,zane,eighties,groupe,freehold,braid,ance,forester,resisted,alp,munro,agar,arundel,shiraz,disgrace,mediate,rein,realisation,irritable,cunning,fists,pennies,jos,hemorrhage,awning,ointment,spilled,tripping,occidental,vigor,chariot,buoy,geraldine,matrimonial,squads,niet,tenn,disclosing,masthead,ursula,disbursements,boucher,chadwick,candidacy,hypnotic,adultery,fis,seventeenth,temperament,prostitutes,healer,hive,circulate,glued,sycamore,belinda,westmoreland,shuts,tenderness,ocular,smelling,dung,keine,scratched,conclusive,alder,polluted,undersigned,lark,oda,carlyle,restores,lullaby,sanderson,hoes,lawns,midas,choking,castor,plentiful,bonner,stately,raced,deuce,oma,squirrels,paddington,drawbacks,evoked,dictates,studded,individuality,spared,anticipating,californian,brownie,undressing,quits,ensign,restraining,blockade,girard,nearing,ruff,burglar,warped,tributes,freezes,knoll,thinning,reddy,primrose,parting,humber,michelangelo,corduroy,torpedo,muffler,troublesome,eucharist,wadsworth,magnetism,hodgson,inventive,speculate,craze,dispatches,craftsmen,desiring,felipe,hoffmann,texan,nombre,grated,submarines,provoke,romana,accommodating,grenoble,calvary,banded,deportation,harald,cuttings,invests,sculptor,kildare,commended,roper,narrowing,sergey,mechanically,profanity,playmate,scum,seasoning,adolf,adjourn,widows,conveying,precincts,volta,mediums,discern,bran,fumes,futile,disqualified,fenced,eel,animate,faro,resembling,buren,totem,experimentally,drinkers,hermione,indus,harms,asserting,affluent,ell,protesting,dix,lonesome,liberated,unconventional,amore,reckoning,fabian,concurrence,closets,carve,metaphors,muster,labourer,heartfelt,pertain,democracies,gideon,mallory,gauntlet,martyrs,cots,victorious,sylvan,beverley,unnatural,swish,confessed,nae,drumming,patching,fret,abiding,luscious,sighting,relic,slipper,augsburg,bil,argyle,cling,prophetic,commune,agatha,tut,haut,gesellschaft,circumcision,neutrality,aqui,snoring,trembling,reproducing,comets,unitarian,governs,gums,delaying,mainz,reconstruct,toned,erred,modelled,expiring,mabel,whistles,jewellers,kann,caron,understandings,dared,herndon,nudge,seeming,rosebud,alf,andromeda,sixteenth,origination,uso,doves,landowner,preachers,leiden,ramona,glib,brutality,fictitious,francesca,rumour,immortality,saffron,ragged,peerless,constitutions,improbable,reiterated,jesuit,excessively,mounds,extraordinarily,parted,munster,sufferers,skunk,interruptions,placer,lingering,brooches,heaps,hydra,anvil,blinking,sweetest,noe,dishonest,stalk,kun,inert,favorably,vocation,tribunals,cedric,favours,witnessing,eject,seventies,rayon,dryden,foreigner,policemen,unfavorable,anomalous,katharine,barter,rowley,modifies,frugal,starry,thanking,nouns,consequent,entrances,danube,evasion,filenames,mayors,gospels,wicket,cora,lazarus,vile,misguided,reunited,conversational,inspirations,blasted,shingles,gresham,cumbersome,immersed,philemon,roasting,accrue,loire,vented,pont,consolation,cer,frazer,outlay,dreaded,airing,alternately,gracefully,intrigued,antagonist,exalted,cadre,serb,jaeger,overthrow,patiently,cabot,controversies,narrated,squat,illuminating,artificially,saucepan,freshest,noi,martyr,hacienda,koran,quito,tiara,elegantly,temptations,ski
nned,irrigated,hives,groundwork,cyril,kew,resentment,glaciers,peri,manfred,gaping,infringe,porta,inferences,abrupt,gambler,dissection,nightingale,landau,contemplate,amigos,putt,colonization,coon,crock,ailments,disagreed,boldly,narration,unopened,insisting,yeas,brushing,resolves,sacrament,cram,shortening,cloves,marketable,presto,hiram,broadening,hens,bowed,whimsical,harden,molten,repaid,warmly,hogs,sporadic,eyebrow,strickland,unnecessarily,iom,tess,trois,painless,serbs,verdi,annexation,dissatisfaction,alpes,applaud,haben,primo,abolish,climates,uneasy,busiest,fray,florian,clogs,flank,cartel,numerically,perforated,intensified,sexton,postmaster,washes,shrugged,electors,departs,mindful,lurking,hitherto,egyptians,looms,spectre,downright,refractory,counsellor,inexperienced,outraged,belgique,smother,frosty,mules,sash,truro,moaning,ponies,originates,blight,physique,independents,contentious,cheering,archibald,emancipation,duchess,commemorate,spout,perish,hoist,narrower,captivity,peyton,overloaded,shorthand,ceres,bravery,lizards,einen,fergus,sincerity,calder,oar,mullins,flagged,relics,relish,imagining,belongings,lire,legislatures,unchecked,knocks,alfonso,contradict,fleurs,scarcity,ashby,fleeing,filament,abingdon,theorists,hof,southwark,celia,disguised,implanted,thrash,antiquarian,dina,fluency,uniting,behaves,slabs,conceivable,agate,incline,hartmann,bai,soliciting,thoroughbred,calle,oneness,climber,commonplace,intellectually,casanova,himalayan,downfall,bookcases,strides,vanish,ute,transmits,adair,impatient,aforesaid,elbows,truce,bette,stairway,woodrow,sou,boar,vertebrate,laird,multiplicity,objectively,resigns,anguish,petal,perfected,tomlinson,odors,mite,blackstone,clipped,lago,jed,dries,mejor,sikh,annoyance,grating,prostitute,mina,elixir,guardianship,gamblers,autre,peeps,rol,reverence,sardinia,outweigh,verne,gaylord,bunting,avenger,spar,waugh,captivating,tiers,centurion,propagate,prosecuting,montpellier,willem,slavic,nutritious,marguerite,vapour,pluck,cautiously,prick,contingencies,coercion,picard,rubble,scrambled,agitation,chas,truthful,woodpecker,herds,corsica,penetrated,sein,adder,weakest,weakening,nome,thorne,anticipates,poignant,germs,frees,punishable,fractured,waterman,brat,uranus,salient,gabe,censor,semitic,wits,perverted,bordering,widowed,tombstone,begged,flushed,cautions,lavish,roscoe,brighten,vixen,whips,marches,xxi,anew,commandment,undetermined,horner,yah,conceded,circumference,postpone,disproportionate,pheasant,alonso,bally,zijn,guillaume,marrying,carvings,complains,resided,terriers,weasel,venerable,preis,toasted,admirable,illuminate,holbrook,fades,bulge,eller,lucinda,brittle,bandits,politely,desde,watermelon,ingenious,carols,pensioners,obadiah,mannheim,hepburn,fetched,alderman,lockwood,coughing,hiatus,upholstered,evangelist,louvre,spurious,gloom,severn,angelic,astrological,nobility,bayern,afternoons,ramifications,wakes,ashore,workman,swimmer,sitio,unload,loon,marge,wanderers,sips,badness,undertakes,miscarriage,vulgate,stoned,provoked,herr,fables,crumbs,wort,palisades,confidently,commences,dispense,dangerously,figaro,sadie,protested,capitalists,accusing,stink,convent,valdez,childish,adhered,priesthood,jagged,dispersal,overt,verbally,squeak,constituting,nuns,pronounce,scorpions,incompleteness,thurston,dearly,suggestive,osa,electrified,unbalanced,gypsum,slime,baroness,winnings,imaginable,bromide,lui,crusaders,summing,lament,gregor,terraces,canyons,predatory,towne,descendant,disgust,banked,rationality,screwing,dismal,ranches,cochin,wipo,prologue,whaling,patrols,stumbling,swung,outlaws,sinn,wav
ed,libel,ellipse,alarmed,justine,jest,garda,eskimo,caesars,luce,strapped,reluctantly,woodwork,centrifugal,authorship,cavities,buxton,cravings,decidedly,pau,apathy,mercantile,stalled,infused,peaked,stronghold,huxley,moritz,bearded,greasy,vowed,carnage,asher,ingenuity,mort,infested,creeks,bessie,adele,ota,rattan,coroner,irregularities,tiled,elaboration,hectic,lun,snuff,convene,vai,calmly,horribly,dilute,contemplation,sino,uhr,carta,gaseous,afflicted,gloomy,kirkwood,orchards,prophecies,marques,septuagint,pertains,clothed,plummer,italians,talon,repellent,laval,sorcery,abstain,elsie,barring,undermined,tid,bestowed,habeas,inactivity,crewe,grassy,aprons,clumsy,columbian,ayr,pounded,carrington,stint,rousseau,sarcasm,accomplishing,overturned,uphill,maximus,warmed,parable,jolt,affords,deadlock,deriving,quadrangle,elects,liebe,eradicate,likeness,ral,jem,unter,alpaca,degrade,flemish,shred,conseil,steamed,aroused,remittance,sieve,bloch,alienation,reddish,impulses,interpol,pleads,whitby,goliath,caprice,hors,horned,fowl,janus,hester,benevolent,superstition,cohorts,camilla,rarity,limbo,shove,accusation,bernardo,flake,hating,pate,sewers,spores,mahmoud,shears,mucho,flutes,tabernacle,minced,westerly,despatched,munitions,symmetrical,ornate,midwife,uniformed,snug,coveted,prohibitions,moulded,deceived,convict,nai,tossing,regularity,criticised,lawfully,goethe,slade,dumas,jester,notifies,recount,dearest,nook,commensurate,schiller,bowler,wiser,gallant,disbelief,gon,unqualified,cautioned,recollection,locomotives,condemns,fastening,jeweler,nuremberg,ostrich,maud,flirting,misplaced,prosecutions,dido,poisoned,researches,chou,discriminating,exclamation,collingwood,intercepted,ascendant,flung,clovis,eam,railing,cremation,banter,balconies,awaken,pigeons,singularity,signify,granddaughter,subdirectory,bancroft,progeny,alters,gratefully,divergent,fleets,dorian,juli,tackled,shoals,tributary,clique,rosy,satanic,stubbs,durch,torment,mussels,emigration,howl,wel,iglesias,hir,ecclesiastical,crippled,hilltop,tabor,peut,tenet,fifteenth,chute,bohemia,mountainous,fonds,ogre,unforeseen,pickles,submissive,curses,stampede,utilised,trieste,whine,nus,fatality,tierra,looming,zo,sped,ankles,mosques,fuchs,guerilla,squeezing,fisk,canes,follower,euler,alumina,degenerate,spiked,cru,misrepresentation,strung,chanting,wrestler,officiating,hermit,behaving,colbert,josiah,deepen,acadia,eso,remy,pats,valentin,mora,cri,enrico,reciprocity,crease,wis,ook,bartholomew,perseverance,catalonia,yorktown,impede,clasps,tilted,vicar,confines,prank,dass,repent,dio,agreeable,riddles,bennington,pulpit,appreciates,marshes,bellies,corrosive,ambush,palazzo,franciscan,figurative,gait,emphasised,bonfire,aversion,vicente,stiles,stewards,chauffeur,elicit,henrietta,slapped,bitten,lind,salamanca,martyn,dynamo,hobson,stow,summon,skeletons,parchment,lingua,distractions,forfeit,pepe,paddles,unpopular,republics,inspecting,retainer,hardening,loosen,beowulf,undiscovered,einem,imputed,cabs,cheated,willows,hump,delft,communicative,grieving,chastity,faust,fright,harbors,adorned,obnoxious,diligently,decays,mortimer,marvellous,nouvelle,easing,mathieu,picket,thrones,emilia,eyre,maturing,seu,illogical,awakened,beet,suing,brine,lorna,waning,cartwright,armoire,piled,twinkle,lodgings,maitland,supple,geld,soi,fabio,unfit,uttered,rumanian,shaggy,elongated,ordeal,pegs,astronomer,incompetence,flicker,ramsay,relieving,towering,operas,slaughtered,assaulted,mena,rouse,appel,armand,spiel,impurities,stemming,inscriptions,hos,tentatively,tragedies,interlude,oates,dialects,vas,ovid,carcass,casually,sca
mp,freedman,reprise,zig,lash,ills,simms,danes,pebbles,quicksilver,sacked,omen,forfeited,stipend,conceptions,lii,amulet,informally,sarcastic,indemnification,hawke,complexion,daisies,informant,sorrows,ite,aegean,andere,sluggish,brig,tiempo,marsden,coy,grouse,reginald,wierd,pasted,moths,batavia,evoke,dispositions,haywood,staunton,nit,amorphous,tributaries,townships,nantes,assam,mousse,shameful,chiffon,archaic,elevate,deafness,bec,sala,laureate,contemporaries,syphilis,vigilance,appalling,palmyra,foxes,davie,affixed,ticking,pantheon,gully,bitterness,brill,defy,stor,consumes,lovingly,agua,thrush,bribery,smokes,ventilated,kettles,ascend,nutmeg,chained,magnify,precautionary,travail,livres,fiddler,wholesome,wrists,severed,mites,puddle,azores,vegetative,agora,sob,elaborated,reeve,embellishments,willful,grandeur,plough,pritchard,mansions,macpherson,overheard,persisted,whereabouts,haydn,symphonies,reclining,rodrigo,bounding,annexed,atheists,umpire,orthodoxy,kilt,doubtless,keyed,esquire,cryptic,primus,wherefore,cholera,midsummer,colouring,intoxicated,mysore,jerks,mise,darius,bullion,deflection,hateful,propensity,journalistic,essences,dispensed,lemons,stratum,vendetta,lod,felicia,restrain,clutches,cults,whit,amaze,manassas,rembrandt,estado,easel,reisen,potion,ovation,paddock,numerals,surpassed,vino,gable,johnnie,thirteenth,laced,quill,saa,mares,enthusiastically,fetching,chaps,tendon,bellows,keats,deceit,caro,unmarked,joyous,boswell,venting,infringing,blythe,chisholm,gunner,verso,samoan,absorbent,grossly,cleft,clog,hongkong,impoverished,stabbed,teaspoons,comedians,awnings,sill,lucknow,bleaching,isolde,startled,mathematician,untrue,algonquin,hurried,vir,dieser,staggered,vacated,vente,fitz,dura,fingered,apprentices,cerca,booted,allie,sens,sprouts,bower,moab,wolcott,extremity,orphaned,requisites,prudence,kaufmann,bij,gingerbread,biggs,tasteful,puritan,osiris,affirming,salud,excavations,forearm,distract,seaport,flashed,longs,dawes,buns,deceive,civilisation,starved,amico,colosseum,stipulation,emptiness,maddox,shoemaker,cushioned,dada,osborn,hastily,ful,invader,patriarch,consents,nils,polynesian,swain,lain,groningen,emilio,mourn,abandoning,oddities,soften,troupe,blacksmith,suicides,powerfully,compromises,helene,thirdly,classifying,deepening,unfairly,connexions,calico,wrongs,pores,johnstone,undermining,burnside,colossus,frivolous,indecent,dishonesty,oiled,turnbull,microbes,sharpen,phonetic,oppressive,coined,tito,moray,simeon,onslaught,nationale,noses,treasured,sharpness,corral,fortnight,lia,plunged,reals,modulated,defiant,brisk,meath,jena,ponce,perjury,mua,generality,vigilant,pronto,vistas,eerie,arne,stonewall,wrestlers,jackass,geometrical,priory,epsom,corpses,wiping,mercenaries,bronchitis,therese,whirlwind,howling,apprehension,raisins,turkeys,tio,hora,bobbie,shale,diligent,nachrichten,dann,adversity,wiggins,torts,egress,adjectives,crepe,dum,sheepskin,concave,heresy,armory,forthwith,avert,oat,guise,curiously,fullness,culminating,kipling,vomit,compounding,afar,ebb,shaky,brutally,pennant,nicest,willoughby,necks,lak,mathias,levee,hindus,powerless,populace,deliberation,soles,jetty,luster,overrun,undone,delia,habitual,alhambra,mee,uplift,causeway,murderers,reopened,guid,inhabit,lorenz,conglomerate,fastened,tompkins,extradition,geschichte,perils,jerky,proportionate,compte,algo,boroughs,deliverance,resists,lovell,discourses,subdued,adhering,falk,suspicions,hampered,bruxelles,detriment,prejudices,purported,tron,ine,mangrove,gab,fawn,scaffolding,prin,narrows,sensed,insuring,babcock,rhys,boasting,norah,ascertained,fluctua
tion,jeannie,ond,twenties,monstrous,stetson,accuses,calibre,nobles,fumble,attrition,atherton,lassen,proverb,darin,mercenary,clams,reis,tightened,levies,speck,gutters,murderous,rudder,amusements,scares,deformed,wretched,decadent,incarcerated,unsurpassed,surpass,annihilation,pietro,memoranda,steaming,magnifying,serra,hideous,abreast,intuitively,extremities,tyrant,decency,papal,sprang,palais,obscured,duets,mountaineers,blount,butchers,apologise,geologist,piccadilly,axioms,mogul,fiercely,varnish,hysteria,nei,insistence,aer,clockwork,mecklenburg,intelligently,fuer,vials,imputation,albrecht,densely,droit,odin,colton,distrust,ulm,assassins,hatton,fraternal,refinements,eloquent,cwt,silas,wondrous,decrees,touchstone,etext,drayton,grieve,reigns,pleasurable,dobbs,tunis,olin,bustling,galt,flue,lucerne,fiasco,emir,deacons,slings,dwarfs,apportionment,thoreau,reins,anson,broadest,scrambling,misfortune,drenched,astonished,kiel,subconscious,agi,incandescent,disappoint,mobs,cris,rehearsals,massa,firewood,serenade,weathered,truffles,anno,kepler,teatro,lawless,gout,coincides,inhuman,gentiles,jardin,fag,rubs,irritated,despise,floated,fresco,auteur,custard,prius,dias,hasan,branched,shipbuilding,mildew,tombs,frown,fulfilment,accords,privy,caretaker,antonia,feeble,gentile,contractions,combatants,annuals,champlain,valence,deteriorated,droits,disobedience,gat,unpack,divination,haw,nationalities,cultivating,triumphant,superbly,hombres,constrain,magicians,gra,hobbes,contended,nazarene,potsdam,genevieve,shiloh,damper,afrika,forgiving,yahweh,madman,sor,slumber,shimmering,rigidity,bane,marius,inventing,chipped,ane,forts,tumbling,interprets,surat,dormitory,confiscated,discharging,unnoticed,ridicule,thaw,vandals,reinstated,lizzy,unpacking,darien,intersect,finden,janvier,garnish,designates,peeling,levis,blindly,unintentional,durant,repertory,toi,disagreements,gatt,bene,fifties,goody,dugout,battleship,talisman,eels,shun,blackwood,giggle,worden,deforestation,streaks,roderick,bor,corinth,perverse,glittering,jails,casket,brigitte,detour,husbandry,visibly,defunct,unveil,circulars,merciful,ines,tun,tipperary,kinship,springtime,philipp,blouses,hemlock,sniffing,uncanny,stork,concede,combustible,fallacy,nicknames,noxious,tunic,farce,drowsiness,chants,ashe,rhone,lunatic,pyrenees,auctioneer,recovers,haggard,manger,chills,whack,drone,breezes,esteemed,godly,spire,distillation,edging,langdon,mathematicians,soe,cymbals,antidote,emblems,caricature,shroud,stead,recoil,reconciled,daze,raisin,amb,amounting,schon,boer,poisons,nameless,trot,musically,intensify,voltaire,harmonies,benito,accumulating,indebted,wald,breathed,misled,mani,culprit,transact,billig,spiced,berne,pron,puncture,nella,lighten,practised,canteen,fein,hysterical,fick,darkened,requisition,shrug,boils,enchantment,greta,covey,donne,pena,loathing,duc,woof,ominous,parlour,hammocks,quieter,poking,tallest,wrestle,entrenched,rectify,virtuous,ous,davy,snails,decipher,incapacity,mittens,ferns,curls,ens,wrecked,wince,friendliness,invincible,healthiest,prometheus,rushes,deities,wor,comanche,melts,trickle,disapprove,erratic,familiarize,insufficiency,drifted,propagated,hardships,sabres,foraging,wasps,chien,mitre,tonnage,corals,mille,continuance,unrecognized,premieres,affectionate,baptiste,unimportant,ferrara,greener,bowles,endowments,grudge,zoological,norse,wetting,bosom,bales,blackbird,causation,persecuted,deciduous,straighten,convocation,merrick,precaution,playmates,philanthropic,maneuvers,stratified,critter,begs,emphasise,uit,adresse,connell,busts,cutaneous,porters,forgery,pereira,infreq
uent,mull,ort,brandenburg,incision,jumble,cognac,wading,imitate,grasping,borneo,mortuary,bode,thorns,rightful,scarecrow,mosaics,pious,utterance,undeveloped,basalt,undisputed,distracting,urns,unfolds,brocade,seaweed,prevails,candlelight,votive,wafers,messina,schumann,tarts,cuthbert,nance,babble,pessimistic,niches,untill,quid,cadiz,shortwave,overlooks,diversify,hugging,postman,oas,overboard,goddesses,faithless,regained,coolidge,ephraim,foggy,shone,criticizing,leafy,passionately,stroking,matured,dolor,procured,excellency,camels,partie,tou,justifying,eased,slay,deprive,kremlin,thea,lusty,virtuoso,buzzing,dauphin,steed,cowley,paraffin,unites,stimulant,realising,millet,invert,vermilion,grinned,marche,thelma,enlightening,endlessly,hasty,dexterity,puzzling,nods,dieses,sumatra,nigger,scrape,kendrick,prized,arresting,bewitched,resumption,irma,intimidated,traitor,clove,illiterate,widened,bordered,mallet,leech,giver,discontent,gaz,punishing,seedling,dwellers,mouthpiece,nymph,reassuring,astor,myles,prematurely,frail,adventurer,irradiated,awfully,mayflower,arched,enlist,vedic,exemplified,profane,ubi,cornelia,romney,macaroni,electing,dictation,tage,robber,evacuate,tus,conveniences,roving,drinker,softened,peking,fillet,maar,churn,nimbus,nog,smartest,neale,ett,madre,impart,feats,concomitant,donner,scaffold,oui,ano,millie,libro,leisurely,loki,dislikes,mayonnaise,dra,limitless,knopf,hangman,sloping,mitt,constitutionally,disapproval,bavarian,crucified,pocahontas,masons,surges,literatures,unlucky,yawn,distort,mun,wahl,loosing,canopies,handicraft,buscar,piling,basilica,amine,robbers,juliana,lowland,sausages,spake,feud,subordinated,awoke,unheard,prune,endanger,cairn,nomadic,disgusted,olfactory,prolong,fontaine,knits,thinly,tant,garnett,galen,arable,parallelism,brut,vernacular,latitudes,alkali,mowing,foreseen,palmerston,sever,expend,stahl,gist,auntie,afghans,blames,subdivided,happiest,lucca,francine,reserving,nagasaki,wid,indented,humming,disclaim,frans,diameters,exerted,justifies,freiburg,regenerate,titre,tumbler,bonne,improvised,flocks,bothering,garnered,fling,comrade,ascended,juliette,porcupine,chopping,enacting,stabbing,metamorphosis,hilda,wanderer,flattened,dawkins,spitting,inconvenient,seacoast,imperfections,lewes,chancery,raving,hed,executor,anglesey,choirs,wreaths,tasteless,tomahawk,tact,projet,instructive,absorbs,susannah,toutes,mathematically,godwin,drier,bothers,parades,shoved,invokes,cannons,hamish,chromatic,rife,rallying,enoch,carriages,dales,polled,agnostic,emptied,denounced,delusion,rimini,verity,turret,precede,huts,betts,domes,eras,wildest,foodstuffs,wessex,priming,vowels,sulphate,clandestine,migrations,hovering,texte,tamper,pugh,punishments,dagen,heathen,unduly,rigged,domicile,chargeable,fanning,meu,spurred,broughton,wha,osage,peregrine,tabitha,puede,crumb,fostered,culmination,revolves,mend,theoretic,softening,glimpses,hattie,tastefully,capo,grate,lourdes,diseased,kenilworth,margot,socialists,deduced,buttocks,unmanned,rainbows,gunnar,burials,eunice,bountiful,salazar,mesopotamia,jetzt,poseidon,ratify,mexicans,fiend,drapery,bernice,deported,muzzle,entrant,schoolhouse,retribution,yusuf,stallman,slander,basing,baits,fireside,disposing,herzog,suffrage,triumphs,fortifying,sleepless,schiff,watered,lass,fleas,tully,ventured,recite,kneeling,negation,dismay,smelled,jute,heals,prim,trespass,conciliation,compasses,groomed,leaping,impunity,sunken,inaugurated,encountering,infernal,sewell,pang,swag,reared,pampered,inquiring,numero,praising,momentary,commemoration,favre,poli,holstein,serpentine,hangings,lugar,sun
dry,protestants,therefrom,espace,wrecking,cristo,pique,swore,novembre,fawcett,journeyman,enlighten,descartes,flashy,prowess,abstractions,enriching,trampling,signet,bello,iroquois,digested,rothschild,trumpets,embodies,messe,manhood,kincaid,cannibal,nephews,oblivious,icao,atmospheres,stricter,jeter,memes,roughness,ancients,snapping,jethro,cauliflower,feudal,unbearable,perpetrated,basses,juni,boarded,olympian,sedgwick,livre,mano,interferes,devotions,myra,devotees,acquaintances,sectarian,fathom,cools,segundo,appreciative,innumerable,parramatta,noticeably,furs,atonement,extant,ignacio,unmask,chisel,mysteriously,wayward,redness,dreamland,wands,illustrious,fishy,nao,pauses,intoxication,glimmer,blooded,slamming,syllables,whim,filmy,timid,ismail,tampering,weavers,magically,pied,thyself,rooting,pretended,nigh,therewith,interment,partitioned,aller,populous,modesty,veils,frei,zest,sumptuous,wayside,spotless,wich,summits,ner,banc,barbed,legions,dona,lustre,wer,sunflowers,sommer,ecstatic,campania,blasphemy,wisp,countenance,skinning,sift,ooze,recounts,adventurers,oktober,bigotry,leaky,contradicts,leven,pagans,dinars,diesem,fume,afloat,bruised,flattering,brigades,leur,engrossed,dashes,impeach,atrophy,hur,brag,earls,confers,totality,circumvent,boulders,negotiator,yolanda,muff,maude,odour,bellamy,snag,fringes,gough,excavated,smoothed,affirms,gulch,irrevocable,wieder,moaned,axles,graciously,radiated,bribe,propel,outspoken,verily,ardent,forcibly,presided,shimmer,tremor,gnp,loaned,violins,extravagant,ghent,astute,jamieson,pemberton,inflict,invalidate,ridiculously,legible,towed,disregarded,auguste,puc,salted,attractiveness,calamity,brewed,aristocrats,fiance,sprawling,vulture,mislead,ventral,twa,retard,medio,platters,canto,germanic,harassed,discriminated,estelle,sponges,cavendish,receptacles,jacinto,revered,harassing,dislocation,shingle,timbers,undergoes,tilting,conquering,harass,meditate,hues,alsace,denominated,ostensibly,lumps,facie,emploi,cretaceous,fished,drizzle,bracing,mesure,blackmail,corte,remorse,navarre,clout,jours,wag,fella,mountaineer,pondering,purposely,worshipped,lucifer,unholy,spectacles,dulce,muttered,aquila,hoff,mme,spat,henceforth,argo,strapping,expedient,unconditionally,ices,secreted,buch,chaucer,livery,recapture,chevalier,incompatibility,anchoring,navigable,personas,milieu,stonehenge,injure,knuckles,zoeken,intermission,amazement,medusa,pagoda,manifests,primed,keg,recited,reformers,ensued,justly,throats,aron,barrage,pis,pari,buoyancy,aussi,curled,raoul,peeping,paces,heaviest,walnuts,ena,broadened,lashes,esplanade,prairies,mandel,conical,tricked,etymology,cheaply,allege,draped,subtly,manslaughter,consort,shad,fleeting,sibley,plumb,needlework,caballero,annoyances,uti,bacchus,chuckle,unfolded,israelites,rit,briar,wavy,moulds,hindered,bloated,pranks,mantel,languedoc,fatima,disordered,belated,englishman,winder,paralyzed,junta,shrunk,crammed,aar,hatchet,unsuspecting,dismissing,cetera,windfall,filaments,jocelyn,companionship,creeper,cuando,epidemics,illegitimate,slag,undisturbed,transcendental,georgina,chantilly,farmed,fuentes,malo,complicate,alston,indistinguishable,skillful,groot,compensating,overrated,reasonableness,nuances,knuckle,bastion,scraping,gypsies,concurring,assemblage,watery,tro,juanita,coiled,yucatan,sipping,beatrix,cheerfully,sledge,gilded,murdering,dijon,unbroken,sages,tropic,capella,beim,condemning,entourage,travers,familia,iota,realist,suppressing,scorn,crusades,pris,whirl,pervert,defences,humiliating,circled,withers,sprout,elicited,swirling,campos,clinging,bunches,bagged,negotiators
,deviate,blackened,whereupon,muriel,hostilities,atelier,penned,conte,horatio,cheered,bled,throbbing,sleepers,seiten,zeit,sallie,solace,lucien,havre,moles,unloaded,projectile,transplanted,bandages,handcuffs,beacons,stucco,intrinsically,geschichten,impervious,shams,shawls,aos,flourishing,precedes,bruises,instructs,palatine,lor,carnation,kangaroos,slum,ruffle,knack,rivet,aragon,aggie,tilly,sonya,haue,grunt,talmud,grammars,overalls,doubted,ravaged,whistling,upholding,ailing,obeyed,tattooed,ghostly,mutiny,delusions,foresee,rations,bitterly,windmills,perpetrator,cleverly,misunderstandings,amerika,counsellors,amis,sisterhood,lightening,overturn,doit,thoughtfully,mortem,rencontre,risking,proprietors,tatiana,ingress,gros,barbers,retires,duro,commotion,deduce,bolted,materialism,eternally,senseless,rabid,reassure,recollections,probed,pox,hamlets,unwritten,jammed,moveable,housekeeper,agrarian,humana,lovable,sawmill,abram,catharine,consented,perseus,styx,congested,banished,terraced,buttermilk,laces,toil,hugged,flurry,gower,warmest,horrified,walpole,cada,alte,bertram,perturbations,adversaries,aunts,mau,vapors,skylight,gemma,constantinople,monarchs,unsolved,strenuous,roost,unreasonably,shuffling,ludicrous,tenets,albanians,pius,garb,steadfast,reckoned,promissory,overflows,queried,squarely,softness,crayon,rotting,exhilarating,excepted,flavoured,marque,ditches,millionaires,evade,pars,scourge,twig,lapis,bandage,detach,virginity,mala,doctrinal,adaptability,cramped,wept,ganz,racking,corrects,avignon,servicio,vanishes,obedient,selkirk,mur,sects,modo,anxiously,ascribed,strikers,optimist,gratification,seashore,automaton,otros,pierson,unskilled,brigadier,consonant,acetic,unarmed,dyeing,intolerable,republished,tawny,absinthe,hygienic,sufferings,tahitian,propagating,sacraments,layman,vellum,ignatius,emperors,ferro,stalks,stanza,londres,terminations,novices,grasped,bequest,deo,beggars,redeemer,florin,quixote,chaise,paternal,dey,rained,indigent,trellis,trabajo,mythic,crystallization,marries,echoing,recitation,aptly,alleviation,liege,remittances,romances,nieces,characterizes,papyrus,fop,candlestick,circling,hellas,sheik,pints,girdle,siamese,veiled,blotting,intimates,eruptions,javelin,ipsum,stares,eastward,tecumseh,yon,entree,desist,grasshopper,rheumatic,autobiographical,piety,embody,petites,gris,crawled,soiled,dich,froze,superfluous,gai,disarm,sot,tacit,chansons,parenthesis,reorganized,daybreak,rallied,quakers,pentecost,beulah,unveiling,burg,astray,blisters,infirmary,hinted,sanctity,gad,modus,pedantic,beste,dennison,grandes,bullies,notoriously,lucius,kirsty,caustic,rook,gleaming,dominoes,tua,parochial,bertie,moreau,precedents,exiled,howells,pall,mustered,pretext,whisk,flared,kleine,deference,artful,eld,audacity,margate,judson,downwards,moat,inasmuch,plotters,caress,hails,swam,wilfred,mauve,hazy,twitch,alegre,glorified,combed,reclaiming,baptists,paraphrase,flounder,crept,fibrous,swamps,epilogue,hoof,epistle,exiles,wheatley,clapping,finesse,sociale,cordelia,infrequently,favoring,converging,cour,firma,inquisition,reputed,dinah,seduce,bearers,kimono,guesses,foote,impossibility,ceylon,courant,invasions,eminence,canna,liberate,gracie,gunther,hanged,flatter,acquitted,dimmer,sola,cauldron,dredge,tingling,preferring,cordial,reassurance,superintendents,nervousness,delineated,imaginations,quarrel,bess,aryan,tendering,transitive,furthering,connoisseur,idealism,separable,awa,liqueur,spokes,pastime,pursues,bugle,luxemburg,disperse,incoherent,fours,treffen,devout,strom,alva,unfurnished,blinding,inaction,northward,trotter,subversive,co
ntre,impediments,armoured,breathless,intertwined,steen,corkscrew,trop,affections,inherits,mortals,purgatory,vise,comer,tillage,pere,discloses,easterly,lagged,hawker,vertebrates,toughness,disrespect,lagging,uncovering,indeterminate,refreshment,momentarily,festa,langer,lute,rosette,changeable,tragically,waverley,clapham,trumps,justifiable,twofold,sicilian,marlowe,unearned,thwart,potted,chanson,amelie,incurring,gracias,convalescent,terme,mackerel,goings,brim,clinch,provident,leprosy,chum,cometh,fitter,glut,fasten,locksmith,interrupting,sulla,daggers,pleases,moors,arno,geranium,kendal,revolve,choc,waged,waxed,concourse,confine,jaded,mingle,purify,desolate,withdraws,choked,whereof,pape,gruesome,pleadings,defying,sacs,perished,erskine,tentacles,britons,pringle,outcast,faraday,oblong,ophelia,wearer,propriety,attainable,hearsay,roomy,brutus,obscurity,heros,colonists,matting,overflowing,capers,entice,lasso,soot,yonder,virulence,heretic,draught,comical,generalizations,waiters,gasped,geologists,caverns,boarder,bumping,eines,greets,ova,waxes,whiz,bevel,straining,seduced,angrily,croquet,vacate,stanislaus,soundness,marquise,bonjour,xxiii,protracted,siegfried,affaires,digby,eyelid,undeniable,taming,precluded,repressed,perforce,barons,boundless,hopelessly,grandchild,sucre,pasteur,valuables,indignation,sprinkled,menstruation,stuffs,antichrist,emptying,reiterate,himalayas,monopolies,sowing,frills,wad,shearing,ruining,pinion,yew,windward,hermosa,haunts,unsere,brawl,delirium,unfounded,heroism,gillis,rutledge,barrister,neglecting,saxony,karel,vane,alienated,tum,synagogues,entangled,mane,reise,liberating,embarking,tonneau,cynicism,bayonet,considerate,extraneous,janitor,environs,reverses,reunite,hawkeye,steers,ravenna,crockery,juries,presidente,nang,gare,legacies,tial,theologians,arnaud,enticing,embankment,quadruple,crazed,xxii,equipping,fondly,whither,counteract,sighs,discouraging,flasks,preservative,tribulation,bridesmaids,rhea,raided,salaried,mises,intolerant,rarities,battled,obstructions,discredit,grotesque,artistes,perugia,gij,spoils,monasteries,crucible,modena,generalize,hasta,pronouns,misconception,rudimentary,sown,protege,vulgaris,beak,settler,prag,rabble,rung,piped,orpheus,retour,insurgent,rightfully,hilfe,medici,fabrice,marshals,nue,crumbling,relegated,allotments,immer,stagnant,giacomo,follies,dells,cleanly,unclean,seizing,molasses,tablecloth,hutchins,purifying,delineation,schooner,dignified,numbness,papier,machinist,anima,apologized,meshes,grotto,marais,loam,politique,carnations,rivets,jeune,hatching,leveled,graces,corinne,adheres,collusion,rawhide,propos,knotted,agitated,sorter,misused,relieves,linguist,rigorously,erroneously,especial,betray,dario,cui,heywood,suspending,mormons,davids,bennet,proclaiming,purposeful,undress,procrastination,hemel,gauze,precepts,constellations,gazed,skips,forceful,fuente,magdalena,rut,sehr,hera,subterranean,rumored,galicia,amuse,villager,fixer,condensing,emanating,assassinated,brodie,untimely,associating,romp,idiom,tangle,legitimately,congratulated,couriers,unwelcome,concurred,upsets,sceptre,confederacy,matinee,snatched,plunder,maa,impromptu,searchers,gamut,czar,putney,shattering,refute,amphibious,mush,shudder,eyesight,parson,infidelity,firemen,contrived,exhausts,opposites,dreamers,foal,hesse,hesitated,precarious,hodder,pease,testifying,topographical,instructing,dreary,crispin,horrid,dryness,wreckage,paras,captives,despised,conqueror,innocents,unprepared,dost,treacherous,filet,infidel,volley,carnal,larceny,versed,confronts,parliaments,mitigated,youngster,enigmatic,bridle,
stretcher,cosa,enfants,leila,berliner,effecting,hallucinations,unravel,smugglers,intimidate,rubens,galilee,frenchman,tiller,orifice,bragging,hordes,beryl,ferre,forerunner,grinning,slashed,watchful,appalled,silenced,vanities,evaporated,affliction,zag,intestines,saute,iba,schuyler,idyllic,satchel,peruse,revel,alleys,crucifixion,hearn,madly,stiller,experimented,comming,steeped,gripe,summa,eyelids,thereupon,archers,steamers,bubbling,forbids,disdain,exhausting,absurdity,magnified,horsemen,alabaster,reigning,deane,georgie,zara,bribes,kidnap,coercive,romanticism,luo,forme,reinstate,unthinkable,lowly,outburst,scant,mattered,fitzroy,ove,raspberries,sorely,pail,obtainable,elvira,mastiff,drummers,reformer,solemnly,liberally,dahlia,concentric,loin,ved,unwarranted,marmalade,sandoval,applauded,ravine,exponents,brice,ressources,californians,procuring,pours,leer,nave,arranges,valhalla,adoration,amity,superiors,decanter,starve,leek,shortness,fronted,lightest,banquets,picnics,compulsion,prerogative,abscess,paraphernalia,heretofore,memento,lina,tumbled,masterful,insoluble,cockburn,harwich,casas,semper,repressive,clos,sweeter,mattie,deutscher,spilling,saucers,gondola,elizabethan,hein,spines,reiter,amphitheatre,stupendous,flutter,acumen,absolut,shiver,lumiere,shatter,pickled,nieuwe,hades,superimposed,burdened,randal,dandelion,nuance,classmate,catechism,driftwood,rosalind,giorni,juin,bigelow,anointed,mythological,interspersed,horseman,nervously,intruders,chaparral,nya,decaying,vez,muses,padlock,oars,gilead,classed,informer,freer,toute,calabria,dismantled,overcame,exertion,solidly,affidavits,weaves,chimera,handkerchief,foaming,tailors,barbarians,splendour,niveau,sheriffs,tassel,admiring,harmonized,khartoum,leans,frankreich,baffled,wasteful,hertford,tripoli,refraction,grainger,penzance,fillets,aztecs,consults,hoi,foils,retract,inaudible,nurtured,frantically,buoys,tait,disintegration,theologian,aquitaine,sigmund,individualism,starboard,precludes,burdensome,brest,renown,murky,truthfully,deutschen,tongs,perpetuate,vigo,cabal,musa,materia,interwoven,beggar,pard,extinguished,silhouettes,abundantly,declination,excesses,mucous,poked,caricatures,artiste,bogen,repose,hasten,tendered,temperance,risque,resembled,helpfulness,omitting,earthy,adored,embellished,feathered,aggrieved,hacer,assisi,aggravating,insulted,fugitives,passe,anecdote,partake,pseudonym,altitudes,carolinas,strikingly,zy,rancher,morn,bodyguard,gnats,solon,eduard,detract,portraying,pitted,enlarging,wrecks,bombardment,buckner,dares,tems,eigen,siesta,satirical,paar,antoinette,ugo,cynic,amenable,runways,frowned,sass,rout,pus,rubies,checkered,hatched,sketching,hypocritical,trample,courtship,cupboards,tolerable,magi,brescia,alonzo,tutto,attenuated,inefficiency,merci,booms,demented,eri,bonaparte,musketeers,twickenham,glee,forgets,grapple,lowlands,stimulants,greenery,proverbial,tranquillity,numa,monastic,uncles,eph,soared,householders,nestor,impediment,hel,anarchists,freund,perilous,devonshire,tanto,violets,nouvelles,nether,nomads,ramble,ambulances,natura,hams,idiotic,parti,cerberus,bering,formosa,erg,bough,hoot,herewith,workmen,grist,penrose,duster,pronoun,signer,sloth,steely,pulleys,fates,stews,nourishment,gravitation,loophole,drags,retrograde,sade,exaggeration,shadowy,liquors,archangel,fenwick,creases,primordial,nourish,vit,uplifted,percival,gingham,batterie,gossamer,hairdresser,plover,weg,mow,disliked,leinster,impurity,worshipping,chasm,nuovo,greenish,regiments,adel,selfishness,reactionary,adriatic,ejected,grappling,hammering,mingling,earnestly,scribes,leed,monol
ogue,amphitheater,vive,signaled,clem,littered,acutely,razors,masse,legumes,speculated,worded,quant,fleshy,desirability,sundown,persistently,decoy,balsam,baruch,verdicts,authorise,outcry,eyeglass,waterside,grime,extortion,cordon,colorless,idealistic,cutlass,rigor,greyhounds,amalgamation,preponderance,cowardly,pretentious,cervantes,wielding,gusto,maidens,weimar,mijn,humbly,langue,unworthy,expectant,laurens,azalea,jeannette,fruition,florentine,dwelt,vlaanderen,oberon,enslaved,vil,cathay,jura,correspondingly,legalized,predicament,hilly,aisles,trusty,gratuitous,fatally,caged,ephemeral,radium,dissimilar,mutilation,kon,waging,infringed,overwhelm,cognizant,profil,andalusia,rowdy,popes,bravely,sportsmen,stumbles,clematis,slashing,leger,incomprehensible,suez,clogged,gabriella,fluctuating,demeanor,shipboard,labourers,paganism,fido,sounder,mest,caledonian,hegel,stench,cursing,pmb,wickedness,crouching,attila,emits,culminated,thefts,sturm,weiter,auld,spanned,ebenezer,closeness,redeeming,polity,scriptural,transylvania,obscenity,gaul,heartache,reigned,entitles,exacting,wanton,pelle,enforces,necessitate,locket,aver,commemorating,reconciling,desolation,gander,bastille,traceable,voila,savor,darkly,faithfulness,resourceful,heraldry,incomparable,dilated,angered,condone,ahora,mademoiselle,constitutionality,viscount,preliminaries,devolved,liquefied,alcatraz,streamed,resorting,garters,adamant,pontoon,tableau,vernal,napoleonic,tennyson,rubicon,disorderly,tala,ivanhoe,destroyers,analogies,frigate,instalment,dazed,sentient,entrust,iti,puffs,burying,dispatching,cyclops,veritable,posterity,keenly,healthful,nem,meine,repealing,gourd,groaned,ferocious,voicing,mons,sacrificial,defies,abnormally,resuming,bruising,flogging,religiously,mundi,encroachment,demande,seaboard,laplace,southerly,humiliated,unearthed,sut,cataracts,subordinates,vagabond,consecrated,oscillating,jib,bodice,foray,opiate,cristal,unmistakable,filly,rhubarb,silencing,aesop,hab,diminishes,tidings,sneaking,unassisted,insidious,dike,immutable,croton,depots,nodding,jasmin,libri,misrepresented,amici,substantiate,algiers,ocho,templar,cedars,fortitude,aloft,mated,wart,tribus,hollander,ruffled,armament,plums,tien,revisiting,fairer,enterprising,prides,grafting,smoothness,trinket,neutralize,vasco,playwrights,wishful,fal,herod,trailed,habitation,rogues,speechless,expanse,preside,arles,colette,delightfully,oeuvres,concealment,unruly,uncompromising,moriarty,obstruct,unbounded,coincided,encased,undertaker,flickering,sive,gush,saddened,bathe,scarred,ignited,crowding,tew,vrouw,gladiators,krebs,stoddard,scrooge,aeroplane,nagging,contemporaneous,precipitated,hiss,outlawed,injuring,bellow,girth,poppies,inlaid,notched,baldness,didactic,lillie,irritability,provocation,lustrous,reeling,desertification,rennes,crests,molto,loafers,slapping,tiene,squires,insures,slaying,mie,frauds,lobes,dios,thundering,remus,coals,succulent,heartily,hic,yellowish,unsuccessfully,moderne,moustache,geen,lobsters,eventful,feasts,stiletto,teacup,rebekah,kein,alvarado,secession,countered,instinctively,conspiracies,chapels,grado,minions,brunt,infraction,gory,glens,strangest,stagnation,displace,countrymen,perishable,lyra,gustave,proteus,denoting,apiece,jeanie,strasse,gammon,storming,islet,conduits,cinco,headway,friars,maples,alluring,ikke,edouard,buzzard,bony,halting,sana,halley,cranks,headwaters,reviving,burrow,universality,veranda,underrated,insatiable,exquisitely,unfriendly,hatches,christened,actuality,teased,murad,attica,flatten,savant,appreciating,stinging,membres,gulls,prescribes,sultry,sinned,globu
lar,asiatic,macaulay,depositing,engravings,showering,fanatical,caper,yann,predicated,montezuma,lentils,quack,bruges,grooms,ousted,cask,grocer,speedily,auberge,negroes,chases,intervened,mezzo,incarnate,chimneys,hela,preoccupied,hither,diggers,glances,tyrants,constantin,giddy,denounce,entertainments,oaths,furness,ripples,herz,bloodshed,maw,viento,upsetting,durante,oxen,nascent,toda,reinforcements,precept,salerno,pavements,murmured,propellers,violinist,himalaya,gibbon,gratifying,delirious,excepting,unlawfully,spanien,urchin,polygamy,utterances,devising,sustains,woodman,gravely,errands,hells,cartes,impulsive,spasms,rationally,psychologie,uproar,savages,craters,wilmot,mockery,railings,paulina,northerly,tenths,quench,passer,projekt,encompassed,broil,hurrah,modestly,epitaph,allahabad,insurrection,brugge,alger,emigrated,barges,nota,tremblant,antennae,fermented,enfant,headmaster,walrus,secretive,grievous,generative,assyrian,repetitions,pensioner,spellbound,bretagne,tengo,domenico,fend,sapphires,compressing,intoxicating,crumble,resorted,lecturing,retreated,senza,magdalene,veer,netted,dispel,warships,tamar,woodbine,straightening,envious,regretted,colic,oni,membre,adolph,farthest,iniquity,fooling,vaulted,warms,formalities,resounding,aku,brazos,saucy,blistering,illuminates,masque,kazan,shillings,gleaned,decomposed,flowery,scandalous,blas,ciel,menacing,elector,lili,neurotic,bituminous,askew,phipps,groan,dusting,lombardy,uncontrollable,shackles,shrines,bridged,consenting,torturing,toile,relentlessly,bracken,couches,decadence,antes,nourishing,herschel,reconsidered,anche,arduous,morten,assimilated,creeps,gripped,sama,unscrupulous,nymphs,unsettled,inseparable,caso,jurist,vestal,dismisses,variously,arran,unintentionally,sprites,dashing,tiring,abate,piloting,decreed,mossy,ores,banque,keyhole,usages,wickham,vieux,bowels,cornet,reversion,sanctuaries,convicts,osman,lodger,santee,thunderbolt,claudius,tremors,apropos,pitiful,winkel,sparrows,bleached,arbiter,locomotion,hus,antimony,hater,buoyant,expel,martine,combatant,swoop,neuter,prejudicial,gente,introspection,meister,mariage,benedictine,reputations,vitally,mavis,undivided,chatted,lured,hurling,brevity,visage,prickly,septembre,astonishment,overshadowed,rescuing,sensibilities,meritorious,beheld,martyrdom,manna,octobre,moorings,buddhists,soars,gnat,housework,gunpowder,undressed,southward,liszt,zwei,zorn,recounted,denials,prussian,adorn,contemplative,awkwardly,etta,projets,lik,belles,stipulations,lifeless,baffle,pared,sobriety,slums,burnet,spaniards,piloted,successively,cucumbers,squaw,snowdon,pomegranate,glas,bouts,transcends,murmur,bookkeeper,crickets,extinguishing,noche,attache,bulging,chemise,epics,smug,flanking,dons,stadt,prejudiced,larva,laziness,mouldings,tireless,leander,growl,gorges,stata,canons,pastimes,diurnal,coolness,busca,recumbent,shipwreck,fader,unconsciously,buffaloes,marne,dissolving,osmond,highness,abstracted,typhoid,perfecting,nez,furtherance,suis,slits,inquires,yule,phantasy,sprache,hoss,crusty,stillness,precipitate,underlie,pharisees,nicknamed,drones,minster,sully,bate,pert,depositions,camped,fraught,perplexed,replenish,necessitated,slowest,unwillingness,sehen,trimmings,esperanza,divan,lehrer,holborn,concours,extraordinaire,eloquence,definitively,natchez,tripped,strewn,rubles,bewildered,beatings,copious,cade,tremble,instantaneously,thump,ghi,pompeii,alluded,aberrations,sojourn,stateroom,palacio,adherents,herbaceous,distinguishable,immaterial,sina,surging,lop,greased,contraband,flagging,willed,wounding,inclement,ange,magpie,stil,robbing,impartia
lity,phosphates,harpsichord,capes,impersonal,proposer,interpolated,strolling,moro,salvo,twigs,furiously,epitome,joked,breaths,lilian,glancing,discarding,fared,fleck,inflamed,clough,unlink,shadowing,wert,regimental,signifying,tutte,rectified,savoie,flanked,bayonne,primacy,fuego,buckland,centrale,eyeing,bade,insolvent,mists,nuit,carmine,relinquish,emilie,succinct,palpable,eton,estar,inhale,dreamt,convulsions,snowshoes,fiancee,fue,blumen,yolk,mediocrity,rhyming,sucht,transcendent,lichen,lapsed,stroked,gallop,cull,unsatisfied,wmo,minstrel,ewe,contentment,fareham,cranium,politic,exchequer,falsehood,slugs,carcasses,piero,candlesticks,rosalie,mingled,rafts,indulgent,longed,rammed,wailing,shrugs,negros,vertebrae,moans,buffets,aristocracy,eaves,popularly,brinkley,marred,falconer,watchman,venturing,entitle,bagley,alibi,ahoy,jellies,postponement,brooding,juncture,greenleaf,naturalized,pikes,haar,meager,commandant,copernicus,bourgeoisie,plucked,inflexible,flowered,bueno,discord,patrolling,injurious,voiture,utilitarian,compacted,ende,doughnuts,reread,stormed,crucifix,irreverent,censure,carbine,credo,heartless,contented,vultures,forcible,bushy,thickening,moins,porches,inoculation,luxuries,glorify,abner,maris,admixture,heredity,nominally,forza,chloroform,nettle,mismanagement,convincingly,evangeline,descends,mischievous,fateful,complacency,impregnated,insular,lagoons,sensuality,vere,affix,professed,unrivalled,sensuous,owne,sawing,yelp,herding,mammalia,hopped,sceptical,arma,interfered,halcyon,bowing,cogent,parishioners,traversing,uninformed,yorke,aberration,mollie,nef,conclusively,calcareous,tufted,chieftain,gestalt,honeysuckle,zeitschrift,unspoken,ishmael,apprehended,rhoda,jammer,forbidding,sparring,mindanao,adonis,domed,distressing,prettiest,lif,panes,testifies,filipinos,chambre,dainty,crackle,jes,thwarted,alban,planks,orville,belcher,spirals,speculations,sedentary,extermination,plumes,outweighed,transposition,acheter,beets,repel,pali,coleridge,anxieties,poste,onerous,tenderly,bonny,haddock,virginian,pyjamas,finns,oftentimes,entanglement,miserably,savoir,rojas,argosy,elba,stumps,clouded,diverting,derogatory,esteban,xxiv,sear,rouen,inaccuracy,assimilate,medea,regenerated,laine,gottfried,rapp,credence,welling,patrolled,georgette,lovelace,caen,conferring,incite,divulge,wardens,scrubbing,laughable,momentous,footpath,entreprise,harem,fussy,civility,deluge,squadrons,ventricle,fluted,sweetened,pry,venison,shoal,basking,pare,blushing,breathes,lectured,babylonian,annonce,morte,bord,skillfully,heady,confucius,bombarded,celts,bathed,cortes,intractable,corresponded,speckled,enumerate,persuading,onondaga,diphtheria,plaines,hoard,offre,courting,petrie,lading,woodcock,churning,chariots,battalions,unquestionably,presque,reproach,viol,vishnu,cherub,lieder,trumpeter,straws,serrated,puny,emphatically,reassured,perceiving,commendation,leben,contending,patriarchal,spelt,barks,dodging,antiseptic,browned,oed,hendrik,highlanders,ligaments,wurde,upheaval,cringe,crimea,sugarcane,mouthful,gazelle,gauche,minion,complicity,unstrung,tendons,thrives,penchant,drab,roared,prospector,unwise,financier,allegory,harbours,konstantin,acropolis,stifle,tiberius,paradoxical,rousing,sebastopol,knelt,radiating,devour,treachery,petting,inoculated,princesses,rossini,portraiture,incapacitated,attested,ope,nuestra,overcrowded,warring,arouse,ticked,purged,repulsive,sikkim,seclusion,elucidate,fated,frighten,amputation,halts,subtlety,creditable,protruding,appreciable,delicacy,paradis,cinch,futility,dumplings,diesen,upholds,enlistment,inroads,blissful,boas
ted,zealanders,stirs,platonic,donkeys,etna,averse,siempre,afield,endearing,mishap,lackey,quod,labors,whooping,sonnets,musing,masai,barricade,inquest,snipe,hapless,cuenta,polen,ably,montagne,brun,mirza,beaux,traversed,sparsely,shrinks,channing,fib,ail,innkeeper,mistrust,overcomes,lordship,egregious,cubans,transacted,blaise,chaplains,conventionally,nuestro,perceptive,haber,lard,destitute,platz,disbanded,singly,headless,petrified,emigrants,thane,salve,hindustan,marseilles,beauchamp,grates,fissure,curtail,talker,divorces,vitesse,winks,harte,loopholes,soit,novelists,bestow,homespun,hulls,complimented,intonation,proclaims,dissecting,clamped,retracted,friar,hospitable,melodrama,creased,preparer,postures,trapper,makeshift,tattered,embarrass,slanted,plagues,jota,harvests,surged,blume,natured,clemency,woolly,blemish,ajouter,bushels,tapers,geniuses,rind,whiskers,huntsman,personne,perpetually,soundings,evicted,rara,divisible,accumulations,lightness,avoir,quelle,admirers,marcello,harbinger,mustache,revolutionize,dwindling,beaker,arcades,baggy,jeweled,rejoicing,uomo,ariadne,dickie,quiver,sylvie,frequented,coronet,agnew,discredited,taverns,prodigal,aden,wield,resolute,adage,wetter,jeg,conjure,rote,recitals,adrift,confiscation,stings,budge,ilk,ose,silks,sequins,fringed,goblins,delineate,organist,kneel,illuminations,chuckled,tacitus,armenians,excels,furthest,virulent,masts,garret,commendable,inadequacy,barbaric,deliciously,ruse,persephone,lifelike,culled,muss,presbytery,tumblers,gunshot,desiree,supposing,sculptors,charme,calicut,inde,castilla,zealous,rattlesnake,iridescent,robberies,elms,excelled,twine,meteors,judicious,unaltered,collation,geist,silvio,parke,diction,unoccupied,tigris,pedestals,tribulations,colman,sabina,meilleurs,buckwheat,enshrined,surpasses,yearling,agape,wrenching,damnation,rapidity,bajo,tempus,deleterious,intersecting,garibaldi,alluvial,xxv,incisive,concealing,clutching,drifts,tenement,discernment,chalice,hypocrite,harrowing,prefect,sweetly,cleave,flimsy,strada,delilah,bedded,shivering,formality,produit,mangroves,suffices,bingley,whosoever,comte,tigre,cham,graced,ultimo,statuary,moraine,moravian,intermittently,armaments,grins,chewed,accomplishes,inapplicable,bly,pasha,scour,motionless,notaries,galant,fallow,indictments,aileen,leapt,pelo,widower,quagmire,taffy,purging,cleansed,bem,fainting,theorist,scaring,serviceable,obstructed,indigestion,jackal,snowflakes,massacres,entailed,curative,bier,traitors,igneous,cambio,lull,rinsed,delectable,proletariat,lise,fanciful,bey,mystics,fresher,consummate,brows,technic,veda,ephesus,domesticated,dismayed,steered,remitted,shew,miraculously,lapses,romagna,freemasonry,dwells,penitentiary,shrewd,impatience,italie,crass,spaulding,jot,gott,benevolence,lancelot,suspiciously,eugenia,reprimand,mangled,staunch,shaven,fez,feld,molestation,quarts,yells,lacs,blindfolded,premiers,wraith,nimble,hyacinth,yonge,durst,naturalists,derelict,gle,shrouded,clarissa,brazen,inundated,joie,brahma,anni,veracity,pinocchio,angers,gustavus,raps,unwittingly,counsels,battlefields,antecedent,matty,dorothea,licht,legislate,voluptuous,complacent,germania,grandmothers,dalla,objet,unaccompanied,schooled,picts,foresters,hag,guerre,dorn,ainsi,orinoco,loveless,sharpened,nostrils,cambrian,impure,gridiron,innermost,wry,pilate,pinning,alms,stung,koko,phantoms,retort,congregate,meditative,smirking,chestnuts,expositions,begotten,gainsborough,sparkles,collared,stringed,barnabas,weeding,evasive,smirk,ancora,pausing,grands,replete,inconceivable,antworten,crutches,apportioned,pawnee,accumulates,fail
ings,otra,bristle,classe,terrors,uriah,oblige,visite,panacea,vibrate,penetrates,mayhew,cathedrals,toads,liber,perceives,nubian,stumped,cramp,sodom,imitations,mistletoe,naam,hallowed,appease,hawes,furlong,heralded,linde,clearest,supersede,shovels,renaud,phrasing,quarries,sensibly,vio,mouthed,gills,braids,milder,inexplicable,counterfeiting,expeditious,intently,chrysalis,rechercher,hoary,corse,crocodiles,ronde,eze,zeno,deceiving,oedipus,beamed,scraped,chagrin,vill,tickled,hindrance,discreetly,sparing,emeralds,wanders,disillusioned,preoccupation,stato,restful,aristocratic,scouring,profitably,pinched,purport,plunging,shambles,juillet,marten,admittance,stinking,porridge,symbolize,standstill,unattractive,diffused,firmer,reproduces,promulgation,unshaven,rakes,sante,incognito,silliness,burgh,giggling,coldest,proviso,quando,barnyard,dikes,vento,donal,artifice,dato,glides,allot,witte,vad,progenitor,abomination,erste,mote,argumentation,passively,hurled,vesta,jacky,wold,habe,straightened,deranged,contesting,darwinian,touchy,rafters,unintelligible,whitworth,hinten,infantile,unspeakable,demolish,comforted,disgraceful,worshippers,servitude,aqueduct,framers,streamers,humbled,marcella,radiate,stipulate,proximate,secretions,attains,gallus,idem,hark,perturbed,cemented,dissolves,crowning,bettina,smuggled,punctuated,blunder,euston,zucker,belted,baal,felon,deen,thud,hagar,antlers,doubting,dunkirk,libretto,debatable,reaping,aborigines,estranged,merthyr,ihn,joh,decisively,swims,undeniably,spasm,kom,notables,eminently,snorting,seguro,mercilessly,firs,cobbler,invigorating,heinous,dusky,kultur,esso,linnaeus,infallible,loaves,dieu,heeled,quibble,meandering,incessant,baines,blick,namen,cheery,curbing,harshly,betterment,rump,oben,sweethearts,slush,mutton,coi,blinked,altri,lenore,townshend,zigzag,lesen,dragoon,sympathies,leggings,benefactor,thales,nacht,merrily,vouch,pompey,blackness,transitory,gales,hypocrites,larynx,droughts,ancona,springing,bethune,nocturne,perdue,altruism,ceasing,dutchman,capricious,angelique,harmonize,crescendo,gipsy,frederik,miserables,amalgamated,obeying,gunners,pent,mishaps,subsidence,plastering,promiscuous,asturias,basso,dusted,sago,inlets,fords,pekka,parentage,mutter,litters,brothel,rive,shelled,outlandish,sneezing,sancho,variegated,abysmal,personnes,bourse,tenacity,partir,moslem,fourths,revolutionized,permanence,coincident,inez,minding,permis,enviable,accessions,carpeted,zeke,eloquently,overtaken,hock,subheading,renews,extinguish,oli,lowing,bullied,accruing,dirge,actuated,bluish,tingle,captivated,parlors,lamented,bruise,cesare,perfumed,dames,unfettered,imogen,lewd,thither,rebuke,collated,occasioned,swayed,dupe,bogs,affording,assuredly,allusions,shadowed,seamen,intelligible,overlaid,censors,shakespearean,edict,octavia,boyhood,sustenance,shrew,freya,disrespectful,confounding,dispensation,arian,depreciated,diagonally,cased,laterally,prays,nonce,lemme,elevating,augustin,beresford,loup,likened,bericht,sketched,plage,firmness,injustices,longfellow,unequivocally,perspiration,mirth,serre,pauper,brooms,horus,casi,fois,ushered,remedied,vocations,depuis,scorched,instep,wilfrid,machiavelli,ivor,mignon,houseboat,krieg,clementine,smokeless,stanhope,thorax,recherches,warship,corinthian,rattles,esti,garten,dislocated,marvels,booby,conceivably,persians,injunctions,crunching,exuberant,dus,composure,contradicted,birthright,errant,proofread,rearranged,heifer,earthen,uplands,paget,portcullis,noose,recur,desirous,exemplar,shivers,smitten,rarest,quiero,averted,publique,dissipated,gregorio,masquerading,discernible,loo
ser,ptolemy,lauded,pais,consonants,demarcation,miocene,steeple,concussion,nailing,deadliest,sparingly,penance,priestly,curtailed,lovejoy,rollo,conspicuously,risked,bowled,modernized,blemishes,eagerness,pearly,recklessly,islets,apothecary,gagne,looted,padua,jointed,heyday,voce,pulsating,beaming,dore,taint,lounging,predisposition,outwardly,tumultuous,overseer,chine,crier,decompose,unimaginable,briton,glistening,moonshine,jurgen,leurs,scribble,anselm,fete,puerta,peculiarities,lichtenstein,favourably,beset,romain,involuntarily,swede,discoverer,livers,plowing,militarism,glassy,riddled,wealthiest,shrill,swedes,headland,agitator,utensil,volk,sheba,glows,heighten,surpassing,ladle,pasa,pinks,rusted,naturalistic,dogmatic,tristram,ballon,surly,presente,sonne,fertilized,admirer,seco,gibt,motioned,catastrophes,thickened,indra,candor,sabin,wigwam,animales,beheaded,postmark,helga,bereaved,malin,drugged,motte,volga,rivalries,gnomes,denne,affectionately,uneducated,necessitates,blunders,proportionately,corea,porque,mocked,holler,fain,hae,sint,darrin,mois,cruelly,tapioca,furrow,fewest,parables,drowsy,bushel,beholder,sedition,lutherans,examen,ghastly,vaudeville,succumb,criticise,inquisitive,doorways,sirs,overruled,menagerie,osgood,teamsters,seul,forked,apprehensive,cowards,cielo,cowl,captors,fils,laity,prefixed,arming,amassed,itinerant,felons,dormitories,dearth,palatable,unmasked,instinctive,corpo,sais,restlessness,baptised,burlesque,regaining,perversion,swells,sujet,acquaint,tog,altro,havelock,lengthening,taut,laa,romulus,sommers,doings,financiers,foolishness,unequivocal,noire,arriba,silken,stringing,bazar,thrusting,pavilions,maddy,clung,hie,bist,needlessly,squatting,cordially,wilkie,succumbed,superstitions,spangled,rectory,alli,multum,iliad,graze,looped,unobtrusive,judea,currant,underlies,intricacies,afoot,oddity,gerrit,cornered,auspicious,splashing,hotly,puffed,disapproved,interlaced,instalments,presumptive,comprehensible,tempore,fallacies,theodor,sawdust,metaphorical,leaped,alertness,embers,assemblages,searchlight,heil,swinton,ize,snob,stave,vertu,snowing,bleeds,canaries,semblance,shins,fickle,outnumbered,recht,lukewarm,quai,rotunda,observances,faintly,indiscriminate,alphonse,piu,raison,eyeballs,barricades,devoting,idolatry,decked,introspective,aggravation,sedge,nou,pinching,tine,pretenders,infidels,dweller,diabolic,demonstrable,letzte,priestess,nimrod,irritate,siguiente,beards,churchyard,despicable,canter,reminiscences,racy,stoop,intr,rendu,facile,christiana,coerced,billets,sneeze,sian,dignitaries,somber,overgrown,statesmen,vecchio,advices,coffers,sikhs,awry,celt,lode,elia,zora,rages,clumps,tithe,subordination,fictions,deposed,trending,disinterested,forsake,conspirators,swinburne,unresponsive,baboon,romani,swamped,ensues,habla,seit,elated,buttered,sangre,selfe,stuffy,depress,eccentricity,transgression,idealized,clings,flamboyant,memoria,nachricht,macht,toma,clergyman,sociales,scape,francia,pledging,dependants,rechte,puddings,partisans,mausoleum,idler,dawned,generale,carelessly,narcissus,crusoe,einfach,skimming,stomachs,namesake,slaps,maximilian,gratuity,reorganize,foothold,reggio,usted,madge,gleam,rudyard,supposition,sprinkling,besieged,malaise,draperies,newby,rococo,brabant,superlative,presser,chamois,dwt,voy,seared,tinged,professorship,diamant,leeward,fruitless,tamer,ticklish,alienate,displeasure,connoisseurs,mutilated,usefully,instituting,balzac,moyen,threefold,innocently,deepened,clef,dak,pura,regarder,trice,pretense,jungles,imitating,shreds,petitioned,thad,archway,danse,loudest,ultimatum,shuffled,moy
,shelling,visita,zeitung,observant,unhappiness,cinder,pelt,ung,laurels,methodical,engulfed,bequests,monotonous,pythagoras,operatic,malevolent,lessened,stile,reciting,naught,antagonism,prisms,debby,coinage,unproductive,banqueting,nefarious,stoppage,defray,endangering,zealots,weighty,oeuvre,subsided,sahib,gasping,idiocy,frenzied,postulate,senor,trespassing,pendent,edifice,vermin,loosening,dialectic,tantalizing,rhinoceros,adjutant,otro,sickening,pondered,teil,snows,steeper,rangoon,depriving,stalwart,verandah,schreiben,buttery,deformity,cronies,undervalued,invalidity,soundly,dank,pinkerton,canvases,weakens,paulus,ebcdic,politik,lariat,pursuance,scapegoat,anathema,comptes,trifle,forefathers,piraeus,xxvi,eradicated,toga,fram,inadmissible,strasburg,berths,innocuous,heroines,retake,unpacked,gonzalo,clenched,groupes,evaporate,midwinter,compagnie,bellini,undoing,communes,cassava,disappointments,glace,puns,hilt,devoured,inwardly,adeline,smothered,eulogy,siva,lond,forsythe,pernicious,fenster,continua,babbitt,reims,scrimmage,privates,whims,hew,skirmish,roan,nonsensical,gallows,rheumatism,devotee,nieuw,cowardice,fabled,fangs,animosity,wily,wiles,ensue,jaffa,sagging,chemin,crumbled,sybil,pekin,defied,hopelessness,errand,yeoman,slimy,unser,coerce,overhang,ihren,jeunes,sobbing,muslin,deliberative,gute,tattooing,shekels,emigrant,dodo,jahr,thorny,epistles,trampled,anthracite,meditating,merciless,clump,transcribe,atrocity,elinor,proportionally,untrained,beene,thrusts,tiresome,splashed,antonyms,lune,moccasins,parthenon,abounds,salutes,collided,tilde,potash,boarders,lapping,chivalry,corazon,frustrate,sideboard,poaching,montmartre,foiled,flocked,connaught,tether,hyperbole,borghese,schrieb,brahman,charlemagne,pulsing,heralds,sterility,dynasties,prowl,amiable,akt,sittings,undulating,thatched,felice,esto,irrevocably,bunyan,hinders,tubers,unrelenting,expeditiously,antiquated,jerked,sputtering,opulent,mots,dimly,coconuts,confuses,executors,squall,nothingness,hebrides,demeter,antagonistic,bowery,immovable,caterpillars,consigned,rhein,fervor,pret,scooped,exerts,idling,cursory,dissipate,hymen,refuted,ionian,americanism,pessimism,vehemently,velvety,vedere,wheezing,teeming,paradoxes,lampe,foolishly,ordre,eer,inanimate,panting,comers,romaine,wulf,peckham,tacks,veille,effusion,lunacy,loathe,notoriety,showered,brats,huddle,taxicab,confounded,coughs,pretends,faery,eloise,widens,omnipotent,gautier,poise,zeeland,ringed,cima,huddled,unsteady,zwischen,duchy,malacca,wol,magda,carrion,summarily,heine,voi,ejaculations,leopards,dette,sanctified,tradesmen,excitedly,pentru,braced,gaunt,nourished,cornstarch,doch,effie,daffodils,lettre,boden,pollute,bara,kamen,neuer,pomp,noms,stora,sprouting,summoning,annabel,tartar,brownish,rejoin,rosettes,etats,volition,crawls,suave,riddance,gulp,lottie,hac,lurk,smudge,tulle,helplessness,circumstantial,dermot,naturalism,haga,colle,galloping,indestructible,principality,indulging,allusion,bosh,samaria,smeared,gouvernement,liqueurs,winifred,parasol,coloration,stingy,succinctly,devotes,manet,anos,vigour,snares,schnell,illegible,mortars,didst,curiosities,wither,schloss,seamed,calmed,flattered,babbling,roch,admirably,vipers,nightfall,nul,manos,hurl,loyalists,dory,sheltering,forego,castile,klasse,blockquote,tyrol,irreparable,immunities,broiled,superstitious,evangelists,insides,sedative,defraud,toothed,bygone,wilds,intercession,complet,lettered,mirada,paa,apricots,darkening,depressions,mache,toasting,exhale,markt,altars,abolishing,chauncey,recesses,kinsman,payed,overworked,cecile,orbs,aime,mutable,delicacies,to
ujours,scorching,coffins,jove,cashed,ushers,jewry,copperfield,chapelle,whoop,cacao,andra,annoys,heiress,godhead,canvassing,portia,shyness,angelus,subjecting,momento,escorte,unsightly,frayed,criminality,woolen,repos,levelling,shrapnel,arthurian,burgos,litany,fairest,nutter,bristles,larder,ganges,machen,truthfulness,atrocious,obelisk,valeria,claret,fru,samos,consecration,forbearance,acerca,plastered,apostrophe,stepmother,ruf,lapland,publius,ihnen,jesuits,voluminous,mottled,plu,tosses,manifesting,estella,publics,rien,normandie,scrip,rocher,inadequately,arabella,matti,throng,flemming,shunned,abandons,appetites,turnip,juxtaposition,crushes,carnivorous,berber,mince,banish,flapping,fino,frets,schism,sculptured,suivant,jemima,heretics,dogged,apparition,barristers,scrutinized,earthworks,thrashing,salome,thumping,vara,quenching,hunch,amaryllis,messes,perdition,wintering,topple,chickasaw,pungent,discontinuance,unbridled,astrologer,dut,canvass,manifestly,emphatic,susy,outgrowth,homeward,withered,baiting,surrendering,fortification,mingo,spurt,elation,wail,artistically,elma,epileptic,crag,hace,feller,enmity,sanctum,mazes,jenks,schutz,materialistic,boaz,jahre,gud,oncoming,racked,cloister,provincia,fancied,spoilt,predisposed,hydrochloric,filippo,strode,agen,marchand,disorganized,shaftesbury,littoral,denn,aggressor,giggled,consummation,fronting,zola,heute,unfaithful,executioner,titular,swears,diminutive,paring,damning,matrimony,armas,humbug,signalled,granulated,ailment,homely,perpetuity,stepfather,disprove,dinero,bernhardt,incurable,dixit,shoving,furnishes,anointing,corinna,strictest,domiciled,minx,eclipses,prise,misdemeanors,hadrian,supremely,mensch,hastened,perpetuating,prostrate,provisionally,cocked,raged,boyne,singularly,elam,gobble,preposterous,symbolized,breech,ripening,pyramidal,shee,choruses,obstructing,phosphoric,parquet,vint,pasquale,reparation,amply,damask,rejoined,impotent,spits,papacy,thimble,lacquered,ablaze,simmering,nettie,grasshoppers,senatorial,thawed,unexplored,transpired,toulon,fortifications,dens,loafer,quin,insurmountable,prettier,peu,haystack,komen,chaque,confining,louvain,etchings,impenetrable,gymnastic,tink,purr,duped,stifling,realises,vindicated,bund,invades,oust,suo,dipper,signified,talkers,exemplify,inane,byways,ibsen,justus,bluntly,bask,mermaids,contemplates,inglis,defensible,spinster,goblets,interrogated,yolks,famille,dello,magdeburg,tarnished,deducting,fie,brimming,ridiculed,baie,ionia,olden,herne,unending,abominable,rattled,basse,farmhouses,tambourine,venomous,impressively,inextricably,etexts,tapering,prinz,unjustly,rehearse,apertures,seducing,screeching,reedy,ceded,sido,imbued,fearsome,bureaux,sleds,christendom,biographer,wreak,planta,bridegroom,swarming,hava,accomplice,vivre,moni,mui,ili,servi,irregularity,gash,impeded,gravestone,pompous,sunt,subvert,hanno,instrumentality,barnaby,antwort,impassioned,mous,esau,desperado,flavoring,mouton,bau,contagion,archimedes,desecration,pocketbook,anselmo,misinterpreted,garlands,varma,mongol,audacious,midshipmen,degrades,maggiore,protestantism,soreness,boldness,schip,inhalt,otras,cassius,powdery,exportation,diverge,loosened,misunderstand,virility,inalienable,norden,untamed,eben,viel,xxviii,meddling,objecting,gib,shoddy,salutation,altercation,octagonal,mended,navigators,notches,odysseus,unfavourable,abject,heretical,riveted,quiescent,strangeness,rideau,tincture,erecting,tenderer,wirtschaft,lucian,jaar,persevere,fittest,tarnish,isthmus,giuliano,wordt,hildebrand,feu,treads,lengthen,bahn,prodigious,spoonful,sociable,requisitions,deftly,raucou
s,toasts,exaggerate,odes,blushed,saddest,grinds,immorality,addington,marcellus,ciencia,wench,celle,spontaneity,illusory,sympathize,faggot,barrows,tantamount,slaughtering,dissected,borrows,frigid,hemispheres,woollen,musick,speculating,pawns,outermost,selwyn,westphalia,augmenting,winded,poder,methinks,rambles,namur,tyme,dawning,lait,klang,congratulating,sempre,flagrant,wane,loins,uneventful,quis,scoundrels,distraught,assassinate,unwavering,confidentially,piecemeal,soll,inferiority,burnished,clothe,swelled,vides,breda,gentleness,staked,rigidly,simile,phalanx,hindering,sloped,sifting,fixe,isobel,loudness,guillotine,reverting,dionysus,leanings,groans,herbst,canker,keener,embellishment,confesses,mistresses,breakwater,smuggler,busily,poached,aram,shopkeeper,hailing,imparted,traduction,contradicting,headlong,captor,indelible,tethered,whiteness,grazed,unfulfilled,acquittal,meilleur,fluently,ascribe,stalked,deluded,trembled,gens,doon,unobserved,labored,tete,twitching,smacks,silber,troughs,unbelievers,hungerford,brothels,skilful,werk,basta,bolder,omits,endures,heeft,silencio,laski,selle,pueden,impersonation,hote,lavinia,intents,unconnected,ovum,pruned,wedded,lashed,valladolid,contentions,bickering,whaler,unobstructed,menschen,fondling,cref,laissez,ricks,spenser,astounded,permanency,smacked,personen,pallas,anatole,sleet,disgraced,philippa,royaume,grooved,resigning,appareil,alcove,termine,ungodly,felling,landes,hout,ois,disclaimed,aucun,upp,appartement,couleur,montagu,steamship,condescending,recounting,breeches,appellation,mitglied,abbe,montes,exemple,handsomely,fille,segovia,untenable,messer,deformities,necktie,huis,xxvii,tardy,disregarding,matron,seaward,uppermost,adolphus,ciphers,nibble,heim,volver,exerting,fenn,fleeces,industrious,foie,decayed,proprietorship,essere,allgemeine,umsonst,harps,hedged,cleanest,selon,teutonic,viceroy,maintenant,ingrained,caspar,swordsman,commissary,yellows,habitually,naman,maxime,majorities,rendus,mummies,conquests,brimstone,quand,trowel,tyndall,profiting,beseech,hitched,mucha,mair,smelt,fatale,margery,yearn,mismo,culprits,trinkets,whig,enchant,austere,earths,selbst,storehouse,cowhide,plumage,antecedents,diabolical,tugs,rapier,unspoiled,haughty,relinquished,assaulting,admirals,cosi,meisjes,esmeralda,captivate,terug,deterred,agostino,apathetic,uninteresting,lyre,yawning,centralization,prunes,buller,cossacks,attuned,herons,raiding,deft,seething,carne,jardins,alligators,instigated,superstructure,husk,grandiose,clerkship,concisely,sah,scepticism,quatre,constancy,plats,countryman,insufficiently,reappear,boudoir,affinities,glades,crutch,rioting,espoused,mamie,frisch,discursive,disputing,unpaved,lieber,repudiation,clarice,dimples,inhabitant,flourishes,colonized,hessian,feder,ardour,hing,erat,arbeit,levant,imitators,talkative,phonograph,speculators,sty,quelques,smelting,cuss,slats,transcribing,manoeuvre,offends,lumpy,landlocked,embattled,wisest,giulio,zin,diminution,ging,rencontres,southernmost,freckles,civilised,airship,galls,ammon,imitated,inflicting,inducement,heave,cud,gegen,proclamations,rarer,slowness,wrongfully,lessening,aurelius,pout,cognate,mire,sufferer,mores,raindrops,elegy,sanctification,sanded,indignant,godless,sloop,politeness,baffling,hurriedly,characterise,purporting,passo,taunt,ick,hinting,schoolboy,bailiff,outpouring,deflected,inflection,lettres,myrrh,infuse,chaff,defaced,mimicking,counseled,showy,altruistic,aldermen,commends,moorish,etre,bobbing,defiantly,colonels,posible,bli,cualquier,pathos,battleships,smartly,laments,spied,playthings,argumentative,roused,alo
of,snore,charred,industria,hij,ihrer,dunstan,bolshevik,unsound,hatter,creepers,recreations,profusely,intelligences,sorrel,reverie,colloquial,callous,oom,perplexing,splashes,homesick,gainer,ochre,dois,bystander,quell,repulsion,capitan,balk,imagines,softens,harnessed,exuberance,flocking,unnumbered,outbursts,undying,stubble,bande,amie,envie,tle,quivering,ete,euery,wein,sark,commending,sofort,flattery,soothes,millstone,mortgaged,impossibly,giorno,compels,succes,drunkenness,indulged,habitable,spn,subtleties,ministre,trappings,afterthought,damsel,euphrates,schoen,decorum,hommes,spoiling,yellowing,robs,giselle,earthenware,incendiary,selina,lenient,dined,idly,freda,devilish,aristocrat,scathing,twinkling,nichts,pantomime,familie,wanderings,decimated,overthrown,moored,peered,bores,regrettable,strangled,maxims,cama,engrossing,fere,jezebel,lethargy,komm,frolic,painstaking,goths,finality,toppled,ewes,mending,wrestled,hurtful,alternation,receding,gast,laban,neuen,paix,candelabra,outposts,treading,hedwig,downy,conformed,characteristically,canadien,goldsmiths,swarms,geographers,somos,evolutions,escorting,irregularly,oratory,sharpest,palisade,moccasin,circumcised,growled,auxiliaries,benefactors,terse,insistent,peppered,sterne,avez,utile,frightful,trite,gentler,vex,dilapidated,mien,avance,wollen,dela,stubby,sixpence,hoch,visto,impaled,forays,charon,flanks,pavia,curbed,efficacious,philanthropist,thaddeus,convinces,rede,minder,orator,abet,dien,ropa,sence,steppe,plowed,sires,transgressions,lingers,smothering,encampment,roque,prophesy,recast,misrepresentations,bards,bestial,neuf,buddhas,oozing,vicenza,richelieu,curd,bookish,subdue,raking,denouncing,ascertaining,stags,vittoria,soldered,privateer,milly,vicarious,traverses,seedy,imbedded,elysium,quenched,antithesis,envoyer,awakens,accentuate,squandered,sortie,withal,eyelashes,colliers,minuten,tilden,asti,blindfold,rampart,possessive,feldspar,facades,idealist,constables,mourns,solidified,cura,conceit,needful,locusts,thatch,cappadocia,weathers,grunts,thicket,zou,depraved,continence,treatises,renseignements,sauvage,prying,rascals,voyageurs,rudely,weeps,deplorable,smacking,aggravate,quoth,snowstorm,lacuna,chambres,rawson,levelled,incessantly,toit,apres,flaring,neues,langton,testa,lye,ditty,pestilence,rapide,thoroughfare,skiff,belligerent,impeached,hight,eclipsed,conspired,catacombs,agonizing,bottomless,sows,attributing,londoners,faut,sardis,excruciating,punctual,runaways,boniface,grafted,watercourse,propped,beaton,telegrams,staking,conversing,acetylene,calamities,viennese,fancies,accuser,bystanders,minos,ganymede,enjoined,animating,mercurial,bargained,repugnant,citron,clave,pageants,grosses,tacked,zeigen,supplant,slates,prue,corroborated,andros,tipsy,tabac,recognisable,neuralgia,timbre,clasped,pecking,womanhood,crimean,exorbitant,tish,grieved,experimenter,tallies,serpents,tampered,severally,bedstead,acquis,bostonian,whirlpools,sotto,caressing,reliefs,tassels,culpa,whiter,froth,obliterated,regalia,peerage,deceitful,storied,unprofitable,doublet,astonishingly,dein,cannibalism,menos,mera,pretender,mosses,subside,burney,conspiring,nostra,retaliate,deafening,beleaguered,jarring,baptismal,magdalen,brackish,direkt,vse,tinsel,edel,scrutinize,adverb,mumbled,commis,yams,breve,mut,worthiness,lazily,disarming,ween,woefully,kaj,promontory,eres,paye,smote,taunting,etruscan,outwards,rend,hezekiah,depravity,wealthier,onda,scientifique,disagreeable,drei,castes,corrupting,massif,murat,kine,lus,overtures,pharaohs,fraudulently,plunges,gibberish,cela,tammany,boulevards,redistributing,darken
,dowry,chateaux,quam,skirting,adieu,kindling,affluence,passable,shouldered,hilarity,fulfils,predominance,mitten,conquerors,thar,admonition,ferdinando,perchance,rots,demetrius,precocious,rood,sachsen,luzon,moravia,byzantium,gaf,altre,repress,domini,loro,moiety,steeply,darned,locum,denser,moorland,coincidences,divinely,skimmed,lassie,congratulation,seminaries,hotchkiss,trotting,ambushed,combing,travesty,bewildering,hunchback,aback,deepens,griff,enactments,scaly,heaped,fantastically,cobham,oracles,untied,quince,lage,profusion,conjectures,glint,incitement,hansel,figuratively,sorceress,stoic,fatigued,unconsciousness,quarto,improvise,incipient,avalanches,cheval,crackling,creeds,thro,outrun,extenuating,blackberries,amiss,cavernous,snodgrass,darlings,reprieve,shanty,rapping,proffered,rowena,livid,distasteful,distinctively,luft,hares,overturning,attestation,bravado,overpowering,ravings,childless,voix,grecian,proportioned,lavishly,smite,forthright,kritik,foretold,dado,engraver,saddled,tortures,crusts,vamos,loge,presupposes,trickery,adherent,fragen,populi,astrologers,wuz,vindication,opined,falter,chatty,auvergne,philistines,retainers,tener,cherbourg,imperfection,sorrowful,unchanging,predominate,wodehouse,molested,titres,hyena,wedlock,erstwhile,vist,obtuse,caudal,sternly,chanted,jonson,klug,savour,stabs,indecency,lingered,elke,feasting,suffocation,softest,sniffed,lurks,tenses,lawlessness,recollect,alors,projectiles,heures,larch,interrogatories,dess,whet,impatiently,suspecting,dessous,aline,disjointed,seizes,reine,triomphe,thebes,doer,pandemonium,lege,ravished,discerned,seulement,icicles,fanaticism,flamed,godsend,rubbers,eder,anderen,rehearsed,alix,outrageously,bagdad,petticoat,inhabiting,unrestrained,injures,botha,pigtail,appraising,enthralled,strays,embroiled,toussaint,armistice,ellery,damped,southerners,fissures,clinched,forlorn,apologetic,absolution,inordinate,burdett,clank,individualistic,conseils,marts,obra,artemisia,evermore,engendered,manchu,disconcerting,priestley,appropriating,shinto,attentions,regno,gawd,inhaling,calmer,passers,fluttering,irishman,brier,phoenician,hundredth,firstborn,coves,armes,betraying,screech,fetches,paltry,carelessness,threes,broadside,importante,doers,sods,technicalities,thais,groaning,beckons,rejoiced,quickness,jeunesse,onze,entertains,turban,freie,ruffles,infatuation,gaiters,meisje,geben,nulla,plutarch,curving,misrepresent,tankard,xxxix,amorous,kurz,overflowed,jesu,weaned,armchairs,appartements,vagueness,grumble,wronged,politiques,fireflies,hoisting,falsified,dialectical,schatz,labours,espagne,flatly,harsher,inciting,malleable,indecision,unselfish,shem,starke,alight,epochs,nosotros,genial,langues,revolved,ifad,snowed,cachet,fortify,cherubs,armature,implicate,tolling,provisioned,sista,syriac,dived,baffles,infamy,dapper,belfry,elysian,odious,rehearsing,ellipsis,outhouse,romanesque,gobierno,vanquish,imparts,sobs,laudable,thawing,tienen,writs,omnipresent,gesundheit,hovered,devouring,renunciation,stunted,munching,fumbling,purl,lasse,banal,rears,portico,excites,placard,quartermaster,peculiarly,placards,transposed,ganga,thrace,waistcoat,vier,perusal,petrus,childlike,shamelessly,saison,tomo,cloaked,lichens,brotherly,uninhabited,sawn,unbelief,overtaking,transference,arjuna,pliable,mantua,sardines,dictating,studien,crystallized,reprisal,blighted,kunz,dissect,rumbling,perceptible,blazes,encircled,odette,saxons,transcending,snout,goodly,philosophically,directeur,bigot,bramble,persisting,bouillon,scribbled,celibacy,beaucoup,tooting,gruppe,displeased,portant,lather,falstaff,unchall
enged,strayed,commutation,spiritualism,gracia,omnia,engender,fini,jurists,cloaks,streaked,downe,chieftains,garrick,perches,scrapes,silhouetted,crouched,juana,gradation,tole,unanimity,radnor,tycho,impeding,reino,grisly,fornication,contro,sassafras,heure,tramps,assis,blossoming,barbary,irate,partisanship,wean,omelet,suh,sheaf,folios,iban,dictum,refutation,posthumous,inclinations,ledges,wenig,muchas,enlisting,roars,swindle,revolting,candied,plaine,macedon,dingy,bons,frieze,staircases,horas,multiplies,impressing,twirling,lachlan,entwicklung,sergeants,overcoat,shak,tyrannical,infinitesimal,scharf,spouting,origine,humbling,truer,limes,katharina,martians,sullen,machin,prolonging,battering,superficially,upstart,ihm,imps,divulged,shrunken,quays,reprehensible,provokes,distancia,dedicating,confessing,forbade,incursions,viele,pieced,arching,bett,gloriously,gourds,worsted,nevermore,sanguine,acorns,slung,rowers,shockingly,viaje,vagrant,empties,bight,entra,fells,morgen,lors,dormer,geht,ahab,prolongation,uprooted,talons,germaine,dualism,intrigues,cannibals,pounce,marchant,vedas,panier,mouthfuls,instilled,calyx,valour,litle,mightily,cuzco,unwieldy,perpetuated,steht,exaggerating,smoldering,peuvent,snub,coarsely,voz,withstanding,thickens,hissing,crumpled,topmost,intrude,behest,pitkin,snatching,resto,charmer,escapades,haphazard,infirm,pontiff,menage,preaches,varios,growling,indescribable,arraignment,eugen,kentish,napping,sabatini,toppling,sten,astley,bouton,excellently,ier,pails,burly,derecho,formule,hillsides,segunda,xxix,contenu,divest,mange,unfairness,abated,sohn,tiniest,mowed,sano,overhauled,caskets,lecteur,congenial,lut,fervently,sprained,harlot,ravages,choix,superhuman,conclave,humanly,altura,livia,causa,dentro,magnificence,sacramental,peddler,eterna,mystere,fayre,glared,adverbs,donc,ugliness,constantia,shavings,lusts,nunca,helplessly,quintessence,throes,malabar,crowbar,blots,nettles,scud,raked,cruised,stupidly,lashing,gaudy,merriman,swoon,buckskin,kommt,recluse,displacing,neapolitan,blacker,haarlem,quel,aspires,telegraphic,quali,frescoes,patted,puritans,gentlewoman,somme,meinen,nouveaux,victors,revels,droves,slur,laetitia,eisen,phrased,puddles,nobleman,kort,assailant,luxuriously,flatness,pardons,debauchery,wij,extravagance,buttress,entrada,junge,rigors,foregone,stellung,overjoyed,bourgogne,newhaven,apologists,fut,allemagne,vind,waddington,refilled,whiff,burrowing,strolled,estos,regen,encrusted,clashed,harpoon,sombre,machinations,hearse,libertad,roamed,approbation,nen,wut,calmness,confound,lengthwise,fatter,abstained,chasse,christen,comparaison,valeur,senile,cobwebs,tusk,hellish,conquers,iglesia,preceptor,claro,ugliest,ungrateful,renounced,clashing,decomposing,sauter,sain,postponing,israelite,graver,flees,torrid,absalom,preconceived,zug,engrave,dishonor,hoarding,bauxite,barrack,compatriots,stereotyped,conscription,maken,philosophie,minna,tradesman,embodying,unscathed,moslems,courageously,snugly,tarry,fevers,interrogate,eocene,muddled,sklaven,leonora,militaire,subjection,punctuality,hoarse,misfortunes,vexed,delos,vanquished,ibi,inquisitor,floored,inheriting,historique,plied,beaters,twang,ombre,conceiving,syrians,mij,indivisible,poetical,stagger,crusted,heraldic,belli,maladies,adjudged,adolphe,fou,wissen,turrets,pression,efter,calms,misgivings,presumes,juggler,obeys,stifled,preposition,vestibule,heer,mournful,ameliorate,scheming,disarmed,baseless,voile,picturing,dismemberment,quartered,agrippa,lioness,appendages,feverish,pavillon,couleurs,neglects,suckling,scythe,heaving,homily,pensive,lado,fum,upshot,sift
ed,felder,fuerte,boisterous,sate,alleviated,outbuildings,icj,decanters,elevates,poitiers,goed,ferment,bounties,incursion,aurelia,thinned,consternation,hoisted,aeroplanes,auteurs,antigone,chirp,dimmed,yore,scurry,growths,thoth,halve,conversant,torpedoes,sovereigns,unencumbered,eliciting,tamed,fiends,farmyard,condense,garbled,tallow,unforgiving,immobile,indisputable,unkind,prismatic,aunty,paucity,expediency,frisian,lieutenants,philology,prophesied,backwoods,pheasants,slouch,amulets,cargoes,accentuated,eddies,kategorien,disobey,literatur,bandy,watercourses,amicable,prospered,savoury,colloquy,retorted,fiftieth,joyfully,onder,offensively,plausibility,magnate,pillage,vengeful,lunatics,satis,nol,edom,impracticable,misdirected,weer,surrenders,manchuria,playfully,barony,leyden,gruff,snatches,buxom,deciphering,botanist,deine,timidity,musty,silences,guineas,hebben,ministering,strangle,swerve,proscribed,chattering,esser,franconia,dominions,plateaus,berthold,spaniard,plummet,transplanting,onlookers,wissenschaft,phebe,easiness,trepidation,squatters,plantain,pepys,frailty,neutralized,tangier,ismael,guten,bateau,mourners,twos,passageway,reestablish,fondo,parsonage,quien,sulphide,outcasts,mortally,oot,agni,carbonic,unassuming,disillusionment,nouvel,knead,wilful,gaol,erudite,appreciably,equalize,prepositions,petits,tarn,endeavoured,enl,attentively,interred,indiscriminately,encumbered,herodotus,favouring,neutrals,conspire,recompense,colonnade,unde,eustace,abides,yuh,damen,seus,strove,ogni,dissenters,imparting,apologizing,coups,verdant,secrete,libris,twirl,noo,beadle,denizens,cockney,guppy,leeches,convoys,manoeuvres,shapely,rooks,shuddered,stelle,ornamentation,lynching,sommes,perdido,dictatorial,uncomfortably,defenseless,glean,amory,ander,edad,gratified,participle,schlegel,watchmen,galleon,travaux,eten,enim,chafing,betrays,assyria,inwards,corsican,libertine,immeasurable,esthetic,testator,distaste,offshoot,smithson,resolutely,friendliest,uttering,jacobus,construe,algemeen,mourned,despotism,flotilla,fragmentary,anjou,omniscient,gladness,frisky,generalities,condolence,siddhartha,brightening,inimitable,ineffectual,armorial,poppa,thickly,blossomed,cistern,tableaux,latins,phaeton,fecundity,malle,caliph,dysentery,soir,grenier,funnels,pasty,cuffed,peau,tumult,defoe,curate,donned,wilks,allegorical,monotony,reve,ohr,lucile,amazons,manon,unabated,plante,curzon,wohl,marksman,philosophic,denna,troubadour,volgende,truest,hypnotized,voitures,rudeness,felled,alleen,tinned,concoction,flay,patter,seinen,tortoises,roxana,pli,crone,possessor,wintry,gode,admonished,wickedly,laver,shamed,eluded,incriminating,unsealed,misinformed,tambien,journeyed,presenta,sett,magnificently,unpunished,albatros,apostasy,bereft,lucretia,hibernian,vitriol,vicarage,vestry,gleefully,mercies,paralleled,entwined,fosse,taille,resplendent,thrall,barked,cormac,sju,unum,scorned,relapsed,thicken,sanaa,ceci,selene,artfully,pilgrimages,fides,blazed,edda,wheelbarrow,maimed,chor,dernier,duda,pater,meno,mused,jamais,puffing,besten,wielded,futurity,quicksand,trestle,souffle,rebus,proces,sentinels,pardoned,wormwood,sighing,harz,awed,shrank,conceals,glycerine,staub,abolitionist,foamy,aventure,meunier,unpainted,knolls,unwell,unconscionable,wedged,outgrown,evading,commemorated,lurid,annunciation,rumoured,idee,coalesce,brougham,windings,strongholds,burglars,shrimps,stirrup,seria,creo,dictionnaire,finde,flopped,elbe,whitewash,subservient,suivante,stubbornly,benediction,disobedient,seamstress,immortals,euripides,uninitiated,mikko,mond,zwart,briskly,afflictions,buon,zon,wea
riness,ascendancy,affront,telephoned,treasuries,energetically,tinge,fingal,defection,murmurs,slog,gav,dispersing,tractable,lapped,syl,petitioning,clawed,einmal,winsome,presuming,englishmen,equaled,flog,notte,deferring,quills,oud,practises,unattainable,lengthened,dramatist,grayish,hallucination,exhortation,arousing,hippopotamus,wile,forgeries,chartres,recline,maitre,remembrances,disturbs,chums,determinate,heeded,telephoning,sophocles,humiliate,erfurt,wasser,tomes,ingen,accompaniments,clairvoyant,shriek,ferocity,quoi,withering,procreation,xxxi,exasperated,eerste,groping,soule,pinnacles,miser,scaffolds,reprisals,culpable,unserer,asunder,qualms,unharmed,sheaves,tritt,godmother,impresses,lidia,plusieurs,buttoned,sprouted,armoury,marshalling,longue,omelette,disintegrated,forgetfulness,muerte,stilts,samaritans,knocker,underfoot,roofed,jinn,nunc,primeval,sakes,horsemanship,aviators,destinies,jure,sherbet,nutritive,hurrying,helden,tepid,opportune,intuitions,dissuade,hemmed,personified,cornice,smock,musket,beautify,tannery,sooty,buckled,purveyor,kindled,provencal,schein,stairways,methodists,bourg,pretence,questioner,repute,nakedness,scabbard,covet,debe,rippling,mony,nelle,rationalism,wistful,admires,hissed,overpowered,pervades,mele,tirade,elucidation,prongs,fumbled,acte,confided,mumbling,abstaining,giotto,punkte,lancers,heimlich,waren,confederates,stretchers,demosthenes,warum,avait,devonian,infinitum,justo,antti,ointments,tugging,opulence,appomattox,bentham,coursing,beschreibung,patrician,zacharias,melodramatic,effet,inexperience,palabras,aantal,rime,casement,kalle,serially,gefunden,apprised,thoughtless,comparer,goad,parle,muddle,levites,christus,blasphemous,unaided,candidature,clapped,fatherland,evergreens,recede,dears,willkommen,spry,objets,toki,maggots,calor,hominem,tints,waver,handkerchiefs,punishes,salut,acquiescence,disaffected,manors,chronicled,laure,inundation,earshot,omens,brule,transfiguration,punctured,coughed,repaying,filial,mocks,niggers,refrained,shallower,durer,patriarchs,respectability,commode,overbearing,townspeople,adoring,trodden,reaped,bequeathed,grumbling,elude,decently,metaphorically,tripe,glitters,ahmet,austerity,mitte,informe,enjoin,dazu,boyish,egotistical,neared,claes,rostov,diverging,estoy,uninvited,irkutsk,trappers,aniline,tuk,spilt,forgetful,conceding,brightened,inconveniences,maun,rigour,evinced,uneasiness,afresh,taal,bunks,ducked,situate,sowie,escapade,loomed,egbert,hungarians,clamor,abdallah,hond,pews,workhouse,handbuch,unorganized,whalers,smuggle,laboring,nooks,wud,autocratic,titania,broder,shyly,stewed,disguises,stowed,unmanageable,denunciation,squeal,ducking,throb,scorch,perusing,duels,villainous,caius,pythagorean,steadfastly,abstention,genealogies,ruthlessly,falsify,swagger,flicked,emigrate,arbour,accomplices,nonproprietary,gebraucht,toothless,frankincense,commendations,comprehended,bravest,crevice,papel,telltale,typewritten,progenitors,forges,loosed,madcap,neigh,evie,casimir,persecute,voracious,foret,rescuer,massacred,signification,quarrels,remoteness,dominus,botticelli,balmy,hele,splinters,kleiner,epithet,blonds,ravenous,mongols,camphor,savagery,ober,navigated,dieppe,mies,pretensions,thunders,prins,diogenes,comings,danke,farthing,crevices,wringing,tearful,betwixt,florent,unmistakably,unu,massed,plucking,slavonic,reprimanded,rebelled,thunderous,rolle,encloses,sorties,revives,toleration,suitors,minutiae,deviated,sleight,burman,skirted,coachman,bigots,reappeared,comprehending,reckons,inexhaustible,canny,fainted,pianoforte,rifts,winking,firmament,hovers,thoroughness,c
onfessor,gooseberry,aimlessly,pronouncing,agassiz,dazzled,inborn,manera,ould,consuls,eure,doria,newness,ascetic,bearable,russet,specie,hothouse,incas,skein,virginie,mettle,ojo,endeavored,matin,demonstrative,seis,detta,bigoted,discordant,lilacs,levying,elles,oriel,buoyed,malady,brahmin,grandsons,tempers,quinine,thirtieth,sige,grog,fester,permeated,retards,resentful,headlands,saintly,oude,aught,cornelis,adjuncts,jeweller,wooing,conjunctions,embellish,cordes,moonlit,intercepting,denounces,besser,wegen,dienst,corks,obscuring,tages,nullify,corroborate,envied,chins,runt,nursed,loathsome,cosas,althea,dando,icebergs,sacking,settee,driest,scipio,stealthy,flaunt,mistaking,saxe,dyspepsia,tryst,cede,annihilate,candidly,honorably,shifty,ello,deceptions,snorted,signe,shivered,teem,replenished,assailants,degeneracy,giovanna,consummated,cosimo,cotes,obstinate,farquhar,retrace,revolvers,lurch,gregarious,allee,oor,nightgown,bombard,missus,mystified,drooping,diable,inconsiderate,swirled,darted,warlike,colons,supplication,fretted,gauged,suet,overhanging,impropriety,maligned,thackeray,nought,barbarous,grandi,olly,diu,scepter,writhing,enticed,schmuck,gasps,exclaim,greve,vestiges,rustling,recaptured,marauders,spars,howls,answerable,inky,ock,sneer,allay,derision,zog,dutifully,octavo,jerrold,maddening,plundered,damit,henriette,decry,buen,devant,conspirator,luring,gallantry,hewn,whisked,pericles,desertion,rumania,yow,wherewith,siliceous,mund,circulates,signore,coldly,envoys,restorer,staves,coldness,existe,friesland,orden,riviere,gusty,brazier,bayreuth,sonntag,semaine,godliness,docile,maliciously,vole,cantons,siglo,enveloping,piedra,subito,tangles,meanest,hollows,luckiest,officiate,mumble,espacio,oppress,grandfathers,usury,russes,greedily,vizier,ojos,nostril,tombstones,wavering,barbarism,vienne,alway,surmise,blanch,inscrutable,campagne,syne,xxxii,saluted,protectorate,hieroglyphics,materialist,landlady,blameless,amalia,absurdly,garnished,fernand,corporeal,passivity,partiality,circumscribed,steno,disposes,berta,emanate,rummage,headstrong,plies,scantily,waar,befriended,professing,nestling,piedras,immortalized,leper,animus,dimple,noblest,supine,bloodthirsty,squint,vitals,lamenting,benedetto,vindictive,overtook,goe,palast,triumphed,scanty,difficile,vagaries,undaunted,lucan,hemming,nuevas,defiled,faltering,saracens,tisch,eke,conceited,denys,naissance,laymen,shopkeepers,mortification,combats,indulgences,tard,fattening,drench,digesting,cupola,hund,kommer,canst,idleness,lunge,mahmud,minuet,entombed,fers,diverged,spouts,pontifical,glided,sleeplessness,iago,axed,overdone,socratic,revulsion,rosamond,schwarze,criticising,porpoise,nowe,oligarchy,psychical,rives,houten,fanned,berge,wagging,germinate,chrysanthemums,misdeeds,acto,earnestness,wetted,undercurrent,steerage,granary,befitting,whitish,irreconcilable,giveth,concocted,essayist,epicurean,blacked,refit,boite,unwashed,detaining,shod,oratorio,befall,appurtenances,wearily,northernmost,trollope,enchanter,unscientific,withstood,sandhills,heaviness,knapsack,animaux,calcul,consciences,inflected,linseed,caisse,staccato,dels,agamemnon,dodged,refusals,outrages,cuneiform,footstool,dopo,uncircumcised,emblazoned,mettre,wrangling,dorcas,confiscate,bloods,odours,mongrel,forewarned,degenerated,eventide,impairing,dispossessed,meagre,mopping,iver,fantastical,dorf,yama,laatste,chintz,nebulous,slink,lineal,droll,honouring,grenadier,anachronism,methodically,stiffened,athenians,hautes,aleppo,whimper,whomsoever,viciously,fiddlers,endow,raum,indistinct,counterbalance,razed,anzahl,invents,loungers,wil
berforce,manus,tenfold,scoured,schule,carley,knotty,stewardess,furthered,chancel,inexorably,mitglieder,worships,ironed,inhabits,domestication,olof,japon,appendage,geographer,omnis,naphtha,clairvoyance,frente,aeneas,narrates,girdles,heartbroken,parola,lameness,offal,smithy,dawns,frais,couverture,staid,encircling,verte,wove,pithy,caressed,infinitive,hysterically,incantation,blissfully,shirk,pangs,monsignor,fulness,commande,domestics,unpretentious,poachers,galvanic,narr,joven,parlance,lethargic,drunkard,conveyances,steinmetz,cowper,bronzes,essa,knell,profited,flavia,startle,algernon,exterminate,heikki,exalt,nein,zal,interludes,jahren,bide,suitor,russe,bevy,gravelly,inconspicuous,juste,wisps,urbane,hoek,nebuchadnezzar,diffusing,stupor,gratuitously,aimless,parfait,flit,quietness,accede,sicher,overshadow,xli,principale,turnips,statuette,theobald,dwindled,dispenses,fertilizing,ower,narcissist,sextant,falsehoods,swampy,euch,wast,obtenir,donning,cecily,sappho,estancia,wurden,fama,lustful,guano,presbyterians,worshiped,duque,autem,rebuked,cloisters,luella,presumptuous,toothache,presage,boars,afore,dour,moistened,kegs,unadulterated,reciprocate,rade,quia,begat,propelling,ripen,suffocating,athos,grasse,cinq,xxxiii,brawn,frowning,gaius,matchless,boatman,unconcerned,dood,orthography,conjured,assyrians,selv,vaulting,fonte,gossiping,freshen,tugged,gog,outdone,detest,paraded,trifling,undergrowth,enamored,carlotta,ceux,cuatro,methode,ulterior,puro,heracles,whirled,passim,thei,gebruik,vraag,jovial,scoundrel,romany,xxxviii,duplicity,meddle,exaltation,handiwork,andras,joyously,heaping,strident,oration,grunted,riche,pilote,wampum,dreading,humorist,nourishes,vite,cun,combative,winked,unhappily,rube,chronometer,squaring,wring,apparitions,shrieking,graaf,erst,scurvy,peacocks,ophir,wouldst,pocketed,enormity,coarser,hypnotism,oeil,dissociated,exclaims,ceaseless,emblematic,lerwick,fertilize,disengage,commonest,daj,unreserved,lessens,judicially,vend,smattering,taunts,stealthily,ripened,cleverness,roped,sorcerers,clang,sardinian,waltzes,sunlit,attests,parched,peaceable,achtung,stanzas,infuriated,dismounted,incongruous,kindest,stam,intervenes,vieles,bonnets,bared,frenchmen,callow,edicts,lemuel,inattentive,transmutation,sweeten,confide,voiceless,sombrero,isidore,headdress,nuestros,tannin,limite,boughs,naturel,overseers,presentment,sprigs,amiens,diez,prudently,foresees,patronizing,presentable,pales,dais,adornment,precipitating,hearken,insolence,blockhead,einige,patting,hippocrates,elaborately,lundi,gaslight,presides,divested,pith,eaux,transvaal,gaff,disintegrating,folie,frock,bleue,flambeau,fuming,veel,chattel,wrest,forgives,waterless,effectual,unimproved,paddled,inkling,vigils,schoenen,garcons,gauntlets,patria,blacksmiths,menor,ploughing,timon,parsimony,typified,darting,ashen,blunted,snarl,comptoir,echt,pained,inexcusable,laud,mutterings,precipice,geschrieben,recalcitrant,wos,thoughtfulness,harshness,ailes,neuve,limping,darum,utters,processions,gluttony,kneading,etwas,sait,templars,nineveh,mesures,enquired,aphorisms,compleat,consumptive,dalmatia,noisily,readjustment,unaccountable,weise,trickling,commoner,reminiscence,pouvoir,yeux,fui,waned,assented,overcharged,pucker,sanctify,messrs,insolent,octavio,portes,finis,beastly,fortresses,matrons,thun,gawain,guinevere,heresies,annihilated,tardiness,mangan,mose,specks,futur,incredulous,dere,calvinist,suas,buckler,peal,asunto,adroit,dilettante,georgiana,ecstacy,peasantry,oppressors,boeken,corns,faring,dama,unos,pinkish,blurted,tutelage,merited,hacia,peculiarity,decrepit,encroaching,s
olemnity,equivocal,lieb,amass,maketh,ihrem,disengaged,distilling,effigy,saloons,assailed,incensed,zachariah,veneration,broach,miseries,personification,partes,scuttle,rougher,supplanted,sardonic,aghast,raiment,disused,vetter,stooped,dower,andalusian,wordy,feudalism,achille,magister,bolting,lumbering,fourfold,forgave,antonius,indien,replenishing,immemorial,indwelling,seh,jaunt,genere,ipso,quartier,wallow,unabashed,haf,homeric,overpower,expounded,downpour,dumbfounded,cubits,outlast,frothy,macedonians,labouring,pouvez,nothings,kommen,allgemein,colonist,sorbonne,rares,colla,philippi,adduced,agli,unrequited,mangle,alludes,theseus,commuted,medan,saracen,annulled,covertly,dalle,rapped,foreboding,fortuitous,autumnal,xxxv,sepulchre,kunt,despotic,dicky,beholden,celui,apostate,enda,faltered,queda,entrar,sicherheit,gorse,louse,wilfully,paralysed,tillie,distanced,vespers,scylla,vats,urchins,implore,kindle,pricks,tenements,tithes,thinnest,sipped,mando,pulsation,hitching,xxxiv,obediently,calvinism,milked,vesuvius,disembodied,aylmer,scoff,confidant,nape,disparaging,impolite,bataille,oia,domine,sluice,darke,whistled,furor,austrians,craves,soiree,trouver,enslave,dimanche,grimly,espouse,casks,conjoined,cabled,muchos,lightened,spongy,verner,specious,threshing,infliction,frederica,entranced,deprives,onde,scimitar,holz,uninterested,cavalcade,adulation,loitering,dastardly,ludovic,avarice,sangen,butchered,pointedly,ouverture,rustle,excitable,hermanos,alluding,frere,insipid,unfathomable,ingmar,holiest,arbre,effeminate,vainly,straying,venereal,mercifully,blatt,pansies,acceded,dregs,obscures,millicent,foresaw,befriend,anker,malign,abortive,embarkation,varnished,zarathustra,valent,knoweth,anemones,sacre,hunched,buzzed,pickets,astringent,soothed,vins,premeditated,cherche,aucune,pueblos,sentimentality,tenable,jumbled,triumphantly,leva,vergessen,scolded,fetters,vulgarity,magasin,perpetuation,tafel,pliny,sewed,jubilant,sangamon,continuo,welche,silesia,staat,amputated,reappears,enquiring,masha,redden,kreis,faccia,gae,sobbed,omnium,copie,snuggled,surest,bribed,alarmingly,kosten,bloodless,basle,sigurd,tute,obliterate,dort,perron,pestle,falsity,sapling,elapse,myne,enamelled,torments,tortuous,oiseaux,seafaring,mooted,repented,infirmity,corydon,selfishly,drudgery,pacha,shrubbery,navies,impartially,imperfectly,slanderous,interminable,ancien,soins,indomitable,unseemly,vix,godlike,scrambles,arbeiten,merriment,rotted,thetis,repulsed,garni,brickwork,soulless,abbots,frontispiece,vivacious,bloodshot,salutations,pela,dogmas,forsooth,geordie,orestes,deathbed,indefensible,brutish,trill,venetia,melchior,xerxes,poudre,ramparts,disband,symmetrically,reek,hearers,frigates,availed,externals,principales,damsels,spielen,monotheism,menelaus,morsels,hatte,skirmishes,congratulatory,zuletzt,melodious,baited,veined,kens,norwegians,imitates,conjugal,boldest,hafen,flaubert,enunciated,strictures,flinging,ferme,discouragement,werke,vesper,parapet,filles,usurp,gerade,traduire,peremptory,unrecorded,seiner,gallia,hayne,lorsque,fronds,interposed,jugglers,veri,dessin,weet,naively,nominative,cleaves,doivent,avenging,ploughed,severing,ety,hev,cremona,martyred,afflict,crags,mimicry,intersected,tomkins,winced,literati,trotted,hungrily,scold,chirping,utan,tress,vaunted,astride,nostro,ruy,emancipated,ordain,rapt,wirt,sawed,receded,emboldened,pessimist,sedate,stammered,supposes,genteel,engulf,huguenot,epicurus,gouverneur,upu,hankering,normans,enumerating,toiling,spiteful,governess,alternated,colander,croak,abhor,boek,inexorable,chercher,harmoniously,bijoux,worshipin
g,gewicht,coolly,accompli,wann,vieille,ellos,hecho,verry,rowed,elfin,ingots,ridding,tegen,troppo,meads,exhaled,demolishing,pratique,calabash,brigantine,zeb,fitzhugh,rioters,persecutions,arriva,cramming,chuckling,disfigured,mers,chios,muro,oreille,transcended,xxxvi,cuerpo,tiel,faintest,bleek,adela,genitive,civile,haupt,testy,physiologist,imprison,repelled,abend,eran,quem,plundering,abhorrent,rebellions,sympathizers,scribbling,phineas,emissary,inhumanity,wem,belittle,repudiated,divina,leonie,sympathetically,permet,elis,liddy,dabei,rollicking,offhand,geraniums,bashful,doze,currants,absolve,conjectured,grandest,kinsmen,lier,welk,shipwrecked,doen,tacitly,dint,reverberation,quickening,waal,mistook,apprehensions,aunque,celestine,schoolmaster,impressionable,gingerly,apologised,riven,taketh,cornfield,fretting,fetter,jeers,manufactory,jarred,theorie,armen,bewilderment,loveliness,ministered,idiomatic,scalping,slav,attics,wilhelmina,hermits,gullies,prerogatives,banishment,tempering,kampf,fallacious,vestments,morsel,leniency,scrupulous,woodsman,bocca,dicta,meisten,aubert,richtig,clumsily,catholique,turpentine,ells,cussed,evaded,thickets,clink,personage,cavallo,vender,daar,bouche,delinquents,furlough,angleterre,snarling,samedi,creaking,bequeath,subjugation,gape,clase,unquestionable,prendre,irritates,whigs,despatches,titian,arras,fathoms,printemps,physic,nuptial,thickest,bulbous,whist,mieux,darauf,expound,eget,exhilaration,ziel,lordships,chanced,fastenings,ketch,treeless,adores,aground,splendidly,feuille,inattention,discolored,traf,sinning,jouer,forestall,vater,moselle,gnawing,crudely,saplings,profuse,dispelling,attainments,gane,couched,bestows,sone,particularity,knighthood,blesses,dure,sickened,tali,canteens,thoroughfares,donatello,penniless,abrogated,druck,kingship,puis,manes,relapsing,arcadian,claud,swart,eschew,vastness,precipitous,detachments,arsenals,hoofd,tramping,vieja,thereabouts,bloed,resultat,betrothed,pourquoi,dispelled,pierrot,duca,sameness,scruples,gloved,bete,dowdy,clamoring,aguas,visitations,recklessness,stirrups,intimated,allspice,squirming,thunderstruck,pleiades,surreptitiously,finery,langen,eugenie,sequestered,hesitating,stoops,stiffening,scrutinizing,allude,sprawled,interesse,tomar,courted,condoned,unsavory,deservedly,blackbirds,vowing,plying,gangrene,purplish,stille,enliven,hollowed,graven,lengua,craved,fracas,envelop,dismount,grudgingly,quae,bole,believeth,unafraid,stamens,omnipotence,irresponsibility,zelf,seaports,conscientiously,boomed,jussi,joust,grander,shackled,weedy,sacra,ipsa,grope,suomen,echte,brightens,muertos,jailer,gleich,gladden,sarcastically,tuft,quickened,reverent,braved,jaune,joli,beckoned,unquestioned,scrawled,savagely,usurped,monstrosity,certains,ravishing,grumbled,disheartening,nobis,stolz,unavoidably,blest,govinda,menial,clayey,delighting,vielen,conjuring,dutiful,absurdities,cabeza,ony,gordian,edification,flinch,xxxvii,despot,affaire,insincere,inger,vuelta,beckoning,vivant,vendre,ignis,begone,lucidity,feuds,toque,wille,primi,hiver,lateness,dier,nunnery,forefinger,rudiments,erwartet,heathens,celibate,simul,clatter,werd,faultless,awkwardness,praiseworthy,mosca,seigneur,ails,frage,vapours,jij,delphine,bruder,remiss,languishing,entrails,erreur,cossack,thrashed,topsail,modicum,malte,solange,ethiopians,rajah,persuasions,steppes,sheathed,derided,encroach,correlative,maire,diametrically,fasted,eunuch,algunos,gazes,virginians,negligently,sistine,higginson,hadden,unmoved,glum,perplexity,particulier,sabe,sulky,guarda,skyward,woeful,grund,droop,neque,dislodge,voyageur,waded,fl
ore,unacknowledged,quietest,carven,aptitudes,bonnes,confusions,fara,alimentary,wus,republik,encroachments,ineffable,hearer,awakes,republique,generis,zit,probity,formas,grubs,unflinching,murmuring,gaan,jungen,kop,triumphal,affable,hijo,worshipers,avons,flail,adulterated,nicodemus,ardor,wissenschaften,veo,missive,ascends,splintered,transacting,vus,nomine,busen,loafing,talus,republicanism,foibles,cose,choses,squatter,waldemar,colourless,unyielding,flabby,enlarges,apace,doktor,harbored,bulwark,stringy,seront,sonorous,breastplate,draughts,heaved,lazare,uel,fashioning,churned,correspondance,dappled,gallic,tacking,feigned,dross,solidity,doge,indecisive,recurs,dripped,epicure,levity,journeying,dito,oppressor,metrical,kopf,immeasurably,tussle,fiendish,glorification,wayfarer,arabians,expanses,nuits,dervish,irrepressible,leider,joppa,wilted,emoluments,egal,conned,mutes,outwit,magnesia,patronize,impassable,serf,koning,buries,vobis,signor,phlegm,reste,freedmen,obliging,hermetically,gravestones,uncommonly,nudged,inhospitable,dissension,intermingled,dwarfed,langs,asters,surmounted,elspeth,salutary,bringt,frosts,ached,defile,odio,ansehen,effectually,unprovoked,apocryphal,pallid,sulphuric,antipathy,atone,douce,storeroom,theodora,paler,lhe,wereld,offing,infest,dampier,hardens,frisk,alister,expelling,obliges,pertained,beneficent,luxuriant,mulatto,plausibly,concubine,complimenting,courtly,dampness,zusammen,platitudes,pois,porphyry,deviating,taunted,ernestine,bubbled,tienes,korte,mortified,upturned,cordage,hobbled,loath,gagner,nibbling,unsophisticated,vexing,longa,digression,astonish,dynastic,cognizance,piquet,loveliest,nearness,vif,procurator,plaintive,exult,claps,disreputable,seraph,dressmaker,fehler,publican,hoar,movimiento,kreuz,rebuffs,reichstag,woche,handmaid,oir,chemises,consuelo,impostor,nomen,ponderous,maisons,scrupulously,plaisir,intruding,baptize,fatigues,asaph,princesse,franche,plucky,dessins,eusebius,untidy,loggia,tribesmen,subsist,tuin,augen,beholding,scarfs,leve,shallows,ersten,unjustifiable,growls,sported,quaking,refraining,commingled,coasting,logement,kindern,conciliatory,stiffen,showman,officiated,distemper,subterfuge,jede,aspired,mathilde,pues,lazaro,mouvement,beispiel,penitent,toyed,anglaise,lamentation,tunc,extol,patrimony,belgians,knave,functionaries,croup,broadcloth,disuse,reeled,quire,goeth,fascinate,garish,baronet,bombastic,francie,scoffed,thieving,minde,thinke,snarled,unearthly,predestination,verbindung,regulus,vidi,trouve,rapides,reviled,coverlet,lustig,bringen,fearfully,musketeer,fiddles,furlongs,fens,ancienne,arraigned,liquide,tanz,whitewashed,gilding,twining,explication,violette,humanely,jungfrau,verdad,perrine,gaiety,alten,uttermost,aristophanes,letitia,overthrew,lave,frowns,fabricius,sheepish,diferentes,antic,abed,edifying,dreadfully,aun,sadder,ravage,contemptible,unfailing,fowls,untoward,gloster,venu,clergymen,fiel,endeavouring,dislodged,casse,obviate,juster,genom,ueber,primero,saluting,beguiling,bayonets,trompe,flavius,gie,playfulness,confluent,orde,deel,lernen,husks,beckon,raved,herren,anfang,jewelled,reaps,fatto,traum,premonition,recut,sureties,montre,grunting,baubles,personages,actes,exigencies,marveled,peloponnesian,gotha,tasso,waffen,cultivator,nihil,quintus,crucify,unsaid,fonctions,untie,instigator,girt,annul,lanky,illa,blushes,shewed,outdo,sycamores,truant,shrieked,ermine,corroboration,juge,circe,capitulation,aspirant,germinal,vindicate,repelling,gesucht,fallible,pantheism,strutting,incalculable,tijd,soliloquy,mammy,beaks,caresses,quello,indolent,ursus,banns,thistles,idi
osyncrasies,inducements,ennui,abetted,expending,ista,sweltering,purer,hedgerows,narrowest,disapproving,meses,interrogative,squealing,feverishly,sneaked,obras,drowns,nostri,persuasively,walloon,squalor,panelled,ossian,chaplet,narrate,peleus,ebon,hesiod,maman,bleat,glorifying,gleamed,valiantly,steeds,elli,infallibility,voll,altes,franciscans,comport,malheur,overdo,ragusa,sette,radishes,deeming,flaccid,eum,putrid,unguarded,prodded,fasts,sterner,tras,womanly,surmised,northwards,tiu,mayest,judiciously,worshipper,diderot,ruts,regretting,scolding,bosphorus,dimpled,massing,offen,leathery,hjem,caballos,grimace,bribing,unbecoming,bridles,rinaldo,dejected,vosges,comely,prow,sprig,apulia,squander,swarmed,wields,dragoons,brune,landholders,cradled,dreads,spurring,sollte,plaything,pander,stamm,abominations,viene,reestablished,strangling,cultivators,insignificance,deceiver,helle,sputtered,faites,merrier,simples,ruggles,miel,subsides,nobler,michaelmas,bildung,howled,blanched,allemand,unequalled,cicely,temperamental,dally,malays,nauseous,brandishing,wags,chronicler,allem,fais,disproved,justinian,lutte,dobbin,riz,coquette,menge,remarking,cobweb,punctually,unwillingly,cadeau,undoubted,formless,shipmates,englische,plaats,shorn,doubtfully,typhus,reticent,welter,lande,exertions,insel,sprachen,eins,retentive,gerda,plodding,deserter,rending,gaillard,consign,mantles,neatness,adornments,britannic,becher,unbeliever,parading,gamin,confederated,lume,overwhelms,embankments,quanto,speculator,madmen,listless,wheaten,deprecating,faggots,ducal,downcast,tedium,seamanship,gascoigne,pomegranates,sooth,knie,sportive,hewson,aout,turan,undeserved,principalities,aider,excelling,misadventure,meiner,rond,dramatists,servile,rickety,enchantments,fuori,secondo,figura,prosaic,diadem,pani,outa,bedeutung,sincerest,sagen,tittle,imprudent,keer,trou,nannie,laat,deliberated,snubbed,suffocate,applauding,epithets,toch,floundering,preserver,revolts,espy,deren,hallow,wharves,kunde,canvassed,chastisement,unmitigated,whined,sashes,assail,flirtation,unterhaltung,courtiers,carboniferous,brillant,equanimity,agitators,venerated,curs,neer,assimilating,proudest,subjunctive,harun,perishing,inaugurate,slavs,libres,noiseless,cayley,worshipful,geh,spurned,selim,chastised,zich,forethought,viscera,excitability,madder,exterminated,mette,bronzed,grimy,lascivious,ille,dispassionate,bonheur,charmingly,glimpsed,partaking,firebrand,deprecation,intimation,chequered,glimmering,alphonso,falla,disbelieve,brevet,darf,troyes,exterminating,revolted,bunched,besoin,scrutinised,allez,herded,athanasius,gemacht,deliberating,humaines,londoner,aeschylus,plantagenet,episcopalian,zwar,soldat,nisi,thucydides,tapa,repudiate,advisability,lope,festering,relinquishing,dessa,mercia,furies,piqued,jinks,biddy,compris,theophilus,crony,sambo,stellen,professes,wherewithal,shrieks,taas,ominously,caer,ablution,demure,athene,jist,ipse,parasols,munition,veered,jonge,serfdom,gossips,rawlinson,scuffle,uncritical,infatuated,rhythmically,gaat,riotous,tenga,embittered,unleavened,veces,stockade,parece,bushmen,babylonia,tempts,tempel,uur,devolve,satyr,fearlessly,ajar,pampas,altra,suppers,fluttered,untrustworthy,exhorted,ravines,yokes,howitzer,interjection,stocky,bazaars,himmel,greate,strenuously,wildness,compensations,laxity,deathly,unloved,balked,fairyland,balaam,hamar,rekindled,drams,entreat,brainless,souci,cessing,cocking,railed,abounding,fount,poacher,invisibly,lithe,intercede,tusks,hatten,ayrton,courtier,blotted,impetuous,grammes,shrouds,ambergris,hellen,clearness,embroider,hubbub,robed,unchangeable
,wunsch,haya,magisterial,boor,recites,anguished,ailleurs,meteoric,jacopo,equalled,palabra,arithmetical,royally,molle,plantes,dishonorable,thwarting,venise,scurrying,subverted,urbino,effets,broadsword,blankly,auras,bonfires,allt,cloudless,conflagration,xenophon,bevis,dethroned,chapitre,vestige,courrier,cheerfulness,egoism,cataclysm,harried,transshipment,cuore,fatherless,puedo,groen,seers,cretan,roumania,blubber,appeased,coaxed,pageantry,disparage,triste,chimed,phraseology,verdienen,memoire,morass,intimes,righting,moder,tasse,dessus,striding,panelling,braving,prayerful,raad,transfixed,balle,leaven,lout,tucking,unwary,herrings,cubit,begets,groundless,prancing,amelioration,wark,beeld,bezahlen,mightier,enthroned,overburdened,dwindle,lindau,beter,sujets,acquiesce,alacrity,drawbridge,gude,overhauling,girle,pulverized,holier,mauer,everard,uncivil,nondescript,employes,temperaments,consulter,simpleton,brutes,howsoever,unsympathetic,jermyn,dico,rejoinder,condescension,dilate,rasch,tiber,bekanntschaft,feuer,secours,skilfully,abolitionists,flustered,compactly,lasses,fus,corsage,hym,laboured,enumerates,decir,relinquishment,ohg,sall,cession,liken,forfeits,heeding,fata,revenu,helder,verder,caesarea,naturelle,wordless,sleepily,prowling,harmonie,eludes,revelry,deface,propensities,mimicked,mete,algunas,uninjured,rivage,populaire,lief,toddy,disheartened,ruinous,spoor,upanishads,eigene,bewitching,mihi,individu,accusers,sunshade,cuir,hals,furrows,throngs,sarcophagus,dozing,siete,chink,likenesses,pervading,caxton,soames,fermenting,beiden,blithe,paralyze,kazi,tilling,hereunto,daad,languish,feathery,reasoner,adorning,gaily,weib,samt,jubilation,tels,storks,accoutrements,abeyance,ciudades,enfin,suivi,iniquities,nadie,purring,squinting,strolls,encuentra,gradations,conocer,vsed,molest,appetizing,encamped,trifles,sammlung,langage,importantes,suiting,hesitates,paralytic,eastwards,parsimonious,pinafore,alwyn,albertine,disposer,politische,foreknowledge,galleys,sunning,farcical,weel,toiled,incited,rhythmical,rippled,tresses,agitating,oriana,frankness,castilian,bunsen,buenas,susa,sulle,fuera,outlived,anny,repulse,basaltic,hinter,middling,minstrels,personae,wain,englander,gascoyne,knighted,torchlight,teniendo,emanated,southerner,persevered,hounded,butted,longings,galilean,ayant,dominicans,helmsman,meditated,shuddering,homesteads,abrogation,justicia,jutting,deliverer,knecht,aeneid,vehemence,befell,ette,klar,neige,sneered,chattels,brambles,disembark,secede,unmixed,grieves,prises,tumbles,sogenannten,parnassus,debarred,dandelions,abyssinian,maler,bulgarians,coaxing,marshy,terres,inne,preying,grasps,subsisting,freunde,bladders,avions,junto,bloemen,latium,shuttered,alchemists,morose,poore,regretfully,abbeys,dutchmen,agitate,vien,abdication,discontents,botanists,bohemians,blir,foreheads,narrating,gering,pedant,stubbornness,distantly,humaine,averting,pyre,faubourg,wooed,chalky,teamster,beached,fringing,glans,thousandth,sacrilege,demagogue,demean,changement,stipulating,propping,straighter,weirdly,broods,rejoices,limber,hablar,mahomet,telegraphy,lehre,doeth,verschiedenen,chrysostom,blackfeet,waistcoats,chalked,mightiest,marvelously,apse,bailiffs,infirmities,illum,aboot,jolted,manne,jacobite,viendo,freckled,plenipotentiary,philistine,gambled,chaleur,unimaginative,joyeux,gratify,meuse,certainties,zie,fittingly,gelatine,undid,quelque,publick,electioneering,nette,ressource,betel,moisten,demoralized,peopled,suffi,swooped,doctored,soured,quieted,albumen,encircle,carmelite,anges,exhort,voyagers,tendrils,thal,nullification,ostensible,malarial,
exasperation,stumpy,jeden,whereon,entente,nala,mainsail,inom,promptness,retraite,excommunicated,scalding,storekeeper,muskets,uglier,witchery,predilection,wavered,climes,firelight,contrivance,anoint,scatters,wallowing,hindrances,braver,repartee,boggy,vragen,termes,chiming,modulations,philanthropists,urteil,retaliated,founds,poplars,knightly,debater,tarde,millinery,appian,irresistibly,endeavoring,comically,substratum,porpoises,snel,persuades,rapports,foreshadowed,meekness,audibly,dewy,obliquely,uneasily,meted,liveth,outre,agin,phoenicia,boven,jaunty,balthazar,squeamish,tono,parmi,eccentricities,pasar,potentialities,anthea,letzten,airships,presuppose,hetty,affectation,abdicate,creak,archdeacon,haciendo,pretension,descents,vicissitudes,dupes,larks,tormentor,tagen,postilion,weal,grudges,perversity,convulsive,inflame,zien,eclat,doric,pathetically,bluster,witching,depreciate,bellum,gendarme,dionysius,imperceptible,fattest,atolls,tibi,parley,jessamine,palatial,prelate,flippant,libations,convivial,trat,adorns,kamer,grubbing,commoners,cultivates,thankfulness,nich,unturned,workroom,zukunft,phoebus,censured,sache,relished,boers,toils,salles,enorme,instigation,veuve,indefatigable,overthrowing,maudlin,excusable,craggy,gushed,extricate,provocations,deplore,defrauded,laut,aplomb,centum,cabbages,epee,truism,employe,fervour,babylonians,fabius,despondent,ostia,cunningly,bathers,turbid,sceptics,pollyanna,bort,privateers,knowe,preoccupations,ludovico,besonders,villainy,feuilles,diverses,maladie,hurtling,squabble,ravin,seest,omnes,methodism,mente,luego,overtakes,predominates,phillis,startlingly,couplet,falta,inimical,imperious,townsmen,sondern,revoir,handfuls,gratia,formant,gongs,eigenen,larga,pentateuch,immobility,purifies,sparkled,interchanged,lulled,disrepute,rechten,implacable,sert,employments,carinthia,attired,uncalled,repels,zat,aika,pliant,reappearance,urbain,avocat,emaciated,gern,vassal,cantos,manse,pining,unknowing,blithely,moderns,fashionably,virginal,augur,colonizing,bodleian,bicameral,chapeau,dramatized,bringeth,paquet,regle,broomstick,suffocated,voulez,marauding,cynically,assuage,estrangement,versicherung,limped,yearned,fondest,parce,frightens,incontinent,amante,perpetrate,nombres,mientras,fiercest,coining,invective,sueur,depose,pacify,sunder,excommunication,grizzled,lade,caballo,loathed,florid,fatalism,despises,chanter,quacks,arme,wend,blackest,reihe,roubles,relented,meinung,tarred,beget,mooi,stenographer,nipped,disguising,invulnerable,flickered,quiere,kummer,hideously,motherly,modele,vexatious,coachmen,girlish,reddening,foremen,shamefully,herculean,tormenting,pleura,bragged,pester,deputation,oppressing,domineering,obtrusive,wrinkling,wiry,labyrinths,jealously,beare,welches,footman,pense,chafe,tapis,schoolboys,alexandrian,sinless,manche,nobly,absolutism,hause,grosser,gudrun,sharer,confidences,wakefulness,monopolize,gehen,consoled,mayores,contrition,diener,resound,unsuspected,archbishops,tarpaulin,abajo,mustapha,cherokees,peaceably,exacted,oddest,purposed,evince,hyenas,schoolmates,luogo,breathlessly,hoarded,naturalness,flings,irritably,gorgeously,helt,noonday,courteously,sinuous,availing,meekly,briefer,serfs,vives,homburg,wailed,ippolito,thunderbolts,tule,hustling,milanese,foran,bloomed,hortense,scrawl,manana,sprechen,foamed,refectory,yearns,unaccustomed,platoons,unbelieving,luminary,quitter,purser,pratiques,furtive,renouncing,accosted,conning,tiempos,incantations,enchantress,parallelogram,wonderment,pasado,groped,warder,morbidly,palfrey,persecuting,feign,swooping,jackals,niceties,outlive,derelictio
n,exactness,barbarossa,dray,silurian,detaching,sunburned,spasmodic,interlacing,elegante,corne,quietude,roundly,monarchies,trost,rhododendrons,flirted,vraiment,royalist,untroubled,aspirants,sheepishly,denk,haft,parisienne,russie,warily,cadmus,telle,aflame,gits,aright,windlass,studious,fineness,estan,setzen,pharisee,devenir,cercle,urania,amicably,tureen,nuptials,greif,flints,satirist,visiter,pone,camillo,hade,extort,staaten,gleeful,sprightly,grindstone,speaketh,sacredness,menton,petticoats,proffer,haply,pronounces,fussing,stragglers,scowl,tinder,omniscience,vot,leaden,advantageously,kinderen,pounced,statt,wollte,bayeux,tertullian,pompe,fastidious,ensconced,cyprian,sagacity,nipping,fogs,ausbildung,protestations,trickled,lungo,erde,fondled,poids,wistfully,abounded,heureux,disloyal,paralyzing,staggers,contorted,polemical,neighborly,dabbled,villes,piteous,olen,perfunctory,pervaded,doorsteps,falsetto,tatters,whan,puissance,tunics,lepers,gloating,dismembered,hierro,perfidy,minne,meaner,propounded,valois,insubordination,impious,absolved,dishonored,vivir,bathsheba,klara,stilted,hastening,dines,capon,stiffly,folgenden,cacher,festivity,grk,thessaly,folgende,ayre,afire,sowed,proprio,brahmins,gloat,entanglements,clawing,wrangle,autour,immensity,squabbling,acquiesced,rosamund,deinen,consecrate,pursuers,predestined,gneiss,gevonden,rhin,disobeyed,firme,dishonour,lavished,courtesan,unkempt,bassin,zeichen,jeder,interjected,humorously,victoriously,ascents,hingegen,retarding,indiscretion,undertone,adot,decease,stigmatized,tactful,friable,palatinate,liegen,fawning,decoction,resents,orientals,squeaking,tinkling,drie,nostrum,masterly,dunce,fera,butchery,wresting,treacle,frankrijk,foolhardy,bristling,boreas,cherubim,nightcap,massy,consoling,nues,characterises,antiochus,cutlets,hoofs,drawl,veux,manoeuvring,lances,helfen,rivier,imogene,impute,dainties,leghorn,directness,glutton,laquelle,unnaturally,disquiet,deerskin,meest,sufficed,extolling,wearied,barbe,pitied,hame,sibyl,lignes,victoire,erring,geschiedenis,acclamation,ypres,gigante,solamente,berenice,cisterns,kist,panoply,credulity,coiling,capuchin,verkehr,sympathise,piti,sist,noirs,pitying,twitched,clefs,actuel,vem,panted,midshipman,juda,gondolas,swiftness,necessaries,nullity,tuli,tenemos,relishing,unsuited,gurgling,imaginings,hvis,boatswain,hearthstone,fondle,cuddled,superintendence,regeln,betters,joab,corruptions,persevering,transversely,abelard,illusive,octavius,disquieting,ripeness,veering,alguna,tiere,junker,vapid,hohe,pieds,unremitting,rechnung,clenching,cordials,bandaged,evanescent,fevered,indignity,pinches,aglow,midden,sieg,notamment,bullocks,peinture,moyenne,valerius,chucked,ransacked,bugbear,wreaked,hogshead,masques,halfpenny,fetes,kneels,reticence,iambic,lisbeth,deplored,icke,unfashionable,jacobean,loveth,sceptic,vociferous,eunuchs,comed,salz,languished,sneering,coitus,churchman,lisette,cocoons,deserters,ainda,verre,smallness,esas,remotest,retorts,housekeepers,farewells,conscript,redder,cria,troupes,tiptoe,sociability,idealists,xlv,crowing,celles,thankless,avers,hochzeit,schuld,quale,sublimity,birches,crunched,ratifications,ringleader,thundered,fumed,feste,thereunto,compatriot,discontented,droning,yawned,scuttled,wochen,inoffensive,erudition,bedsteads,perrot,strictness,welke,entretien,frivolity,gulped,subtler,vestidos,inviolable,toten,riflemen,insufferable,clasping,landen,interjections,usurpation,brimmed,subjugated,unlearned,prostrated,kaffee,excusing,rejoining,subir,etiam,slanting,maisie,detested,overal,dauntless,pulsations,frugality,apprenticed,reflexi
on,vomited,loth,undisciplined,signalized,lunged,alii,vergil,wiens,verts,opere,pouting,watling,daher,vrij,creer,cranny,springy,perplex,lamentable,signes,besuchen,rebelling,destitution,rummaging,broached,puckered,squalid,shunning,erhalten,cinders,interrogatory,syndic,cleaving,semicircular,montant,trow,overwork,kirche,farben,roches,pommel,intermixed,logik,rerum,freemen,mellan,linnet,heightening,goede,laddie,bellowed,tante,sair,questi,entier,timbered,sxi,unrighteousness,shrilly,catullus,dulled,nuestras,interlocutor,kingly,chided,turbans,acquit,tota,choisir,hvor,singe,stunden,harping,etwa,akimbo,beeches,seule,augmenter,hieroglyphic,aryans,banishing,unicameral,clamour,sopra,alvar,punkt,dunkel,erle,unadorned,prefaced,wijn,gleichen,verband,majesties,endearment,fealty,disputation,leicht,whoso,thracian,forerunners,exhalation,investiture,animates,ruffian,turkestan,balthasar,ourself,invariable,inclines,southey,patronising,deciphered,shudders,voie,gerne,ardently,granitic,untried,luise,narada,intruded,marmaduke,coppice,autocracy,backwardness,undiminished,caput,connaissance,discomforts,clammy,indisputably,rifled,meglio,pomerania,fane,latterly,flogged,disadvantageous,philological,enamoured,unpalatable,shrugging,disse,persistency,conscripts,chimeras,befits,instants,denunciations,pervade,entrapped,suerte,apaches,archduke,myriads,physiologists,egotism,motherless,cien,tiberias,chaldean,comedie,reciprocated,squabbles,buffoon,tilled,rumbled,mittel,ambos,disobeying,drusilla,sidon,acrid,dijo,trespasses,conversed,ingeniously,howitt,counterbalanced,undertakers,pricked,coppers,reddened,exhortations,wohnung,againe,hijos,poulet,degenerates,demeanour,broadsides,closeted,unceremoniously,genuineness,bungay,poissons,volte,suoi,wirklich,iho,crannies,prospering,dearer,familles,minutely,seditious,trotz,inarticulate,turba,brust,rameau,silvered,youse,seno,poche,neuem,fromage,gunboat,drippings,voici,alida,messager,asceticism,reconciles,disentangle,bestowing,belie,ostend,divining,balustrade,fortieth,adulterous,slyly,shied,plantains,eveline,deferential,enlivened,coterie,magnanimous,plait,guttural,prided,anciens,capsized,breslau,unreality,weiteren,murs,lath,encampments,hindenburg,whiten,derniers,entendre,cuidado,reynard,remarque,katrine,perused,refrains,furrowed,tabernacles,virile,poignancy,detestable,pouce,certaines,sombra,narbonne,voisin,jilted,centurions,poring,quivers,flaunting,peeped,kiu,ellas,quer,wails,gild,debonair,indignantly,invigorated,bucolic,disaffection,grappled,executioners,belial,harde,blessedness,courtesies,misericordia,apotheosis,jette,bettering,tigress,geworden,occhi,chante,bleating,stratagem,squatted,dagon,hugues,atalanta,partage,authoritatively,unpleasantness,bettered,imbecile,gravest,defilement,butting,gobbled,hispaniola,conceives,townsfolk,afflicts,thinness,counteracting,marilla,ramshackle,dullness,syllogism,wrenched,giovane,usurping,arouses,augustinian,scald,rois,rodolphe,heliotrope,aquiline,reapers,uncouth,allein,whimpering,eleazar,portent,fatten,crossly,hadst,fier,admonish,battlements,transgress,leant,lank,governorship,tolled,zealously,aen,dowager,werken,squealed,convents,romane,vertrag,usurper,recitations,inculcate,olla,encumber,blut,golfe,wier,unimpaired,liue,heedless,rancor,trots,providential,freiheit,daresay,kapitel,liberality,principes,semaines,stort,indulges,unthinking,tutta,marcelle,flossie,inestimable,whiles,henne,distrusted,prie,mohawks,ignoble,frankish,jeroboam,timidly,lurked,greyish,imitative,igual,pagodas,ganze,hobble,maan,roten,kannst,tills,repentant,comite,meanness,wege,biding,unassailable,si
dan,mutters,singhalese,mammon,cavour,discoverable,letty,tombe,beltane,whir,afflicting,posto,biographers,escrito,hyacinths,demandes,freeholders,ventre,facetious,tinkle,wormed,histoires,weiber,approche,civilly,unhurt,incredulity,yawns,croker,liisa,proscription,foretell,hoards,boccaccio,whimpered,businesslike,egypte,juba,frill,landward,cripples,amusingly,cornices,ostentatious,vrai,pocketing,bereits,shylock,deseo,paymaster,canaanites,carnac,gnarled,doce,gnashing,preuve,plod,damals,covetousness,dammed,piebald,unawares,scornful,crosswise,tuneful,hache,girolamo,quienes,humdrum,distended,faun,parler,folgen,fatness,summe,lente,dangled,fixedly,feebly,objekt,vexation,bastions,bailly,threadbare,emissaries,weh,vertue,subsiding,hebe,purred,lieve,contingents,squirmed,haren,sangue,cringing,saal,kleinen,hys,outstrip,demerits,highwayman,contes,hussars,fatherly,jehu,southwards,swerved,unas,recurred,roams,fuhr,hemos,terrify,licentiate,periode,innerhalb,inflammable,freundin,disowned,parlement,surmount,hellenes,unheeded,siecle,nicholl,magis,wolle,apprendre,habitations,warf,cowering,overhear,tawdry,doublets,saintes,buona,gaspard,skall,canonized,solicitous,findet,vorbei,hulking,realidad,seconde,carcase,caballeros,unwound,whiche,progres,reveille,garrisons,professeur,shames,schicken,predominated,wilden,pittance,gironde,gosse,escutcheon,winging,alcibiades,schatten,curds,sinfulness,recapitulation,trudged,junger,hummed,convalescence,verite,spada,priam,unceasing,disdainful,cackling,blancs,freres,aimer,parsnips,trembles,davon,dryly,ingratitude,postes,godt,largesse,humped,mooie,rowboat,perfections,restive,hackneyed,canticle,peine,naivete,circuitous,frieden,imploring,erebus,abridge,picardy,glisten,clubbed,turnings,unblemished,trenchant,lilla,volleys,hommage,girlhood,freshening,rill,andar,lodgment,clumsiness,witless,regale,crus,siya,amuses,pallor,unwholesome,parsifal,copra,journeymen,filipinas,hippolyte,marsa,galling,vei,quitted,tomba,musta,brawny,quella,fueron,prattle,partakers,climat,ilium,livy,incorruptible,puritanism,carthaginian,assiduously,nibbled,appeasing,piquant,grond,magno,leute,unreservedly,tattle,baste,manier,willst,inseparably,anthers,buttonhole,uncivilized,insensible,seasick,redouble,theodosius,liberte,rostrum,ejaculated,eux,sables,pian,admonitions,shewing,suelo,cower,erfahren,inferiors,singed,gird,territoire,pierces,jugend,kleidung,erfahrungen,solicitude,pawnbroker,reverently,deign,eher,hominy,doting,fuerza,blistered,glittered,hanseatic,pestered,preeminence,billows,biens,etten,carted,despots,gnaw,bandied,liegt,vinden,rijk,perversely,bors,transfigured,dauer,quizzical,couper,informers,resentments,bartered,sugared,spittle,circumspect,demerit,shouldst,roundness,acrimonious,pulpits,warding,unbuttoned,brot,feit,frolics,groat,matins,formes,bellowing,platon,abhorrence,verbo,osten,blackish,emme,aphorism,emanation,miscreants,unction,redan,seguir,noblemen,barque,deride,kirke,houseman,sedges,pitiless,zwarte,portly,jangle,jarl,beauteous,veld,contrive,huguenots,estimable,scowled,ministration,willet,wriggle,impudent,xlii,petted,meist,prude,heroically,phoenicians,enjoining,willen,hustled,jinny,surreptitious,petulant,unfurled,sauf,lits,chinaman,nonchalant,disloyalty,laconic,westwards,nase,paha,askance,misma,binnen,baronial,charrette,denouement,belied,obliquity,satiric,quivered,sche,sanctimonious,natt,ebbs,obed,ezek,heet,stammering,waked,logis,foolscap,sorte,oases,brach,limites,calma,unmeasured,statuettes,nubes,unga,gegeben,satz,twinge,cultus,trudging,narcisse,feasted,rebukes,colquhoun,quadrille,inconnu,lucretius,sprach,ihres,
docteur,meubles,whome,repressing,embroideries,booke,ingenio,intellects,brawling,veut,tient,gelatinous,meilleures,figur,gentlemanly,underbrush,bemoan,norsemen,forsaking,souvent,bobbed,diversities,gouden,pontus,unintelligent,holies,annexing,vriend,amas,asylums,satires,coffer,costliest,ravaging,rarefied,nebel,gleichzeitig,leyes,deprecate,lvi,serait,esos,chivalrous,overruling,gendarmerie,konnte,groene,obstinacy,caked,delude,similes,seeme,puertas,recedes,wroth,emetic,gestellt,holde,capitale,steamboats,naturelles,towered,fastness,gautama,alsatian,unrighteous,torpor,leser,desecrated,transgressed,publiques,rawdon,endeared,arsene,pecked,colonne,dozed,outstripped,chaldeans,perdu,repast,annee,majestically,shapeless,heen,contrite,pursed,principio,entreated,heliopolis,chel,righteously,marvelled,seductions,taga,propitious,domesticity,dashwood,veta,chastise,inveterate,peacefulness,extolled,absently,promis,breit,copse,espada,highwaymen,orators,incorrigible,abating,sonore,feigning,passant,liveliest,sixtieth,reproof,filets,baiser,credulous,inflections,lintel,allora,stak,hereupon,clod,alaric,beneficence,impregnable,poca,dessen,penmanship,dese,girded,bessy,inscribe,adelante,serenely,nosing,crowed,vnto,cooped,overwrought,vivacity,incontrovertible,forenoon,clotted,jolyon,certitude,marshalled,approvingly,waif,ruder,suffused,fanden,altijd,artless,morne,cowed,longueur,deeps,forger,busied,venir,kith,vrouwen,valenciennes,komt,noblesse,jostling,satiety,tolerably,consanguinity,wint,convulsion,slumbering,heraclitus,semicircle,vient,squinted,exaggerations,editorship,rapturous,unobtrusively,sabes,choicest,tempestuous,vaillant,bamboos,noticia,signora,flitting,laboriously,inmost,jehan,vorhanden,poesie,snuffed,cannot,vache,sere,slighted,keinen,maner,stammer,inordinately,fidget,borst,comprehends,gleams,sieges,magnifique,pollux,sieben,muzzles,peleg,punic,oser,saman,epirus,fantastique,tilbage,astern,pelted,stoutly,insinuating,auge,leib,unequally,profligate,sated,acht,apprise,bothe,goda,beady,oberst,abdicated,reveries,hauteur,unerring,arter,euer,denizen,elegiac,bivouac,owain,doggedly,hermano,ladyship,kneeled,longe,rire,marcha,problematical,tanden,drapeau,crackled,defenceless,pricking,invalids,eiland,harbouring,droite,fastens,igen,paysage,fleshly,striven,lurched,blotches,persoon,herre,pistil,legen,northumbrian,apprehending,werde,insinuate,deadening,froid,angele,dolt,propria,schreef,agreeably,scouted,intime,splendors,capstan,feint,muscovite,pursuer,letto,wrappings,daunted,candido,ske,aurore,couplets,socialistic,narrowness,dwelleth,mogelijk,moustaches,manzoni,brushwood,arrogantly,traurig,lieux,barricaded,pillaging,vingt,tief,perles,bungling,impel,schlecht,expectantly,perching,solum,broiling,gangway,tantalus,rapacious,uniquement,debased,concubines,jogged,sentido,entangle,steepness,franchi,puritanical,capacious,prefects,clew,biscay,unrolled,tambour,watchword,drummed,verging,interdict,geplaatst,scamper,devoutly,transmigration,deshalb,redoubt,meus,kerk,revenant,instil,boastful,bilious,orsini,despondency,disheveled,exclamations,allegories,entonces,trudge,mincing,scurried,setzt,homesickness,metamorphosed,hussy,stoicism,congregated,covetous,ewer,grootste,doux,directe,hysterics,procures,stimme,aceite,concerne,devours,waists,judaea,leden,quidam,potentate,barbarity,extirpated,charlatan,slouching,susceptibilities,plaited,floe,surtout,agonies,misjudged,writhed,beine,housemaid,eurydice,undeserving,untruth,directement,preyed,relent,zillah,verba,horsehair,seinem,handelt,gien,mandarins,sforza,indifferently,nevil,shuns,teile,retinue,hulda,impostors
,stehen,brawls,derangement,mesmo,hinaus,epictetus,impertinent,ouvrir,buffeted,physiognomy,hecuba,oiseau,behooves,misshapen,scrubby,jedoch,unpolished,vales,steadiness,ceaselessly,irishmen,charmes,succor,branche,efecto,ague,sodden,helpe,changements,unavailing,vagabonds,irreverence,ditt,chaises,statesmanship,papst,popolo,saner,tendre,halla,demoralizing,prest,disillusion,frocks,poner,thronged,iets,beseeching,irksome,burgesses,abbess,minuit,uncounted,schoolroom,varus,terrasse,teufel,teaspoonful,rambled,bertin,monta,kneaded,fertilised,rosse,emanations,veiling,squandering,wahrheit,quiescence,gilet,widowhood,eut,swarthy,abyssinia,populaires,poetically,durance,farnese,chid,menaces,desir,ambling,perilously,numbed,acteurs,regel,bathes,drover,wees,dogmatism,chasseur,grudging,reciprocally,effusions,snared,brogue,passeth,gret,namn,squeaked,seance,stilled,bygones,assez,mentre,contentedly,roughest,entreaties,ridiculing,alternations,penitence,discours,avails,velvets,completer,streit,recevoir,tactfully,speake,gericht,borde,drunkards,danton,hurries,smolensk,terreno,tweede,ouvert,duchesse,mingles,strafe,corrals,rectitude,semble,engen,erreichen,encircles,garratt,jorden,uncleanness,viens,pried,supplications,onely,deportment,marchandises,invidious,weten,seraphic,gedanken,malevolence,wetten,alcalde,judicature,vigueur,einzelne,exhorting,libation,facit,soient,duas,rechts,bagatelle,chaine,nonchalantly,drenching,verhaal,subi,chiens,prance,lapsing,suivre,edifices,gruel,fing,exasperating,grievously,hauts,partout,hesitancy,courte,chafed,kennen,interposition,callings,satisfactions,distrustful,incredulously,zij,obsequious,moyens,dissolute,briefest,lamplight,sharpshooters,druggist,absolu,unprincipled,sweated,lieth,flinched,zeer,pacification,nitrogenous,sackcloth,enraptured,indique,boeuf,fidgety,disown,sophistry,illumined,thir,agonized,pickpocket,warbling,shriveled,conformable,imprisoning,incongruity,uselessly,gallantly,bended,drang,poignantly,untiring,hostelry,slumbers,forfeiting,fertig,humphry,numberless,intemperance,definiteness,reproved,privation,westen,peevish,tapio,pedagogue,soothsayer,facings,multiform,peuple,herculaneum,carthaginians,micheline,indelibly,ashy,cependant,cruelties,unseren,cadences,slavish,bawling,awestruck,bluer,felicitous,caravel,calles,plaudits,schooners,mycket,chacun,demander,weniger,eltern,adepts,clefts,kapital,underhand,sophist,heimat,idolatrous,secundum,smouldering,tradespeople,untersuchung,polytheism,varias,revellers,rebuff,appellations,draughtsman,boulet,verandas,pwh,pindar,iscariot,bombast,soyez,bateaux,impulsively,cuarto,seeth,milch,depredations,dews,kalt,temerity,mlle,eluding,adventitious,interdit,corked,deluged,fleecy,antelopes,daub,unanswerable,darkens,excellencies,strahl,isak,gedicht,atque,untainted,eigenschaften,slays,crees,whirring,miserly,troth,contemptuously,frequenting,mannes,celerity,grottoes,marthe,milliner,komma,blase,hoose,exonerate,righted,sayd,travailler,imperishable,degen,spurn,famished,romping,oozed,cuanto,contient,devrait,bidden,tuileries,samen,contraire,vasili,monopolized,abstruse,stripling,overshadowing,succour,whizzing,headman,saat,mellowed,ebenso,contiguity,morts,retracing,similitude,servent,verdure,sward,exclusiveness,anwendung,forse,deines,tira,reclined,throbbed,divines,prostration,wretchedness,admis,festooned,barest,steadfastness,boog,digressions,diocletian,fellers,begrudge,xliii,coxswain,schriften,counselled,sentries,reproaches,pediment,hayti,geef,cassio,meinem,wanneer,baleful,swifter,timotheus,hulp,gelten,miroir,promesse,apenas,hillock,fearlessness,neben,waggon,unalt
erable,beelzebub,inexpressible,indios,cherishing,crooning,bref,wist,eius,disavow,peals,mariette,backsliding,ziehen,whisking,wantonly,samovar,zweifel,oppresses,footstep,stewing,schnee,acrimony,bristly,soever,ruefully,unfavorably,slothful,sitt,diep,exhorts,moloch,epigram,wafted,keepe,expends,golde,reassuringly,thwarts,sitz,staats,jedenfalls,abhorred,zeigt,sollten,mene,worketh,phosphorescent,sauntered,foundling,illiberal,deserting,onlooker,deathless,assurer,scandinavians,legate,dissuaded,paled,ascribes,hearths,duller,discoverers,furled,denken,caminos,esdras,typify,ganzen,commissariat,seele,abydos,cornfields,ebbing,evelina,resta,portents,venetians,unnerved,demain,participles,harmlessly,purty,possessors,mephistopheles,pologne,seene,fortes,liveliness,godson,passa,peur,conserver,paling,deur,bisher,schwester,autocrat,shouldering,hovel,gauls,conforme,honneur,stirrings,decider,lusitania,rustled,unquenchable,foreseeing,indolence,profundity,lawe,paru,vostro,turgid,exigency,exige,necesario,reined,prend,unenviable,genau,unfeeling,cooing,haine,bishopric,espoir,severest,lesse,beautifying,glistened,encroached,corriente,suppleness,irascible,eigenes,canute,vibrated,denuded,rendre,subjugate,commissaire,gulden,naturaleza,niobe,incorporeal,orderlies,thrushes,dient,ferried,wriggling,crape,mouldy,amant,merest,wordes,perpendicularly,expounding,nutzen,gestern,swaddling,benighted,hysteric,robespierre,tillbaka,exultation,fand,blanke,selfsame,overcoats,calvinists,grovel,soberly,therfore,mellem,gayest,vais,fetid,boatmen,vespasian,singleness,kette,yearnings,remise,unquiet,einzige,herbage,adduce,twaddle,unitarians,unutterable,outshine,parisians,stellt,patronized,aldus,pommes,inelegant,clambered,histrionic,subsists,degenerating,recommande,sergius,taciturn,sways,bristled,flecked,mustering,allemande,sophy,paramaribo,betrothal,boorish,posa,queste,sinon,devoir,hunde,adjoined,soumis,pire,vilest,niin,vassals,throttled,fonder,entrancing,elope,seid,nehmen,welshman,beguiled,besoins,violetta,stillen,sinew,mordant,clotilde,ascribing,zahl,compter,germanicus,declension,fawns,damaris,anodyne,dearie,verum,voller,lequel,enigmas,kinde,bezoek,humored,befalls,endlich,yli,primeros,chere,fussed,anabaptists,xliv,disembarked,burgundian,telles,pente,thumped,superbe,conjectural,tendance,idlers,eigentlich,hoog,contortions,effusive,heilig,cloistered,redoubled,choristers,bosoms,flapped,supernumerary,aqueducts,ngon,reprobate,despues,indiscretions,riper,forsook,hittites,tatler,prelates,unserem,ensigns,sauve,miei,spendthrift,antipodes,chers,grossest,shanties,ploughs,lashings,noemi,loue,persecutors,averred,valueless,imperceptibly,jaren,uden,dise,crevasse,hastens,huizen,davantage,brilliancy,gushes,marechal,surer,frae,traitorous,hacen,levite,quieting,candour,pacified,drin,gored,remunerative,intricacy,coralie,pendulous,eare,mourner,enfold,wirst,troubadours,amours,reentered,paupers,bludgeon,welled,naturae,inconsiderable,cotyledons,cackle,sallow,gemaakt,montagnes,reformatory,demeure,ostentation,ninguna,cherishes,souper,wrathful,thuis,partook,ehe,familiars,blacken,zorg,possibles,vannes,schemer,lika,actuellement,deiner,writhe,friendless,proboscis,fitful,sicut,genii,intrust,illi,dishonoured,unquestioning,desultory,fabrique,pitifully,egen,menacingly,emmeline,linken,disinclined,lackeys,codicil,puerile,kleber,journaux,worthlessness,oblation,franziska,caracalla,civilizing,conseiller,corneille,merken,dorp,palaver,gorgias,tribu,unvarnished,overran,folies,wretches,hoarsely,bonhomme,hellenism,statecraft,familien,propia,flout,studiously,reveled,confounds,pitiable,countri
e,reiteration,corsairs,indiscreet,duelling,pedantry,lugged,debilitated,blazon,gars,looseness,neglectful,gamla,pillaged,voces,reasonings,vestido,agathe,niemand,tost,worthily,passy,verfahren,insomuch,anneke,scruple,steadied,coolie,honeyed,recoiled,comprendre,disliking,chinks,unripe,shipmate,convulsed,noce,cleanness,unmolested,insistently,fording,linie,telegraphs,coverts,transgressors,redolent,impudence,ananias,vied,eulogies,weakling,griefs,yoked,steeples,tares,detto,tottering,grossen,scalps,despaired,quails,satiated,plupart,principaux,lightnings,repenting,souldiers,manliness,churchmen,parthian,knowen,chirped,facta,himselfe,derisive,imbibed,hanoverian,samma,warton,equipage,prophesying,abodes,kring,spouted,clanging,windpipe,veronese,guiltless,burnings,caractere,estaba,distresses,retaken,heere,intermingling,foundered,mandat,blinde,dispensations,irretrievably,thralls,crise,connivance,miscreant,bitterest,uncertainly,resenting,kingdome,familiarly,reviens,scowling,swaggering,grandly,publicans,graciousness,footlights,smarting,pueda,hatreds,imperil,salamis,supplie,zweite,censer,surfeit,schneller,obeisance,whelp,fantaisie,monnaie,ignominious,entschieden,sulking,keenest,ungainly,darstellung,bauble,circlet,rouses,dormir,consolations,enslaving,medes,deale,odorous,indefinable,faits,kenne,ironical,sympathized,uncultivated,functionary,suppositions,jehoshaphat,chevaux,elegies,carbines,richt,kaffir,livelier,gervase,grenadiers,bruit,acacias,magnanimity,aleck,propio,fiesole,gallops,dexterous,connaissances,hebt,beaute,hoor,modernes,undignified,stesso,conocimiento,mord,endear,effigies,folge,counteracted,planking,blockhouse,confiance,urbanity,lawgiver,totter,rumpled,scalded,importations,laughingly,prefaces,tenue,idolaters,seducer,haire,tenaciously,moonbeams,inculcated,monate,verschiedene,wohin,generall,reposed,cicerone,mustaches,hasard,leddy,mildest,restlessly,uselessness,lezen,doet,oaken,endroit,harlots,conduite,rouges,humours,humain,voltaic,derriere,xlviii,flot,cudgel,aurait,multifarious,runneth,tenu,llegar,abhors,minarets,wrack,bleiben,vividness,beatitude,husbandman,procureur,stuk,douleur,heaves,xlvii,sagt,passi,subaltern,appui,bharata,longingly,apud,bandes,roseate,ruffians,servir,contralto,tenter,rues,dote,valdemar,curtly,resuscitated,exemples,confidante,rashly,athen,leering,soudan,clearings,pleasantries,louer,uomini,atoning,insinuated,xlvi,warble,prodigies,herbes,phrygia,overige,dardanelles,familiarized,fakir,rato,divinities,ostracism,magasins,buttresses,drovers,obelisks,vierge,doggerel,existences,farre,extravagantly,hauptmann,builded,volle,slandered,demagogues,cephas,flighty,opposer,ejus,gabled,convient,ofta,enrage,sinews,flemings,glanz,serjeant,shadrach,shallowness,ensnared,loyally,sneezed,darkling,subservience,nightingales,gaped,subduing,apoplexy,poorhouse,sunbeams,kaan,brigand,jahrhundert,chasms,jealousies,ditties,dignitary,wenches,dite,gesicht,improbability,shrewdly,sneers,bloodhounds,meed,impish,menaced,seneschal,deafened,hooting,cyrene,dejection,economize,prophetess,hatchets,witz,spoonfuls,unten,ebene,funereal,wrested,deceives,plaint,imperio,demesne,briny,nimbly,supped,calumny,sigismund,herrn,verger,ludicrously,portend,reves,spattered,couloir,straggling,cochon,berthe,acadians,comtesse,jailers,chaud,disastrously,intimations,arzt,xlix,heterodox,manque,codfish,debility,shirking,rustlers,demas,zaken,aloes,obliterating,victuals,certo,dully,leonore,exalting,chide,entrap,indignities,nombreux,rhymed,whirls,compassionately,hussar,scow,voorbeeld,beide,honora,remorseful,obstinately,zei,peste,aggrandizement,jotted,
unpopularity,deluding,boileau,naast,charta,royalists,lachen,hennes,nej,achaeans,cravat,genug,pinions,mindre,praetor,peche,sunburnt,superficie,grotesquely,mown,soms,vagrants,transept,patois,atlee,seuil,petrograd,aveva,bulged,bated,seines,thereat,aise,recours,cloven,apollyon,intemperate,confiding,fleisch,eares,compunction,bonum,unceasingly,herdsman,haat,frightfully,reprises,fierceness,remodelled,unpleasantly,szene,bouches,aggressions,spectacled,telegraphed,resounded,mickle,sagacious,moralists,abimelech,gehe,valise,prompter,provincials,distaff,imbibe,hisses,garcon,doel,freude,gnawed,sieht,oog,clattering,traite,bleus,tente,reverberating,incomparably,bearskin,ripens,darunter,benares,recitative,factotum,zoon,screeched,quare,anticipations,determinedly,calamitous,pria,hughie,egli,mopped,sacrilegious,fatuous,elocution,cilicia,retraced,palliation,kunne,misanthropy,protruded,hanse,incompetency,mebbe,plainer,chambermaid,sapping,perfidious,voyaging,humiliations,umbrage,fatiguing,awaking,presencia,portmanteau,moralist,farbe,legere,tormentors,distinctness,expiation,insinuation,indem,alehouse,practicability,swindler,standen,inquisitors,dreamily,frobisher,digo,motivo,gibbet,exactitude,promenades,grise,epitaphs,jostled,mannen,globules,herdsmen,conmigo,reprove,heareth,ipsi,inviolate,zoroaster,orations,vistula,laten,examina,erster,autant,schrift,resemblances,termina,cuales,lordly,complexions,despising,assiduous,verstehen,epigrams,dagny,thenceforth,girths,swerving,surpris,frappe,pobre,lebens,muerto,enfance,gesetz,portentous,conjurer,dramatis,receiued,sergent,hurls,habt,couronne,dullest,erschienen,venal,gebe,grete,lauter,gourmand,wearisome,sortir,exaggerates,gurgle,antislavery,laertes,apologetically,clime,poultice,ministrations,gendarmes,telemachus,sommet,remonstrance,capitulated,karna,prettily,reeking,cheapside,citie,zuerst,persuader,epistolary,flutters,elemente,maitresse,reappearing,dudgeon,pilasters,theban,kennis,unwisely,grammarian,figlio,peruvians,lateran,sente,reverberated,plenitude,faim,unpardonable,robarts,volgens,bowmen,blundering,dishevelled,exorcise,scurrilous,squalls,parla,vaste,jedes,shewn,hiki,vasudeva,objetos,briefe,valets,corruptible,pedlar,impassive,abasement,faints,vicomte,pillory,dieux,inquirers,orte,brahmana,toren,prostituted,quartering,amorites,disavowed,undulations,redressed,waifs,cuyo,siegmund,steg,harangue,liefde,yeomanry,lepanto,matilde,passepartout,gentil,ablest,faveur,dicho,whitest,bastante,handmaiden,humors,sollen,cooed,knabe,gunboats,comradeship,inopportune,exhaling,lurching,plumed,poesy,cheapness,scythian,proche,backe,sapped,starched,tasche,insieme,undistinguished,unes,gayer,seceded,belligerents,baser,ribald,coursed,habitants,brusque,officious,hert,gorka,flannels,contrivances,capitulate,wayfaring,kammer,dejar,disfavor,staden,umgebung,liveries,sieur,devez,anatomist,laundress,bugles,manie,swindlers,clandestinely,sitte,avere,fichte,coolies,edra,briars,tarentum,chaude,unfitness,annihilating,swathed,extorted,tanta,avaricious,entfernt,waft,popish,darning,pasos,crois,fidgeting,resinous,granit,flayed,paramour,enunciation,josue,frailties,haunches,morea,chastened,dropsy,impositions,wriggled,displease,agit,moneyed,halten,peligro,armee,langsam,toutefois,cloche,neatest,howitzers,mantelpiece,proclivities,rache,falkenberg,imitator,agonising,maximilien,tuer,meerschaum,impiety,loiter,actuelle,schwer,begot,suddenness,baneful,templo,wenden,twirled,furtively,betrayer,jingling,arrowroot,welcher,readjusted,assails,priestesses,jostle,admonishing,avocations,allons,humblest,haec,mohammedan,solitudes,insurre
ctions,lodgers,kunna,cacique,exalts,grec,cajole,mhw,swooning,wincing,unswerving,enjoyments,thirsting,savants,kentuckians,monarchical,celebes,divans,immodest,perquisites,flatters,gedichte,herzen,beurre,meni,sayest,lutter,heissen,voeux,juges,papists,jeer,premeditation,waken,tearfully,sagged,pugnacious,companie,bedecked,finalmente,soin,oftener,motioning,saunter,universelle,firmin,llamado,versant,flaxen,pseud,soie,tempter,miscarried,rivulets,corde,appertaining,nostre,prochaine,lohn,partridges,qualche,nooit,swum,dunkle,staan,brakeman,regretful,coasted,democritus,yawl,endast,permettre,drooped,mehrere,exacts,licentious,antiguo,fermer,deadlier,doest,romanus,agog,ponts,liii,yeomen,lothario,maal,charybdis,wazir,habituated,doff,fede,jests,brandished,jeremias,raisons,gouty,twined,comprend,resister,stoics,soldiering,viso,tyrannies,natuur,greenbacks,puesto,sullied,calvinistic,abridgment,frequents,faite,hoffnung,leipsic,bekommen,fiercer,entreaty,creaked,disconcerted,roule,interpose,saan,neveu,hearkened,mournfully,surprize,tenanted,kerchief,marvellously,allerdings,unenforceability,moralizing,phantasmagoria,glutinous,pretexts,recollecting,omdat,jemand,hundredweight,hags,severities,sobered,fournir,coiffure,forasmuch,lige,aliment,moeten,salir,caprices,laufen,blockaded,ignominy,tempests,scythia,recriminations,olim,geeft,dismally,insinuations,smiting,hapsburg,bevor,zeiten,lulls,pompeius,peux,misrule,unasked,illo,kuka,copiously,freien,wildernesses,perpetration,transmuted,abideth,blaspheme,blacking,quelled,threescore,sitteth,keenness,quickens,scornfully,puerperal,multis,worldliness,croaking,ignoramus,howbeit,sisterly,briers,ouvrage,faible,avidity,gascon,bergs,accustom,consiste,venez,prouder,pleaseth,cottonwoods,dienste,superintending,spectres,poetess,moluccas,leguminous,brigands,quarrelsome,moine,damnable,etruscans,poeta,tottered,theil,disdained,shrivel,ouvrages,avaient,firstfruits,sinne,daran,untying,slights,throbs,whitened,genoese,inclosed,couche,dismounting,procede,fattened,planche,vasari,freier,enkel,jupe,heaths,enjoins,terrestre,insuperable,recapitulate,vois,drays,rester,enceinte,starlit,wohnen,inauspicious,prescience,capitaine,magnates,predilections,picketed,knaves,sware,scampered,imposible,academical,krank,ploughman,heilige,mettez,conscientiousness,basilio,morceau,splendide,arabes,cire,acceptation,schlug,novitiate,humoured,idolized,rivulet,seethed,geest,etruria,geboren,senti,allayed,pored,perceval,wagen,antiquary,muscovy,shoemakers,zullen,diggings,legte,emancipate,achter,burghers,ignorantly,ancor,erlaubt,diviner,laisser,bleibt,discoloured,gooseberries,jahres,wolde,quarreling,enterprize,augustan,fruitfulness,slanders,quelli,embalmed,uprightness,stephanus,apposite,milles,slaveholders,kansan,parlez,nimi,arbres,kloster,zulus,limpid,bridled,forecastle,statuesque,polyphemus,knowed,encouragingly,harboured,foole,misschien,dolorous,benefice,unenlightened,sagte,croaked,symbolical,magistracy,alighting,schritte,foretaste,porthos,incoherently,ladylike,iphigenia,pleine,allured,jahrhunderts,lucilla,constitue,sogar,palpably,weder,improbably,expressionless,bowstring,sickens,jolting,soundless,hadde,freest,unspeakably,gestalten,unconquerable,contemplations,foretells,empor,pasteboard,mangy,artaxerxes,misapprehension,perche,reverential,sledges,schoolmate,utiles,denke,befinden,infallibly,unbidden,callousness,bloss,tooke,prefatory,herakles,extirpation,pantaloons,noiselessly,adventuress,fluch,commodious,pincers,freshened,artificer,animo,entangling,quarrelling,blackening,appeareth,partakes,regaled,disputants,freundlich,junks,ingen
uous,floundered,entrer,jeered,strabo,assignation,kleider,mismos,sheeted,beefsteak,undervalue,pensar,reden,particuliers,oratorical,sacerdotal,baying,dikke,dieren,fief,poate,repents,cleverer,scheiden,recommandation,nimmer,goaded,ecke,mislaid,rotund,zenobia,pickaxe,babbled,gentlest,sibi,besiege,blandly,hobbling,myn,miletus,scythians,mainspring,dinge,slake,drame,dirent,jedem,speared,attaque,galleons,sensorial,legation,strutted,leafless,deigned,slaver,iseult,recommence,giue,aventures,hellespont,anciennes,dalliance,youthfulness,privations,trouvez,monstrosities,assai,goest,bonbons,chroniclers,vitam,erregt,dignities,livings,ferryman,mockingly,caisses,devolves,perder,chemins,hoeing,debauched,doute,parlons,loquacious,vore,saada,annat,displeasing,intrusted,prudish,pelting,drizzling,soothingly,wayfarers,englanders,flouted,worthies,courtesans,heavenward,theodoric,meget,charmian,bezit,ustedes,exhilarated,ansicht,clanking,repugnance,joyless,execrable,lucrezia,loftier,stolid,unacquainted,simonides,pawing,balcon,visigoths,titter,otranto,defraying,mondes,charlot,deified,grecians,princeps,sumptuously,unemotional,coarseness,universel,enormes,piedi,flamme,selber,flitted,toen,gants,disproportion,counterpane,gulfs,gewalt,surnamed,logique,deare,venerate,tomahawks,scoffs,unsavoury,zephyrs,exemplification,waarom,pleader,lieben,bawl,casque,cleverest,convolutions,siendo,verloren,foretelling,munched,vrienden,receiveth,jene,ostler,waddling,pencilled,escalier,drachm,colline,plebeian,eintritt,ionians,bekannt,grammarians,pflanzen,undefiled,furred,segun,overhearing,puissant,donnez,blundered,meines,congealed,pierres,pouvoirs,maister,yit,blasphemies,covenanted,disparagement,anstatt,minut,teint,sachen,pretences,unimpeachable,meditates,cheerily,faintness,effaced,meself,beguile,revenus,dagar,rearguard,saide,inextricable,rameses,popery,trustful,lewdness,sanat,satiate,sorge,stupefied,treu,caire,brasses,lethe,secondes,tepee,euphemia,joue,measureless,scandalized,jerkin,stunde,aforetime,reflectively,trackless,patroness,impossibilities,inconsolable,shouldest,explicable,plucks,wreathed,criminel,alexius,marksmen,enthusiasms,slaven,standeth,geven,lesbia,quellen,worte,drave,blowed,vare,canting,propitiation,sinewy,gamekeeper,dulcie,agir,maakt,uproarious,gebruikt,penitential,glinting,seeketh,condescend,terrifies,humbler,expence,cavaliere,pettiness,slackened,heur,hija,predominating,auftrag,endureth,unapproachable,boons,vouchsafed,lunga,gamle,philibert,cordiality,billow,relativement,inconstant,effete,storehouses,carcases,crestfallen,iemand,gloomily,pouted,lunching,wakened,eerst,sidled,tartars,ebbed,steckte,issachar,astir,reasserted,trente,hardi,reeked,dispirited,insidiously,divined,revelling,mazzini,befahl,lovelier,odium,fettered,hustings,rasping,besotted,charioteer,papered,primum,clamber,adroitly,ferne,descente,holte,alders,tache,unformed,ducats,watchfulness,gottes,kleines,steamships,hvad,cime,sundered,irretrievable,roguish,tenir,maand,ovat,rapacity,sicken,elopement,ardente,worke,folles,besuch,rummaged,peons,incontestable,languor,israels,frivolities,mantilla,instante,slovenly,ambled,celebre,clementina,necesidad,hesitations,protagoras,curtained,purloined,lounged,rustics,purposeless,visites,skirmishers,flinching,certaine,trumpeters,disbelieved,anderes,tableland,plaatsen,infini,revile,unselfishness,burrowed,prussians,buttercups,footfall,cocoanut,cajoled,sublimely,tribunes,kraal,meilen,whizzed,dritte,multitudinous,javelins,grenzen,beatific,bigness,artificiality,jeering,maltreated,chaperon,consorts,stimmen,priester,muckle,vergeten,causer,respecte
r,bornes,propter,churlish,treasonable,stowing,twinkled,schal,existenz,swindled,vasta,ridicules,deres,wechsel,gracchus,undine,timorous,soeur,rende,ensnare,spurted,quarrelled,beggarly,mutineers,schwert,inseln,monter,keiner,fascinations,suum,unhesitatingly,vivere,prieur,treacherously,repas,fyra,disengaging,propres,moping,obviated,roue,kracht,merveilles,fuerzas,lunettes,pirandello,blare,historiques,comest,sullenly,kurze,oppressions,steadier,miedo,trebled,demurred,conciliate,contenant,ransomed,donnant,bedchamber,chevaliers,aufs,calme,roughs,drawled,niets,ruhe,florins,einheit,sechs,tagus,lydian,pointes,ehren,remis,vele,imputing,endowing,spangles,peterkin,armer,simplement,brillante,servia,disunion,shepherdess,sextus,linge,lucht,rueful,sterk,unbending,ideen,anderer,beispiele,equinoctial,constante,varuna,jugement,inheritor,ginevra,tarried,remorseless,disputations,querido,apennines,gesehen,wirkung,redoubtable,interessant,antechamber,seasonable,clarisse,moche,platina,anden,viande,ravish,dubiously,battlement,gamester,byword,warded,stygian,referable,rigueur,jangling,parfois,doleful,baize,debasement,besieging,shrewdness,interstices,mayst,parried,demanda,principios,elbowed,zahlung,landschaft,furze,neighbourly,nahe,haast,sensitiveness,gelesen,gascony,pawned,outen,mendicant,exigences,keepeth,beginnen,vindt,giddiness,gebruiken,warders,senat,retributive,pyrrhus,vont,flagon,traduit,innere,geste,barefooted,chattered,overhung,demoralization,pebbly,stellan,abashed,samme,aurelian,sacristy,charitably,joka,boutons,folle,brooded,sylvanus,guter,dandies,oracular,undefended,lecteurs,kleid,hizo,humorists,unities,papiers,rakish,effervescence,enthalten,unworthiness,isaias,moraines,dorrit,unflagging,wur,corroborative,komme,ruffling,voet,hardihood,bougie,calleth,greenness,recrimination,basked,embarrassments,aureole,disgusts,nombreuses,tiden,sledging,igitur,footmen,recoils,quadrupeds,tahi,bewailed,morceaux,roughened,gewoon,thinketh,thoughtlessly,depute,besteht,returne,savours,edes,bulwarks,clods,maoris,mantled,encouragements,unfaithfulness,fenian,boten,eateth,bedraggled,chiffres,readier,ineradicable,floes,steadying,cowered,monseigneur,grotte,verschillende,pluie,dispassionately,mirar,holen,slacken,disgorge,warre,avantages,clamouring,attainder,followeth,communing,mischievously,communistic,jongens,thys,zweiten,chastising,mouvements,derisively,lopped,spoliation,pleasantness,meilleure,montrer,phosphorescence,daba,lustily,avantage,antediluvian,irreligious,vindicating,objeto,ascetics,creuse,scorns,laggard,vues,jadis,blockheads,saddening,llena,malcontents,gentes,nane,satins,danser,unmindful,indescribably,unruffled,inclining,aquellos,drapeaux,animosities,inured,pardoning,weshalb,somit,conoce,giorgione,enfranchisement,rebuking,perceptibly,cierto,vitiated,wizened,wintered,comique,sympathizing,beziehungen,townsman,continuer,gorged,mildness,luckless,maecenas,caracteres,gunwale,indigestible,jowl,prinzessin,unclosed,warten,causas,inclosure,voluptuousness,solide,paroxysm,merchandize,construire,meester,whetted,seraglio,scourges,corroding,lejos,leadeth,soupe,jongen,guiltily,teaspoonfuls,acquainting,parapets,twittering,augurs,admiringly,illumine,selten,awfulness,encamp,henceforward,scalped,huddling,erfolg,combated,evinces,gewinnen,deputed,clambering,surplice,factitious,fitfully,vrede,ascanio,perishes,oncle,laisse,blanches,vieilles,skulking,demur,monstrously,imposts,diaphanous,theodosia,wagged,aske,vilka,peradventure,surmounting,satyrs,grandsire,evasions,lumbered,cortege,rapidement,countenances,beholds,contradistinction,scampering,easie,tourna,s
ainted,inglorious,contrario,whereat,discuter,defrayed,kirchen,kaum,trouverez,repudiating,insupportable,undisguised,discerns,tantum,juden,deaden,victime,unalloyed,venial,widger,griselda,hansom,nonchalance,frapper,regarde,amoureux,cypresses,phrygian,lamed,workingman,scoffing,hulks,sauvages,breede,ruminating,honorius,abjured,jacobin,communiquer,nere,insincerity,persecutor,dichter,cloches,crevasses,singen,burgher,ferner,unstained,unflinchingly,subsisted,notaire,tamen,entro,songer,surprized,rehoboam,fromme,deputations,ringlets,retourne,scourged,survivals,mollify,commonwealths,blockading,shakspeare,triumphing,ecstasies,rends,nahm,bilden,bedclothes,impertinence,commissaries,languidly,sedulously,venne,grimaces,neger,loftiest,decembre,recommenced,stuhl,pochi,depopulated,upraised,formen,whereunto,fuit,vorst,unfruitful,conceits,shrivelled,geschenk,jesting,begriff,erfahrung,tendril,quoque,dayes,entendu,ercole,indes,beareth,sleighs,pensiero,licentiousness,uren,unshaken,englishwoman,limply,hereward,ahasuerus,pythian,compassed,hablando,unsettle,proconsul,coarsest,jenseits,woord,gentility,assizes,devons,serue,quadruped,honourably,insbesondere,chivalric,helgi,womankind,streng,penknife,copyist,eadem,entwickelt,solemnized,palpitation,haughtily,valentinian,kindreds,counterfeited,sweetmeats,tousled,unfastened,venire,courser,flaunted,canopied,dethrone,vouchsafe,hereabouts,blackguard,unitarianism,gegenwart,garrulous,eftersom,controverted,serviette,venga,amiably,schreibt,sowohl,nappe,fulsome,terribles,gauzy,verie,cornes,noires,echter,mangel,marcher,beetje,vostra,patrie,lvii,dilatory,unco,jagd,debase,hoher,alltid,wollten,distil,cinna,splendours,fronte,abreve,clinking,apposition,maddened,vaster,florentin,slouched,remonter,aguinaldo,sorrowing,revenir,hohenzollern,neere,devient,moeder,exultant,pilfering,trousseau,frisson,kaikki,unconquered,farces,connu,perjured,seeke,eloped,corpuscles,obscurely,dreamless,dadurch,lamely,curdled,haie,schoon,wonted,gallants,dasein,respectably,fixity,zehn,yelping,vaine,croesus,obdurate,ofte,tuuli,absolue,christabel,ransack,belisarius,schlag,taler,piously,quaintly,rationalistic,usque,partis,seras,schritt,disinclination,eingang,aloofness,arminius,dilating,parthia,felucca,premisses,glibly,putrefaction,unfortunates,pottage,ligger,tubercles,herzlich,manservant,unluckily,plumped,disinherited,resounds,crut,anciently,tiens,remaineth,ratione,begetting,gurgled,scheint,hopefulness,poil,voiles,hez,citer,dehors,vindictiveness,potest,lolling,aboue,extorting,adventured,elkaar,clattered,pouvant,oure,unsteadily,sufferance,muu,charmant,mede,raptures,dinna,barrenness,placidly,bawled,enkele,protoplasm,dyspeptic,gaue,diffident,affianced,communs,zeker,guileless,ebbe,wery,opprobrium,geheime,imputations,marchioness,pferd,capriciously,ganske,superintend,bantering,indorsement,perspiring,dissensions,baseness,blotched,implores,gewesen,digne,hillocks,jalousie,straat,nogle,solche,fretful,geheimnis,dresse,inquisitorial,circumspection,unsullied,spirituous,garrisoned,supercilious,soldiery,skirmishing,profaned,ordinaire,prochain,ebullition,avowedly,notwendig,remoter,reflexions,clamorous,sois,scullery,seemeth,etait,blasphemed,disconsolate,einde,antiquaries,quibus,whimsically,spinsters,hohen,fahren,exactions,cupful,lugger,bestimmt,patricians,atoned,tourbillon,causeth,unpromising,geluid,caissons,surcharged,stoff,quarreled,suckled,soort,pulpy,militaires,partaker,pigmy,censures,morir,digged,fust,confessors,kleur,braut,lacerated,promptings,vouched,obligingly,puo,yerself,jael,tragen,spinifex,unexpressed,lunched,scourging,haroun,
manfully,vidare,revolutionist,kennt,tracery,ebers,surmises,torno,bedingungen,falle,seemly,catched,saura,habet,preso,naughtiness,derecha,fastidiousness,demoniac,penury,wainscot,supernal,impelling,cellule,einzelnen,modeste,flits,vacillating,jocular,galop,jacobins,forsyte,fathomless,chiding,savoured,algun,marvelling,plentifully,wakeful,conter,dicen,homelike,swooned,unsociable,puisque,allgemeinen,fatta,drear,erreurs,buffoonery,rashness,pensamiento,impels,dissembling,consistence,intimating,dieth,missis,appeler,possa,aemilius,slunk,deswegen,coadjutor,footfalls,lombards,jego,jewess,endued,sorrowfully,iniquitous,tramped,ecclesiastic,agriculturist,hanc,hildegarde,waylaid,blustering,blauwe,uniforme,granaries,ombres,dolch,estaban,deras,dishonourable,bespeaks,smilingly,avow,whar,certa,assize,ducat,suuri,schrijven,nachdem,hundredfold,poing,knickerbockers,hechos,fiers,betook,caressingly,hooted,gjort,instanced,shet,corpulent,jacobites,stumm,veldt,springen,moros,tierras,mystification,eorum,recoiling,pshaw,erscheint,ruban,apoplectic,lingvo,basest,fitly,marchands,flirtations,conocido,unctuous,enlivening,sentir,mauvaise,beaumarchais,plaints,entfernung,startles,colonnades,theatricals,hoogte,intimacies,remonstrated,leichter,braying,nuages,lassitude,leibnitz,moonless,changeless,sagely,unfavourably,valorous,endurable,leid,prolix,trespassed,shews,longtemps,sidelong,principalement,clamored,einigen,scheldt,perte,idiosyncrasy,clucking,glaube,cualquiera,donjon,messieurs,goutte,workingmen,paleness,festen,alack,trivialities,tristesse,discourteous,dimness,besetting,daunt,boue,vorm,indisposed,rente,drog,strategical,thermopylae,ivanovna,landet,skola,amidships,meete,garder,buiten,beeves,nemen,alwayes,looke,preternatural,versuch,conduce,sien,centimes,feare,retourner,neder,earldom,indubitable,juifs,handsomest,decorous,chagrined,gemeinde,imbecility,ouverte,goud,buffeting,doorkeeper,absolument,schwarzenberg,bushrangers,bounteous,steine,lulling,toucher,steeled,patronised,whisperings,detests,haughtiness,ilka,defiling,frenchwoman,betide,estime,emolument,rivalled,prithee,wisse,expedients,beautified,precipices,llevar,walketh,mutta,diffidence,tablespoonful,meum,bestowal,tingled,hangen,conduire,unrelieved,morgon,ariosto,swindling,saragossa,gladiatorial,parthians,parer,reichen,bacchanal,perplexities,ablutions,arten,innan,vallen,tulla,unkindly,lovest,stratagems,carousing,envies,condescended,freighted,gange,compagnies,slackening,pardner,wondrously,dingen,teilen,shimmered,tror,anteroom,agriculturists,marins,slechts,watermen,citoyens,sorti,megara,mayenne,beardless,cheerless,tenido,goot,tuch,wacht,moistening,unprejudiced,explications,dissimulation,restes,pined,inculcating,combien,pensando,oorlog,plaits,fleuve,agrippina,neen,erit,satt,budded,liest,plaintively,devenu,threateningly,profligacy,gwendolen,subtil,meshach,videre,armie,hoffe,hungered,pecho,bluntness,kuin,lebe,gesticulating,pourraient,athwart,hermana,shambling,tenderest,ordains,propound,immoderate,acuteness,hewed,kindnesses,douze,unaccountably,neun,plainest,boire,sech,pesar,gavest,subtlest,racines,partaken,gruffly,etes,welkin,breviary,lineaments,unburied,insatiate,intolerably,discomfiture,puso,mirando,threepence,ebenfalls,libanus,unmercifully,milord,behandlung,velours,tochter,itse,noces,lampes,chary,quas,danach,wouldest,primroses,manumission,mortifying,gondoliers,krijgen,ministres,garbed,adelheid,memnon,nuo,desperadoes,nuage,sesterces,coucher,freunden,civilize,phial,faute,arrant,offrir,appealingly,multe,declamation,miscarry,complacently,unmerited,insubordinate,feux,assuaged,dukedom,e
fface,dazzlingly,peintre,looketh,whalebone,minutest,ungovernable,wellnigh,meuble,ziet,wittily,schmerz,foolery,exulting,habitant,craned,ennobled,profundo,arbeid,apuleius,pourtant,wantonness,scenting,beziehung,fik,flinty,comanches,ordnung,ceremoniously,gloire,wobei,hollowness,zeggen,jardinier,serai,plw,desierto,fancying,protuberance,largeur,divin,portait,tersely,deploring,sallies,frontiersmen,contraries,armful,envers,extricated,dissemble,bouteille,impost,countenanced,essayed,findeth,gesagt,zustand,pandavas,vaguest,fenetre,passen,feebleness,plodded,lesquels,excellente,gik,nieder,brise,facilement,inflaming,prete,augury,diabolus,revelled,mayhap,humbles,poetes,metier,personnages,demoiselle,unhampered,matelas,puisse,indissoluble,netta,nicety,tablespoonfuls,witticisms,enfeebled,surveiller,revolutionists,cozen,middel,penitents,imprudence,tiptoed,reicher,magyars,civilities,trussed,dulcet,sirrah,rapporter,festal,couteau,baronne,heartrending,devotedly,plancher,amies,steeps,salubrious,spearmen,houden,marriageable,imposture,mutinous,jabbering,tyrian,pourra,peremptorily,whirlwinds,despoiled,lugubrious,ringleaders,begriffe,listlessly,affronted,debout,probablement,daintily,pikemen,deinem,partager,exaction,unlighted,washstand,overspread,losse,piteously,politischen,tager,largess,weightier,plenipotentiaries,muka,insensibly,snart,contento,parchments,uusi,scotchman,repousse,ingratiating,bairn,poisoner,prodigiously,unerringly,qualm,aquel,marseillaise,uncharitable,bestimmung,shiftless,visages,subjoined,pierrette,befindet,daubed,ostentatiously,unvarying,choisi,whereto,cottagers,voluble,ingratiate,helpmate,ligt,soldats,gloaming,adamantine,weinig,kansa,rudest,forcer,einfluss,brunnen,oreilles,varit,braucht,gutes,irresolute,mogen,aarde,smartness,burthen,attente,bekend,lleva,unsparing,bewegung,paard,alcide,espied,effrontery,vacuity,pillared,queerest,impolitic,defiles,byles,indubitably,mottoes,molti,questioningly,generalship,debasing,victimes,demurely,talar,donker,peuples,humains,comun,prettiness,usurpations,plebeians,habia,meurs,philosophique,sloops,regierung,savez,gesang,gick,saturnine,trinken,hungering,unreasoning,morto,thoughtlessness,pobres,rasped,celestials,florrie,turneth,childishness,glauben,revenged,radiantly,gefahr,prohibitory,destine,forestalled,converses,commonplaces,waggons,interet,duenna,outwitted,summat,bespeak,pocos,waarde,wheresoever,compromis,wyth,obwohl,partei,meddlesome,bustled,neckerchief,brahmanas,misgiving,farthings,gebiet,disfigure,rancorous,forsakes,torpid,doctrina,atem,canne,intendant,bereit,fiere,swiftest,confidants,unwonted,astonishes,joues,recondite,sightless,blunderbuss,besondere,chiselled,unconsidered,hottentot,tarda,fausta,beholders,quelles,vertes,invitingly,gloated,wearying,straitened,disdainfully,romish,servitor,ingrate,unvisited,officier,bairns,bedeutet,sorgen,autrement,quinze,entreating,longues,voisine,insensibility,washerwoman,ufer,caldron,offert,summum,reiche,irreproachable,quels,penser,sentimentalist,tenia,avea,sublimate,mitad,deutlich,encima,bowsprit,antrag,childishly,envying,austerities,largeness,hemlocks,chiffre,sadden,passionless,haunch,signifie,thronging,plainness,wolfish,breakfasted,quidem,semblant,ressort,intrepidity,pferde,affectations,filthiness,rayons,sommeil,hateth,spitze,fomented,opfer,dietro,iesus,conjuncture,vivante,docility,moravians,wretchedly,preciso,nosegay,fidgeted,trooped,deadened,brimful,antwoord,mistrusted,florentines,circonstances,bedarf,commencer,fevrier,vyasa,assailing,unseasonable,blod,minstrelsy,voies,paunch,sobriquet,horatius,serapis,soeurs,chaffing,wahr,
unlettered,prowled,uninviting,buttoning,agesilaus,entender,jaunes,tragical,charakter,vesture,spricht,richtung,salver,milliers,profoundest,reproachful,petulance,grovelling,companionable,kindliness,convulsively,laudanum,residuum,tombeau,servility,strew,dites,unendurable,ennen,cassock,khasi,aufgabe,excommunicate,erwarten,zaal,arabesques,avowal,interposing,retirer,pathless,revers,juist,trooping,rencontrer,marteau,stanch,perspicacity,pawed,swains,hinzu,undulation,versuchen,highroad,wesen,gondolier,douleurs,ascendency,sammen,hasted,sehnsucht,stupefying,pealed,stets,citoyen,requite,larges,omnibuses,windless,hinc,sanguinary,mohammedans,tyburn,souhaite,firmest,neus,dumbly,allemands,inquisitiveness,fourni,erkennen,bethought,debajo,lebt,slipshod,rundt,produire,heeds,tevens,doted,overmuch,chastening,waxen,cadaverous,stroom,spielt,croire,contriving,waddled,circassian,especie,whin,greediness,preferment,geschreven,ziele,remounted,ontvangen,strewed,artifices,assenting,anaxagoras,unge,cousine,presentiment,sturdily,falleth,quitte,censorious,ouvre,mekka,noontide,ewigkeit,tausend,pranced,augenblick,pudo,glowering,suppliants,heare,personnelle,gezien,schemed,disentangled,qualite,husbandmen,fruitlessly,guerrier,huntsmen,photoplay,dritten,duchies,cuirass,flotte,hireling,overweening,joies,abruptness,sieh,moed,warred,nourriture,niver,conducteur,regicide,dedans,roved,remplacer,ajoute,auquel,siller,touchingly,hisself,bliver,industriously,confusedly,eying,befit,edified,profondeur,portier,malignity,revient,sibylla,karakter,becometh,poort,halloo,pasturage,loisir,puits,voort,soixante,voglia,pandu,geval,pouvait,smarted,paroxysms,coquin,mirthful,vergangenheit,coeval,pharao,ceinture,galvanometer,finna,graceless,slinking,enlever,brocades,ennobling,prevenir,harten,pleasanter,hindoo,falseness,drap,betimes,natuurlijk,procurer,malefactors,lysias,handmaids,gefallen,gaar,straten,dommage,bewail,rhenish,twitter,erano,schar,irreverently,misjudge,revengeful,interdicted,suppliant,monotonously,benignly,certes,averil,sauntering,zusammenhang,gebracht,inexpedient,confiscations,heartiest,untutored,forbears,exulted,uninfluenced,gallies,omne,taches,tourner,marcius,pealing,campagnes,quoniam,leathern,ecclesiastics,interceded,nimmt,intelligibly,craftily,chaplets,abends,englischen,bestaat,makest,nerved,braccio,philosophe,couvert,musketry,caribs,enfranchised,maer,casements,eatable,dets,meanly,profonde,theyr,aspecto,disinterestedness,soumettre,plebe,nier,jeta,blaspheming,benutzt,pantheistic,slumbered,hostler,fous,quartette,hoed,stettin,brusquely,rankled,nonconformists,intonations,scandalously,sirup,exercer,reproachfully,pauvre,rivalling,obtenu,eeuw,howat,existencia,delusive,sepulchral,sarebbe,fuor,pareil,remplir,fourscore,teacheth,guld,droned,balles,traiter,rapporte,wellen,abler,wallowed,recompensed,quil,chamberlains,disgracefully,brung,manches,quei,atteindre,asuras,lamentably,achaean,loups,lowliest,braggart,somersetshire,indisposition,mithridates,reconnu,nutriment,unkindness,tranquille,froh,gardes,talo,rascally,gardien,sanoi,strumpet,zigzags,discoursed,erreicht,haare,accost,manoeuvred,libels,blighting,vileness,blessures,soldados,abase,outcries,stampeded,bithynia,cupidity,soundest,consentement,risings,fervid,truculent,illimitable,gayly,forbearing,kvar,despatching,potentates,putteth,impetuosity,jutted,encomium,decke,behoves,querulous,mener,manchus,pemmican,discomfited,dienen,sidste,steden,mollified,sulphurous,entierement,parterre,subtile,ziemlich,quon,enfolded,gedacht,belongeth,parian,emot,nowise,vaan,verdient,detestation,theophrastus,indiens,sallied
,infinitude,unchristian,nachbar,hubo,quaff,scuffling,commotions,belang,numidia,craning,indistinctly,aldrig,zes,houdt,chiefest,casuistry,siis,manchmal,purposing,justness,hundert,simpering,soothsayers,charwoman,mittag,facere,aquella,chasseurs,countersign,frem,cambric,thron,spluttered,leetle,quos,glinted,facon,coupable,lowliness,lesquelles,turc,trundled,desolated,kindles,shineth,woning,falchion,asperity,pousse,dran,secretaire,effulgence,banisters,extricating,valt,hesitatingly,affray,pensively,meretricious,promiscuously,overset,chuse,ruido,undefinable,scorning,multa,lacedaemonians,aristoteles,friede,censers,aufgenommen,tandis,talke,trifled,intelligente,delightedly,chimerical,kanske,importunate,disgraces,zeg,agitations,piratical,indigence,acquirement,mutely,billowy,querelle,suzerainty,imperturbable,milliners,pensa,fecit,gleiche,vacillation,innocente,toilers,snored,heathenism,rancour,apercu,facetiously,riband,pecado,slaine,vaut,disdains,gedaan,hvem,amain,cavil,kohta,huskily,unwarrantable,glowered,curates,anent,wenigen,konnten,worthier,vooral,leered,palmy,religieux,truncheon,hovels,milliards,unlovely,abjure,plenteous,piedmontese,debauch,holocausts,imperatively,philadelphus,darky,ravening,kentuckian,methought,fagot,foulest,rills,gaven,treize,leise,dragoman,micht,affrighted,unsocial,loger,dejectedly,tamely,reposing,ausdruck,phlegmatic,mightest,dispossess,cataloguers,gibe,drily,languorous,paire,tode,foulness,zelfs,calumnies,scythes,shirked,disapprobation,propitiate,hilft,usurpers,lagen,estis,inspirer,gainsay,ambrosial,atteinte,intanto,conciencia,provender,schulter,navire,matronly,andern,sourire,ungracious,overawed,mukaan,relenting,bijna,angesehen,coude,dickon,vapeur,maintenir,sluices,geweest,erziehung,zitten,importe,raisonnable,canot,grundlage,hessians,undreamed,equable,oppressively,chacune,zaak,pourront,indorsed,kasteel,indulgently,takaisin,superfluity,pantalon,gossiped,generalissimo,coquettish,zegt,konung,accepter,expiate,commiseration,voudrais,counterpoise,sawest,inquiringly,betes,romanism,northmen,folgt,cuya,schicksal,travaille,thae,leitung,unfeigned,impalpable,murmurings,conjointly,excitements,zambesi,vilken,comeliness,verra,hambre,indiquer,grossness,cuivre,noget,countrey,carefulness,blijft,douceur,vaporous,oarsmen,seigneurs,toilsome,proprieties,listlessness,waarin,pities,tredje,mortify,gipsies,neapel,unhallowed,injudicious,gesetze,remonstrances,uninterruptedly,revanche,suam,ither,unmanly,mazy,forebodings,fickleness,tuvo,gelukkig,geschlecht,unsheathed,freilich,heiligen,palest,impulsion,empirische,vano,sitten,illis,votaries,factious,braw,verdadero,shabbily,hollande,camarades,slighter,yere,homewards,trous,achten,rapine,materie,snuffing,schwarzen,sterben,bezig,abnegation,yeare,vostre,kerl,widerstand,betrachten,erinnern,betake,arbeiter,klaar,outspread,thim,sendeth,winde,lichaam,zetten,whirr,alarum,doigt,daarom,liten,declara,gebrauch,jambe,paie,unmerciful,apporter,demoiselles,reprobation,lache,burgomaster,camest,sonder,extravagances,esset,fellah,verlassen,gewinn,wakening,vacantly,discoursing,cablegram,tourne,attendre,schlechte,lauf,injuriously,spluttering,felsen,gloried,argives,paarden,japhet,cabane,hende,zacht,promontories,mignonette,supplicate,joindre,freundschaft,pattering,unromantic,sophistical,frescoed,sauver,nobleness,sealskin,bewilder,gwine,zeven,consulship,aminta,brauchen,fuite,unclouded,affability,affright,recantation,threshed,malen,gladdened,weisen,fausse,ruses,expostulation,faisait,heraus,paille,delawares,devait,tirer,reines,galled,esel,verres,atteint,slaveholder,fuisse,meddled,soldaten,pr
otestation,cambyses,enmities,becalmed,genou,verbunden,hver,muut,leprous,lambent,wolken,sacristan,lavishing,wending,disquieted,solchen,benedictions,niggardly,herte,teki,ankunft,solides,gesetzt,dangereux,evincing,vraie,fauteuil,naturels,eue,buckboard,noisome,veinte,malades,impassible,oblations,worten,intoxicate,prenant,graue,entweder,exasperate,curtsey,bestimmten,exclusivement,babyhood,sojourned,censuring,disrespectfully,mesmeric,apprehensively,roofless,despoil,direst,razones,inroad,terminer,vainglorious,wenige,benevolently,archbishopric,hatchway,eigenschaft,pinnace,slighting,vorher,falsch,maintien,ellinor,sepulchres,extirpate,adrianople,imposer,schlimmer,wies,imperiously,kuu,rhetorician,totta,portefeuille,unconcern,toucheth,requited,geburt,suffit,peloponnesus,postern,irremediable,hamilcar,quavering,unperceived,leonine,botte,wonderingly,haversack,liet,ennemi,handen,dawdling,spiritless,thorwald,rejoindre,inutile,signally,loitered,benefices,hewing,abysses,beginnt,mouldering,schmerzen,everlastingly,descried,aquellas,vosotros,miten,froward,elend,audaciously,indelicate,einrichtung,umfang,chinamen,prostrating,ceremonious,slaveholding,unworldly,ideality,fece,fathomed,boord,waan,plafond,erzeugt,gekommen,tranquilly,delectation,honoria,couldst,prattling,suivent,terram,prate,submissively,whithersoever,parcourir,assise,soutenir,girdled,abased,versucht,niemals,antient,semblables,despairingly,alguno,munificence,throwed,gervaise,habitude,impetuously,providentially,veulent,coom,harangued,provincias,wahren,glorying,cockade,unfrequently,inconstancy,betrifft,ninguno,doun,gratifications,impenitent,gayety,arriver,sagesse,kwam,foule,turm,bildet,blijven,sternness,vede,lames,gunst,complot,knapsacks,engross,tristes,appelle,gracefulness,communed,calmest,glutted,largement,dallying,witticism,fatted,blauen,hottentots,penances,brengen,glimmered,bretons,servitors,refus,fehlt,cxar,ewig,airily,gegeven,schluss,maudit,autoridad,kinsfolk,erinnerung,essayer,distrusting,tartary,genoeg,fremde,droops,blandishments,individus,remonstrate,improvident,handsomer,blazoned,vatten,plainte,damps,machten,bonhomie,adverted,soweit,sacerdote,productiveness,gestes,druse,quaver,trouw,ausgang,versuche,wrapt,draweth,prit,tampoco,versification,sojourning,acclamations,aimez,unfaltering,loftiness,emendation,behandelt,clownish,criado,tellement,fordi,remettre,redound,auront,objektive,moodily,discords,outworn,honeycombed,gedanke,venant,anspruch,drauf,trouvent,allers,superannuated,schauen,viands,amiability,kaisers,victualling,religieuse,wirklichkeit,envoie,dicha,strenge,unwearied,punctilious,turne,entscheidung,egotist,jouissance,falsche,schier,ursprung,importunity,distractedly,zele,vexations,seraient,piastres,boche,bewitch,allures,frisking,rottenness,rufen,sentimentalism,clanged,jupes,rechter,privily,ungenerous,asketh,eigenlijk,absented,euboea,fiefs,honom,sympathised,upbraided,thermidor,ignominiously,mischiefs,appertain,joko,perd,enviously,wahrscheinlich,joyed,gegner,einfache,bhishma,clairement,eate,maddest,adresser,cabalistic,conventionality,italiens,aliquid,lidt,whiffs,lleno,manufactories,twelvemonth,undimmed,gjorde,heah,parvenir,faithlessness,vilain,contrives,wistfulness,genannt,geleden,munificent,fortement,glaive,maggior,convoked,veste,malefactor,gelangen,dotage,palliate,oxus,pedants,quaked,malade,affronts,explique,reproaching,excellences,venturesome,roues,severer,fremd,fusillade,muita,feareth,endroits,maanden,bareheaded,girding,anzi,taire,kopje,illud,ilman,maxence,wrings,ferma,hummocks,detraction,dicht,perdre,charbon,foure,subserve,cherubims,toilette
s,liebhaber,lenity,songe,respecte,sabots,podia,insolently,blik,dimpling,quiconque,ehre,littleness,homines,gammal,highnesses,awaked,upbraid,unsubstantial,muren,dezelfde,proselyte,authoress,fabel,grandee,pleasantry,setteth,chaldea,pensioned,yeardley,tiefe,considerately,gattung,denkt,poursuite,teuton,pestilent,sofern,bountifully,desisted,senecas,jollity,enrica,inexpressibly,sunshiny,dicitur,handeln,begint,oeufs,amanuensis,dreariness,animi,comprenant,smites,schlacht,schauspieler,bezeichnet,orisons,reposes,vart,hauses,geduld,fieri,mischance,koska,hospitably,metaphysician,vulgarly,construit,invectives,poitrine,perdus,blive,voulu,pompously,discourtesy,hazarded,curtsy,palpitating,marido,plaisirs,ennoble,dira,unsought,palsied,sartin,panegyric,profanation,unfitted,halfe,drinken,imprecations,virtuously,inconceivably,vouloir,assiduity,entstehen,abschied,asiatics,artificers,ohren,murderess,pouvons,radicle,volontaires,villany,forded,superintended,abominably,zweck,familier,enervating,tumults,philippus,pouces,forswear,astuteness,heiter,liebes,kenntnis,gehn,molte,lediglich,musst,hauberk,domestique,geluk,unspotted,altname,legt,bounden,declaimed,unexampled,todes,tearless,basely,vorstellung,labios,vond,hubiera,speakest,teemed,killeth,preternaturally,genommen,pauvres,negress,seien,haranguing,quaintness,verser,stoical,tyd,aptness,retrouve,mehreren,malediction,givest,discreditable,brilliants,unseeing,connived,connais,mourir,reicht,crabbed,obsequies,perverseness,latticed,pleadingly,besiegers,busying,brazo,cudgels,heisst,paroisse,befehl,machte,soldierly,musste,richten,exhalations,rapturously,forelock,luy,esteems,agonised,hirelings,hoogste,jauntily,erscheinen,declivity,vivants,reviling,sixe,altid,retrouver,ailed,garlanded,abjectly,vernunft,churl,vrijheid,guds,rendue,erden,erant,telegraphing,archly,statesmanlike,souverain,yeares,duft,gezegd,kust,woorden,quelconque,dunghill,declaim,bucklers,stouter,seuls,unpractical,sehe,reverenced,derfor,hominum,voeten,liveried,disfavour,genially,gezeigt,modish,plomb,gennem,prier,vorn,deigns,careering,thenceforward,trug,hasdrubal,kanssa,hempen,miltiades,growed,decrepitude,thinkest,effluvia,ordres,figurer,grimness,repassed,meditatively,sinecure,mettent,stopt,riseth,kanzler,invloed,verlust,figger,underrate,laune,jederzeit,pardonable,vnder,choleric,inclose,bided,beggary,desto,boeotia,pleasantest,deil,gashed,exordium,tocsin,alcun,spitefully,gehalten,tonnerre,abbia,brocaded,forwardness,drawling,testily,gebunden,ruhig,unfasten,tyran,precocity,resistless,wangen,spasmodically,mesdames,resignedly,festoons,aboute,varlet,viennent,threatenings,erkenntnis,prevision,dacht,squaws,cesse,mahomed,plunderers,navires,tremblement,comfortless,incautious,luxuriance,petto,creditably,jolies,impressiveness,cheyennes,finit,needeth,superabundance,precipitately,unceremonious,sidewise,anacreon,lisping,sonna,delante,rideaux,prig,gezicht,parfaite,vituperation,manifeste,cabman,fawned,oever,untaught,juley,einiger,voorkomen,gelijk,forsworn,imperilled,sichtbar,promptitude,indiaman,cantered,allurements,bataillon,lasst,omkring,juicio,noin,distressful,justifier,bestimmungen,verbinden,bestimmte,foremast,bestaan,stimmung,meeste,accorder,thirsted,irruption,professedly,geschwind,groweth,stupefaction,lanterne,larmes,harangues,remorselessly,appartient,naturall,stupide,dexterously,extempore,viscid,abaft,auraient,reproving,ottilie,waer,scandale,turnus,helpen,begonnen,pestilential,schaffen,merchantmen,flammen,atter,ensi,circumlocution,queenly,livest,grandees,devenue,adjure,allant,obstreperous,gnaden,olet,heedlessly,soif,lolled,fla
tterer,stube,sentimentally,gowned,tutelary,hindmost,furent,faibles,monkish,zouaves,ineffectually,contraste,duidelijk,turbaned,guillotined,conformably,meane,zugleich,disdaining,solcher,ouvrier,zieht,lowness,annoncer,unpleasing,disgracing,disant,begon,heartiness,recompence,petulantly,prinzip,casteth,rhetoricians,sulkily,minuteness,solemnities,vexes,tomando,impecunious,avond,menschlichen,loob,aliis,snaky,confessedly,slecht,wheedle,hushing,gxi,corpore,ungraceful,queerly,schwere,parfaitement,holdeth,straggled,picturesquely,mainmast,disquisition,tiefer,vorgestellt,dulness,pistoles,unexceptionable,finnes,soumission,liebt,maie,centaines,havde,mutinied,terwijl,palanquin,contenir,milesian,poursuivre,lacedaemonian,volgen,respire,gehad,untrammelled,stentorian,flatterers,tomber,cantering,minces,foible,questionings,choeur,kehrt,manacled,haud,thereabout,contenta,soone,hauptstadt,daheim,heedlessness,coquetry,wended,getan,leggen,onkel,barbadoes,wifely,tantas,cuius,rouler,expliquer,mortel,worthiest,pusillanimous,personnage,swaggered,accepte,forbore,gravelled,publikum,opportunely,odoriferous,insensate,showeth,causeless,partem,dennoch,imprudently,drollery,makt,uncongenial,feront,noght,philosophes,sententious,reconnoitre,doigts,eatables,intorno,quiera,sabines,catholiques,housetops,rostro,descry,zouden,dachte,drona,complaisance,tinkled,rappelle,bewailing,entrenchments,llegado,stilte,sternest,vijf,vaches,befitted,preeminently,enervated,profiter,ceremonials,sedately,choisis,trone,gabble,searchingly,somewheres,patriotes,tyrannous,wigwams,paysan,blevet,ooit,suffisamment,monosyllables,sluggard,gelegen,dissembled,verlieren,ieder,impudently,jotka,contrariety,unprovided,prinzen,ruhm,cerveau,inclosing,osaa,supping,anteil,diplomatist,barefaced,plighted,faudrait,unterschied,fermes,verborgen,ofttimes,neemt,steersman,caitiff,thebans,keek,aient,seyn,brumaire,embroil,pennon,athirst,gnashed,neighing,durchaus,glaces,magnanimously,compagnon,anchorite,boisterously,chancing,dagegen,tantos,prenez,momente,sterke,provinz,withall,lippen,donnent,consorted,miry,hollanders,perh,exactement,exacte,entend,gewonnen,moindre,humeur,souple,proserpina,fluss,conclure,dotter,effectivement,feelingly,noised,bondmen,unseres,bashfulness,vaunt,wollt,greatcoat,unmeaning,turcs,untrodden,nerveless,insurrectionary,ruisseau,refuser,quondam,zimmern,raillery,faciles,accordant,mixt,ruft,humide,sensibles,prudente,indissolubly,teils,treten,geschlossen,extenuation,favori,compagnons,merriest,loftily,pourrez,placidity,hicieron,gueule,regne,doffed,herodes,quatorze,tegenwoordig,usurer,voluntad,geniality,twopence,froide,rampe,hearkening,flippancy,breastworks,ruleth,pellucid,couvre,frighted,hearest,evadne,kreise,oublier,idees,irreligion,bruits,waarschijnlijk,prodigality,bessere,vuol,enveloppe,freshet,stoutest,takest,livelong,joyeuse,serez,citadelle,appeare,schaden,sublimes,verfassung,opprobrious,cnut,propitiatory,voyez,acquirements,drearily,grenze,estuvo,violences,hideousness,drawed,bewegen,satte,appartenant,paquets,synes,parecer,mechlin,diciendo,collines,cabals,scherz,disait,atli,superscription,lieue,filched,suffrages,darkies,maitres,swineherd,unworthily,disturber,foresaid,redoubts,boding,ouvriers,benumbed,wenigstens,carouse,habere,composedly,paleis,nilus,eenvoudig,heiresses,schien,pistolet,ambuscade,repine,thinges,geheel,amants,jingled,autrefois,breakfasting,noeud,regardez,zufall,drowsily,religieuses,voisins,verfasser,nogen,engraven,nahrung,gaoler,bancs,waarop,jolis,evasively,draps,weisheit,habitantes,brouillard,resentfully,acquaintanceship,declamatory,elate,juif,halb
,geister,quiso,gleicher,supplicating,schlaf,zahlreichen,trembler,wickedest,bekannten,adroitness,bestir,helst,multitud,wachten,auxquels,dropt,schoolmistress,obloquy,profitless,mourant,wijze,saidst,flucht,unconcealed,mettant,coursers,disent,mohammedanism,finir,abstemious,krankheit,cannonade,otti,brume,grandmamma,fahrt,moeite,tediousness,verdadera,ongeveer,horreur,licet,ouvertes,warbled,genomen,vuestra,clamors,complaisant,votary,hesper,flossy,zufrieden,geloof,luxuriantly,loopt,haled,grizel,certainement,duquel,inharmonious,amatory,todavia,hindoos,warme,officiers,meaneth,videtur,knavery,dije,blivit,prennent,harrowed,appris,podido,stod,mussulman,unhesitating,sybarite,montrent,leaue,fulco,irresolution,geschickt,schlagen,proverbially,waywardness,maturer,nennen,treiben,servius,bepaald,daraus,faudra,caresse,bijzonder,benignant,appartiennent,domestiques,trifft,arraign,khoja,cawing,fragt,gilds,bottes,antipathies,afeard,bishoprics,marier,bewegt,teutons,whelps,bestehen,victual,healths,heutigen,kertaa,benignity,whitsuntide,gesund,coxcomb,shrewdest,couverts,hecha,jener,undistinguishable,satrap,haen,stateliness,copses,richesse,poursuit,adown,brokenly,coffre,gilberte,eddying,couvent,hawser,circumstanced,werry,muratori,heartlessness,foully,boors,quailed,esquimaux,peint,helas,broils,contenting,troublous,nulle,kinswoman,puissent,bunten,silencieux,gegend,quaffed,fervency,schuldig,sortes,courbe,bethink,eind,comen,serried,careworn,abstractedly,besitzen,unbent,frolicsome,foudre,overrate,directoire,jambes,betweene,stolidly,gerechtigkeit,throned,feind,gnade,saisir,farine,affably,lendemain,aristocracies,hexameter,volontaire,pracht,cravate,aikana,irgendwo,fanns,parricide,strewing,prosperously,allurement,curtsied,mither,recreant,expiated,bedienen,roula,blott,allait,reihen,tournant,entgegen,bareness,shamefaced,bords,perspicuity,gegenstand,visitant,mulle,organes,kriege,connue,annos,enow,jocund,unutterably,entdeckt,winna,brahmanism,appius,inextinguishable,batavian,remarquable,knaben,betokened,griechischen,braccia,merchantman,habited,betrachtet,sympathising,hvide,rejoicings,draga,entreats,conciliated,foeman,confute,voulait,unexpectedness,indispensably,gevoel,endearments,interj,wheedling,touchant,aliud,coyness,quarante,zuvor,tirant,teilnahme,dirige,mantling,extenuate,interessen,battre,quartiers,bracht,vormen,disinherit,restent,aufenthalt,calomel,ouverts,entsteht,disquietude,naething,enormities,kerchiefs,helft,remercie,beruht,genoux,artillerymen,hoeren,flatteries,unfading,gehabt,dight,jouir,waylay,benefactions,angenommen,pitilessly,pattered,varandra,assister,daies,cacha,moest,uncomplaining,tulee,pillowed,courtes,sayde,saisi,linien,temor,imploringly,unsuspicious,picturesqueness,kende,unresisting,besitzt,yez,tronc,begann,musingly,blieb,protract,connus,disconcert,argive,profond,choler,pinioned,tarrying,hatless,baith,epigrammatic,ilmarinen,usurers,boded,dallied,seekest,couverte,dettes,schoot,messire,vorschlag,semblent,geschehen,seelen,traversa,vassalage,offenen,manasses,zuster,breake,auxquelles,designedly,whensoever,conciliating,frucht,discouragements,gingen,semblable,gegensatz,inundations,gelegenheit,scandalised,cinquante,pudiera,algonquins,comported,bange,fasse,servian,stond,unterschiede,propitiated,hogsheads,contumely,ollut,connaitre,provoquer,herrschaft,erinnert,clamoured,lacedaemon,peines,meint,bourgeoise,nerfs,aiment,begge,possit,nomme,plis,piquancy,unpremeditated,desirest,declaiming,bestimmen,marchesa,dizzily,pauperism,samnites,schlief,livrer,sobald,nettled,allerede,odeur,comprends,peroration,preuves,dahin,verbergen,aandac
ht,vertreter,daarna,lourd,wilfulness,betrekking,grunde,retenir,esteeming,fallait,ressemble,klage,hauing,prolixity,sonner,subterfuges,stof,zahlreiche,harer,expostulated,barbarities,prudery,bivouacked,fusil,langt,passagers,firesides,vicissitude,salido,allerlei,joyousness,vorsicht,behoved,porticoes,gebirge,tragedian,fastnesses,nebst,waarvan,ruminated,reprend,commonalty,lapset,guerres,indorse,suffisante,curst,flounces,upbraiding,revenging,feebler,venger,miteinander,chaffed,overstrained,consolatory,houre,einzigen,spreken,contemporains,heut,augured,verran,sanscrit,halfpence,cutlasses,cupfuls,tremulously,quavered,puir,governesses,besluit,hetzelfde,veracious,wesentlich,readiest,disconsolately,squally,captaine,demandez,inzwischen,seules,cumbrous,palings,satisfait,geschikt,devoirs,rappeler,croit,orten,habent,didna,demoniacal,voraus,distempers,execration,drest,colonnes,tabooed,retenue,guicciardini,gaed,vuestro,cierta,einfachen,hundra,belike,saltpetre,forborne,cuyas,tardily,satisfaire,dicere,verbrechen,zichzelf,superabundant,vilja,versteht,brengt,scudding,verschieden,destinee,deprecatory,larboard,keinem,manuscrit,shrubberies,volkes,pertinacity,amabel,parme,herrlich,hunc,flurried,avevano,deferentially,souviens,mazarine,infiniment,overborne,rempli,goeden,reinen,engager,jocose,shawnees,vaterland,blessure,restant,maist,ursache,oublie,eminences,obscur,afstand,kepe,cailloux,enemigo,toits,weite,pm,video,info,ebay,dvd,website,photos,forums,yahoo,server,pc,feedback,blog,options,audio,fax,rss,porn,faq,sep,powered,electronics,database,microsoft,url,update,downloads,apr,hosting,videos,tech,linux,jun,listings,sony,google,environmental,pics,sponsored,eur,pdf,usr,homepage,lesbian,logo,airport,phones,cnet,hp,eg,ip,cameras,ratings,paypal,thu,rentals,worldwide,anti,nokia,tx,anal,interface,technologies,gmt,xml,input,sexy,mb,multi,graphics,prev,ads,mini,usb,php,trademarks,phentermine,keywords,msn,programming,isbn,az,updates,desktop,pst,fucking,blogs,evaluation,implementation,angeles,networking,australian,kb,connect,dev,vegas,module,pricing,dvds,documentation,coverage,automotive,developing,milf,ringtones,xbox,www,settings,monitoring,nc,llc,hardcore,provider,techniques,rd,websites,servers,keyword,username,fuck,paperback,classifieds,providers,upgrade,auctions,therapy,samsung,affiliate,admin,designated,integrated,cds,ipod,porno,motorola,strategies,affiliates,multimedia,xp,tits,interactive,developer,sitemap,lab,cvs,gamma,weekend,lcd,dj,parking,ct,hentai,laser,icon,basketball,stats,hawaii,nj,clips,rw,vhs,criteria,pubmed,logged,laptop,checkout,tripadvisor,zoom,anime,spam,bytes,gb,bc,consulting,aa,lingerie,shemale,parameters,jazz,profiles,mom,singles,amounts,usd,mg,pharmacy,constitutes,collectibles,infrastructure,intel,soccer,math,healthcare,preview,devel,rs,voyeur,cisco,certification,bookmark,specials,bbc,avg,panasonic,permalink,viagra,src,faqs,trackback,revised,broadband,pda,dsl,webmaster,dna,diff,sql,specs,ss,yeah,sexo,javascript,gps,acc,euro,encyclopedia,interracial,tn,suppliers,playstation,annotation,gnu,lesbians,aol,modules,backup,personals,kevin,perl,bike,utc,albums,verzeichnis,hosted,developers,kits,variables,agenda,template,investor,wildlife,elementary,sponsors,unlimited,printable,hardcover,setup,booking,ericsson,supplier,bluetooth,tm,upcoming,scores,weblog,nh,alerts,mysql,offline,lifestyle,converter,blowjob,safari,pdt,parameter,adapter,processor,node,hockey,micro,laptops,regulatory,db,ph,epinions,affordable,databases,psp,ds,discounts,boobs,jennifer,demo,lg,gourmet,nfl,avatar,dildo,featuring,misc,calculator,holdem,awaren
ess,spyware,packaging,wallpaper,biggest,alumni,hollywood,wikipedia,diabetes,ml,wow,mapping,indexed,grid,plasma,voip,consultants,implemented,sf,blogger,kg,textbooks,seminar,latina,nasa,sexcam,accessibility,templates,tab,router,concrete,folder,womens,css,upload,milfhunter,mc,metro,toshiba,qty,airline,uniprotkb,beastiality,lp,consultant,researchers,unsubscribe,bio,upskirt,exam,logos,milfs,sustainable,pcs,honda,cinema,ag,blowjobs,deluxe,monitors,sci,edt,pmid,recruitment,siemens,expertise,medline,innovative,tampa,ks,python,tutorial,cruises,moderator,tutorials,collectables,scripts,abc,stereo,operational,airlines,livecam,hobbies,telecommunications,bestiality,biz,voltage,nintendo,vinyl,highlights,designers,ongoing,imaging,blackjack,analyst,reliability,gcc,ringtone,oriented,desktops,semester,cumshot,applies,casinos,filters,nv,notebooks,algorithm,semi,proteins,exp,debian,epson,terrorism,cpu,allocated,anytime,nr,layout,initiatives,lol,mp,optimization,genetic,modem,mph,evaluate,toyota,nationwide,vector,limousines,destinations,pipeline,ethernet,postposted,nba,busty,coordinator,epa,coupons,cialis,bb,ron,modeling,memorabilia,alberta,org,okay,workplace,wallpapers,firefox,eligibility,clinic,involvement,placement,vbulletin,funded,motorcycle,presentations,wiki,radar,citysearch,nsw,pci,guestbook,pizza,rc,bmw,mpeg,shoppers,cst,ceo,twiki,counseling,medication,shareware,dicke,configure,institutional,metabolism,rm,pdas,outcomes,sri,thumbnail,api,acrobat,thermal,config,urw,regardless,wishlist,sms,shit,trailers,syndrome,iraqi,foto,tabs,gm,rt,shopper,nikon,customize,sensor,telecom,indicators,thai,emissions,dd,boost,spanking,supplements,icons,tranny,catering,aud,camcorder,implementing,labs,dynamics,crm,rf,cumshots,bukkake,shorts,td,amp,sm,usc,environments,trembl,blvd,amd,emails,wv,insider,seminars,ns,vitamin,processed,functionality,intermediate,billing,diesel,bs,promotional,chevrolet,compaq,authentication,showtimes,sectors,bandwidth,img,schedules,cached,rpm,florist,webcam,nutten,automated,pee,nipples,tvs,manga,mhz,orientation,analog,packard,payday,deadline,robot,assess,gnome,gadgets,automation,impacts,cl,ieee,corp,personalized,gt,conditioning,teenage,nyc,partnerships,slots,toolbar,basically,genes,firewall,scanner,occupational,hs,integer,treatments,camcorders,basics,rv,struct,genetics,punk,enrollment,interfaces,advertisers,deleted,rica,inkjet,peripherals,brochure,bestsellers,eminem,antenna,bikini,decor,lookup,harvard,podcast,interactions,nike,pissing,plugin,latinas,customized,dealtime,temp,intro,zus,fisting,tramadol,jeans,fonts,quiz,mx,sigma,xhtml,recordings,ext,minimal,polyphonic,outsourcing,adjustable,allocation,michelle,ts,demonstrated,handheld,florists,installing,ncaa,phd,blogging,cycling,messaging,pentium,aka,sampling,refinance,cookie,goto,calendars,compatibility,netscape,rankings,measuring,tcp,dv,israeli,medicare,skiing,hewlett,flickr,priorities,bookstore,timing,parenting,fotos,britney,freeware,fucked,pharmaceutical,workforce,nodes,ghz,targeted,organizational,skype,gamecube,rr,titten,excerpt,halloween,methodology,housewares,resistant,recycling,gbp,coding,slideshow,tracker,hiking,jelsoft,headset,distributor,archived,photoshop,jp,bt,diagnostic,rfc,downloaded,sl,seo,isp,nissan,acoustic,cassette,initially,hb,jpg,tc,sunglasses,planner,stadium,mins,sequences,coupon,ssl,gangbang,opt,flu,mlb,tagged,bikes,gp,submissions,oem,lycos,zdnet,broadcasting,artwork,cosmetic,terrorist,informational,ecommerce,dildos,coordination,connector,brad,combo,activation,mitsubishi,constraints,dimensional,mozilla,toner,latex,anymore,oclc,locat
or,pantyhose,plc,msg,nylon,palestinian,trim,pixels,hispanic,cv,cb,procurement,espn,untitled,totals,marriott,starring,referral,nhl,optimal,protocols,highlight,reuters,fc,gel,omega,evaluated,assignments,fw,doug,saver,grill,gs,aaa,wanna,macintosh,projector,std,herbal,retailer,vitamins,vid,panties,connectivity,algorithms,bbw,collaborative,fda,turbo,thats,hdtv,asin,spotlight,reset,expansys,connecting,logistics,kodak,danish,scenario,fs,approx,symposium,nn,weekends,screenshots,deviant,adapters,macro,mandatory,syndication,gym,kde,viewer,signup,cams,receptor,piss,autos,deployment,proc,directive,fx,dl,starter,upgrades,tapes,governing,retailers,ls,cbs,spec,realty,instructional,phpbb,permissions,biotechnology,outreach,lopez,upskirts,debug,boob,exclude,peeing,equations,bingo,spatial,respondents,lt,ceramic,scanners,atm,xanax,eq,unavailable,assessments,cms,footwear,beijing,utils,phys,sensitivity,calgary,dialog,wellness,antivirus,previews,pickup,nascar,mega,moms,addiction,chrome,ecology,botswana,nav,cyber,verizon,enhancement,clone,dicks,lambda,baseline,silicon,beatles,soundtrack,lc,cnn,lil,participant,scholarships,recreational,electron,motel,sys,solaris,icq,yamaha,medications,homework,advertiser,encryption,downloadable,scsi,focuses,toxic,dns,thumbnails,pty,ws,bizrate,sox,gamespot,wordpress,vulnerability,accountability,celebrate,zoophilia,univ,scheduling,therapeutic,travesti,relocation,np,competitions,tft,jvc,vibrator,cosmetics,concentrations,vibrators,estonia,dt,cgi,showcase,pixel,focusing,viruses,gc,stickers,leasing,lauren,macromedia,additionally,nano,copyrights,mastercard,updating,kijiji,conjunction,cfr,validation,cholesterol,slovenia,folders,routers,starsmerchant,arthritis,bios,pmc,myspace,theorem,nb,stylus,topless,structured,jeep,mba,reload,distributors,levitra,mono,particles,coordinate,widescreen,squirting,rx,apps,gsm,rebate,meetup,ddr,rec,forecasts,sluts,ciao,ampland,chem,shopzilla,payroll,cookbook,uploaded,americas,connectors,twinks,techno,elvis,latvia,jd,gpl,irc,dm,bangkok,photographers,infections,brisbane,configured,amino,clinics,mls,saddam,threesome,handjob,transexuales,technician,inline,executives,audi,staffing,cognitive,closure,ppc,volt,div,playlist,registrar,jc,cancellation,plugins,sensors,freebsd,acer,prostores,reseller,dist,intake,relevance,tucson,swingers,headers,geek,xnxx,hormone,childrens,thumbzilla,avi,pichunter,thehun,columnists,bdsm,ide,valium,rpg,cordless,pd,prot,trivia,adidas,tgp,retro,livesex,statewide,semiconductor,boolean,diy,interact,olympics,identifier,worldsex,jpeg,startup,suzuki,ati,calculators,abs,slovakia,flip,rna,chrysler,plumbing,nuke,projectors,pharmacies,ln,introducing,nicole,latino,uc,asthma,developmental,zope,regulated,gmbh,buf,ld,webshots,sprint,inputs,genome,documented,paperbacks,keyboards,eco,indie,detector,notifications,msgid,transexual,mainstream,evaluating,subcommittee,suse,mf,motels,msgstr,volleyball,mw,adipex,toolbox,ict,browsers,dp,surfing,creativity,oops,nipple,behavioral,bathrooms,sku,ht,insights,midwest,karaoke,nonprofit,hereby,containers,integrate,mobiles,screenshot,kelkoo,consortium,pts,seafood,rh,rrp,playboy,fg,mazda,roster,symantec,wichita,nasdaq,ooo,hz,timer,highs,ipaq,alignment,masturbating,comm,nhs,aye,visibility,reprints,accessing,midlands,analysts,dx,sk,locale,biol,oc,fujitsu,exams,aj,medicaid,treo,infrared,tex,cia,sublimedirectory,poly,dod,wp,naturals,neo,motivation,lenders,pharmacology,bloggers,powerpoint,surplus,sonic,obituaries,belarus,zoning,guitars,lightweight,tp,jm,dpi,scripting,gis,snapshot,caring,expo,dominant,specifics,itunes,cn,newbie,ba
li,sponsorship,headphones,volkswagen,marker,strengths,emirates,terrorists,airfare,distributions,vaccine,crap,viewpicture,volvo,bookings,minolta,gui,rn,abstracts,pharmaceuticals,andale,remix,thesaurus,ecological,cg,appraisal,maritime,href,benz,wifi,fwd,homeland,championships,disco,endif,lexmark,cleaners,hwy,cashiers,guam,preventing,compliant,hotmail,refurbished,activated,conferencing,trackbacks,marilyn,findlaw,programmer,vocals,yrs,foo,gba,bm,nightlife,footage,howto,entrepreneur,freelance,screensaver,metallica,headline,str,bahrain,academics,pubs,shemales,screensavers,vip,clicks,mardi,sustainability,formatting,nutritional,weblogs,timeline,rj,affiliation,nudist,ensures,sync,telephony,realtors,graphical,aerospace,meaningful,shortcuts,voyeurweb,specifies,logitech,briefing,belkin,accreditation,wav,modular,microphone,moderators,memo,kazakhstan,standings,gratuit,fbi,qatar,porsche,cayman,rp,tba,usgs,kathy,graphs,surround,lows,controllers,consultancy,hc,italiano,rca,fp,sticker,stakeholders,hydrocodone,gst,cornell,mailto,promo,jj,schema,catalogs,quizzes,obj,myanmar,metadata,floppy,handbags,ev,incurred,questionnaire,dept,euros,makeup,troubleshooting,uzbekistan,indexes,pac,rl,erp,gl,ui,dh,fragrances,vpn,fcc,markers,assessing,eds,roommate,webcams,webmasters,df,computational,acdbentity,handhelds,reggae,whats,rides,rehab,allergy,enzyme,zshops,condo,pokemon,amplifier,ambien,worldcat,titanium,contacted,cdt,recorders,casio,postings,postcards,dude,transsexual,pf,informative,girlfriend,bloomberg,beats,scuba,checklist,bangbus,lauderdale,scenarios,gazette,hitachi,divx,batman,hearings,calibration,eval,anaheim,ping,prerequisite,sao,pontiac,regression,trainers,muze,enhancements,renewable,passwords,celebs,gmc,hh,adsl,advisors,finals,fd,acrylic,tuner,asn,toddler,acne,listprice,libs,cadillac,malawi,pk,sagem,knowledgestorm,ppm,referenced,gays,exec,warcraft,catalyst,vcr,prepaid,electro,vietnamese,lexus,maui,handjobs,squirt,plastics,postcard,tsunami,internationally,psi,buses,expedia,pct,wb,smilies,vids,shakira,qld,dk,findarticles,routines,issn,podcasts,sas,ferrari,outputs,insulin,mysimon,ambient,oecd,prostate,adaptor,hyundai,xerox,merger,softball,referrals,quad,firewire,mods,nextel,rwanda,integrating,vsnet,msie,wn,liz,ccd,sv,burlington,researcher,kruger,viral,aruba,realtor,chassis,dubai,llp,pediatric,boc,dg,asus,techrepublic,vg,filme,craps,fuji,brochures,tmp,alot,benchmark,highlighted,antibody,wiring,ul,js,webpage,hostels,pn,wendy,diffs,mumbai,ozone,disciplines,nvidia,pasta,serum,motherboard,runtime,inbox,focal,bibliographic,incl,hq,propecia,nbc,samba,inspections,manually,wt,flex,mv,mpg,retrieval,cindy,lolita,carb,importantly,rb,upc,dui,mh,discrete,sexuality,polyester,kinase,televisions,specializing,pvc,blah,mime,motorcycles,thinkpad,cunt,feof,bunny,chevy,longest,tions,dentists,usda,workstation,flyer,dosage,urls,customise,marijuana,adaptive,enb,gg,fairfield,invision,emacs,jackie,cardiovascular,ww,sparc,cardiac,learners,gd,configuring,guru,convergence,numeric,kinda,malpractice,dylan,rebates,pix,mic,basename,kyle,obesity,vertex,bw,hepatitis,nationally,andorra,mj,waiver,specialties,cingular,bacterial,lf,ata,bufing,pam,dryer,nato,funky,secretariat,scary,mpegs,brunei,slovak,mixer,wc,sbjct,demographic,washer,springer,evaluations,helicopter,hk,powerseller,ratios,maximize,cj,workout,mtv,optimize,leu,namespace,align,peripheral,confidentiality,changelog,orgasm,condos,greensboro,tulsa,fridge,qc,simpsons,upgrading,pgp,frontpage,trauma,flashers,subaru,tf,programmers,pj,monitored,installations,spank,cw,motivated,wr,fioricet,rg,bl,vc,wx
,figured,currencies,positioning,heater,promoted,moldova,paxil,temporarily,ntsc,thriller,apnic,frequencies,mariah,usps,bg,planners,intranet,psychiatry,conf,wma,aquarium,cir,looksmart,modems,paintball,prozac,acm,glucose,norm,playback,supervisors,ips,dsc,neural,hometown,transcripts,collectible,handmade,entrepreneurs,robots,keno,gtk,mailman,sanyo,nested,biodiversity,movers,workflow,voyuer,subsidiaries,tamil,garmin,ru,fuzzy,indonesian,therapist,mrna,budgets,toolkit,erotica,dts,qt,airplane,istanbul,sega,viewers,cdna,harassment,barbie,soa,smtp,replication,receptors,optimum,neon,interventions,internship,snowboard,beastality,webcast,evanescence,coordinated,maldives,firmware,lm,canberra,mambo,bool,cho,jumping,antibodies,polymer,immunology,wiley,bbs,spas,convicted,indices,roommates,adware,intl,zoloft,activists,ultram,cursor,stuffed,restructuring,simulations,cz,cleanup,crossword,conceptual,hl,bhutan,liechtenstein,redhead,tractor,unwrap,telecharger,safer,instrumentation,ids,groundwater,gzip,ricky,ctrl,theta,lightbox,swaziland,mediawiki,configurations,ethnicity,lesotho,rfid,retailing,oscommerce,nonfiction,homeowners,racism,vaio,gamers,slr,licensee,bisexual,rel,ign,installer,powershot,bestselling,insure,packaged,behaviors,clarify,activate,tg,pv,sandisk,vitro,cosponsors,hyatt,burundi,demos,btw,psychiatric,tittens,teenagers,grading,valentines,vonage,wetlands,quicktime,underwater,pbs,vanuatu,erotik,supportive,vw,targeting,preschool,dw,hm,jl,hg,megapixel,booklet,cancun,reimbursement,turnover,cheryl,radeon,italicized,chromosome,optimized,ffl,upgraded,colorful,popup,mk,garnet,ppp,oceania,formulation,fresno,handbag,bypass,ies,logout,boyfriend,hogtied,wl,clipart,detectors,newsgroups,spectra,mailbox,athlon,iq,landscaping,mol,korn,directv,viable,deviantart,qa,hunks,appellant,xsl,lithium,ctr,planting,alphabetically,facials,calories,airways,refill,reagan,kazaa,einstein,pornstar,vcd,jumper,majors,headsets,toxicity,sz,denim,greenville,scat,neighborhoods,buick,slipknot,mst,residual,bf,bash,ngos,storesshop,postgraduate,daytona,wastewater,constructor,technicians,debbie,issuance,sj,mbps,nationals,ij,alito,waterfront,diagnosed,biotech,turkmenistan,woodland,iranian,unsecured,kyoto,cis,eb,barcode,xd,regulator,txt,postcode,makefile,ansi,vicodin,shawn,suv,lacrosse,crafted,eritrea,bbq,wh,debit,dmx,edits,unwanted,xr,bn,noaa,lemma,kyrgyzstan,sensing,postgresql,kbps,trac,dolby,ecosystem,pkg,dashboard,nikki,technorati,esl,alzheimer,jk,wk,handler,semantic,globalization,atv,vga,atari,sch,reebok,mfg,jb,blogthis,inspirational,wilmington,faso,sdram,motherboards,blk,inherent,jw,tailored,vodafone,romanian,xt,ucla,celeb,assoc,palo,usability,backyard,novell,refunds,newsroom,tina,kia,taxpayer,fb,cola,boise,bsd,saab,refinancing,cert,buffy,doctoral,backpack,npr,identities,tajikistan,sheraton,snacks,booster,taxable,imc,ufo,linksys,dentistry,renal,fedora,nyse,guideline,freezer,pcr,bnet,binoculars,demographics,enroll,daemon,buddies,kc,crashes,outlines,steroids,pogo,konica,hotline,amps,accountants,coefficient,transvestite,upstream,digg,ladyboy,hussein,biochemistry,duplication,scottsdale,ninja,tj,avalon,voucher,tw,wheelchair,gw,epidemiology,pentagon,diabetic,stressed,libdevel,dvi,biomedical,gameboy,subset,gucci,https,websphere,cheney,zombie,recycled,followup,nih,hdd,bidders,simulator,exporters,ninth,mutant,ssh,authoring,specializes,irvine,olds,ramp,jakarta,tl,pgsql,malls,jensen,impairment,scooter,wap,mcgraw,lr,cheerleader,edu,lotion,substrate,mmc,ashanti,homemade,ukrainian,freshwater,topical,rms,isdn,coded,alcatel,suriname,parkway,femdom,palau,duff,
ck,bonuses,scam,biking,microsystems,timeout,aerosmith,resellers,portfolios,ops,semantics,scarface,beige,auditing,rolex,amplifiers,coli,executable,pentax,restart,overstock,eps,hmm,explores,torque,memberships,renting,icann,ticketmaster,cdc,meridia,hsn,oncology,nf,woven,bloglines,audioslave,wikimedia,lipitor,remodeling,redhat,enom,haha,coordinating,holistic,salsa,encarta,childcare,dvr,cdn,soundtracks,napster,wong,debugging,rechargeable,engineered,jerseys,pw,superstore,hex,wg,blogroll,evite,micronesia,dreamweaver,diets,sauna,multiplayer,crt,caicos,qaeda,shareholder,kitts,tivo,deletion,ptr,macau,mudvayne,ceramics,freestyle,organizers,smartphone,cmd,hypertension,searchable,aguilera,servicing,counselling,ecards,acura,clit,cops,fedex,snowboarding,laserjet,cooker,lego,microbiology,internships,sgh,vectors,craigslist,hamas,shane,heaters,rdf,bj,visualization,newswire,hf,spermshack,brokerage,overtime,staind,wd,sourcing,filings,boeing,sizing,exceeded,presley,godsmack,labeling,whois,paradigm,msc,linguistics,snmp,standardized,liu,gta,nutrients,kosovo,barbuda,napa,abt,nickelback,lj,nazi,jenna,arrays,syllabus,rgb,rodriguez,animations,activism,fargo,chairperson,reged,leverage,sgt,anguilla,radisson,apc,hitler,handset,vulnerabilities,pga,activist,palestinians,ldap,prerequisites,maintainer,benq,lx,bv,knoxville,mentoring,pak,mos,didnt,classrooms,residency,deadlines,tk,bookshop,nonetheless,hifi,gf,forex,diagnostics,ew,dreamcast,tumors,vm,kyocera,nudes,rationale,hubs,pasadena,bissau,subway,hpa,fgets,citrus,cameltoe,reuse,sightseeing,therapies,widget,renault,comoros,suede,selector,gop,diaper,hotwire,ngo,pvt,atp,subtotal,coefficients,duplex,mvp,jh,analyzer,charset,clin,nutrient,zhang,underway,govt,cbc,excerpts,formatted,gorillaz,inhibitors,uu,prestigious,deploy,gameplay,autism,taxpayers,martinez,bombing,wwe,metrics,winxp,inability,goo,coronary,bldg,mediated,prom,scans,vaginal,isps,rookie,theatrical,interdisciplinary,kerala,enzymes,analytics,jacuzzi,lesbianas,parser,razr,jt,styling,snack,weezer,randomly,semiconductors,coca,acs,peugeot,bollywood,mentally,horoscopes,noun,xmas,silicone,cpa,dn,scoreboard,proliferation,squid,hw,customised,trilogy,hike,imdb,clic,ars,pharmacist,marley,typepad,xs,deliveries,recruiters,screaming,cygwin,gprs,png,pornography,robotics,chopped,contexts,init,svn,oslo,foreclosures,audits,pesticides,fave,residues,ashlee,viet,orbitz,invasive,helsinki,hardback,vuitton,nextag,inconsistent,narnia,alfa,twp,geoff,rename,atx,markup,breakthrough,ietf,beneficiaries,copier,uncategorized,xm,geforce,defaults,foreclosure,clarification,espresso,hendrix,homeowner,mib,tees,glu,winnt,tec,hydro,nonlinear,spokane,playa,gh,csi,radioactive,desserts,doi,socio,pcmcia,grooming,validate,nederlands,bst,filmography,outerwear,parse,dsp,implementations,attendees,toc,downstream,webcasts,accelerator,masterbating,flyers,tacoma,radiology,locals,mms,tungsten,typed,desc,datasheet,shutdown,xenical,computerworld,tattoos,peptide,sweatshirt,hassle,regents,gn,docket,dll,elsevier,nordic,privat,geometric,taxonomy,deli,intern,nsf,sata,xxxx,megan,allergies,bangalore,clutter,predator,xlibs,belgian,adolescents,djs,coventry,clamp,pricegrabber,cloning,args,madden,smugmug,visually,alright,laguna,limo,aligned,pesticide,transformers,avid,outpatient,lam,encrypted,wholesalers,coldfusion,dcr,shooter,switchboard,vince,fluorescent,cookware,lavigne,param,environmentally,gradient,ncbi,inserts,kvm,programmable,bibtex,chemotherapy,vr,dysfunction,livejournal,diazepam,rodeo,sampler,jovi,timetable,corrosion,positioned,checker,workstations,cathy,darren,cmp,udp,st
s,milfseeker,sbc,midland,synchronization,informatics,oakley,rants,tarot,didrex,brenda,purdue,figurines,footer,maternal,jedi,seamless,ghetto,thr,panty,subunit,aires,commercials,regulators,influential,carlson,yy,benchmarks,ug,emi,retrieving,reactor,kiribati,telnet,biker,parked,financials,peanut,converters,nauru,dishwasher,rcs,neurons,ios,feminist,yds,ive,ecosystems,gadget,cctv,leukemia,deco,ticker,habitats,remover,incorporates,brasil,unicode,prod,spreadsheet,lowering,discography,encoded,researching,pediatrics,sushi,asap,onsite,mapquest,deleting,compilations,therapists,appealing,lifestyles,dst,swimwear,applet,pricetool,threesomes,quinn,daewoo,antigen,ultrasound,mgmt,procedural,cern,macros,msa,aussie,advisories,lendingtree,belmont,acad,bilingual,barbecue,localization,customization,gigs,indexing,lori,spacecraft,ivoire,montserrat,telecommunication,coatings,eureka,pcb,sdk,preparedness,systemic,playoffs,adaptors,forecasting,specialize,drm,enya,masterbation,tubing,bloomington,conditioner,plaintiffs,vanessa,nucleotide,bronx,listmania,middot,netgear,panda,crc,symbian,emailed,chf,constants,clr,isuzu,webring,redirect,interoperability,msrp,tuvalu,shampoo,neoplasms,artifacts,vac,pseudo,dinar,carat,microphones,nobel,galaxies,verlag,scrapbook,dummies,magnesium,pagina,kenwood,roundup,imac,faxes,plump,uss,wwii,methyl,campuses,ramada,tesco,dba,architectures,acdbline,getty,cdr,msi,prog,firewalls,tester,polling,fifa,bins,consumables,highbeam,msdn,statistically,mps,agp,cont,adverts,programmed,lohan,unclear,aromatherapy,nederland,stockton,clearwater,trustpass,topology,airborne,antennas,sundance,lifecycle,dhcp,trucking,iraqis,shortcut,racist,profitability,unc,fairmont,globally,aaliyah,reboot,newsgroup,audiovox,phuket,jf,metabolic,sarasota,billed,lim,toons,danielle,exc,relied,mesothelioma,trafficking,eff,bizjournals,michele,kk,cutie,creampie,seoul,printf,columnist,transplantation,jerome,nwt,rammstein,scrapbooking,sequential,uniquely,goodies,auth,gina,sugababes,rsa,rcw,whistler,airfares,huntsville,ths,layouts,servicemagic,herpes,newsgator,contractual,akron,bh,rebounds,compressor,samantha,khz,webmail,carcinoma,taipei,stance,aps,kumar,gemini,kinky,supervisory,ostg,kl,chiropractic,throughput,netbsd,misplace,serviced,opener,vaccines,jigsaw,jumbo,unspecified,jsp,turbine,percentages,lett,maths,probes,frustration,americana,complexes,varsity,insurer,croatian,multicast,certifications,pradesh,px,proton,allegedly,kaplan,linens,roast,testers,debuginfo,complainant,inhibitor,knowledgeable,jimi,hummer,telefonsex,putative,hyperlink,presario,motorsports,getaway,robbins,kimberly,unsure,dinosaur,tac,ashland,dlp,royce,sophomore,antibiotics,landfill,warehousing,filesize,celebrex,verisign,registrations,wavelength,slashdot,transvestites,cheerleaders,friedman,coolpix,blocker,tawnee,hud,mov,entrepreneurship,percentile,linkage,lh,ripper,afp,kd,accomodation,mcafee,counselors,competitiveness,burger,microscopy,hyper,madthumbs,linkin,gmail,utf,scooters,reserveamerica,organisational,ezine,reactive,clipboard,gamer,alexa,pollutants,directorate,savvy,uploads,terri,norms,implants,alibaba,hormones,hype,addr,nfs,urinary,institut,condoms,directives,zelda,fetal,dong,reportedly,edi,kudoz,replay,flavors,ig,quickcheck,ziff,placebo,lotto,textures,pid,dep,seagate,nanotechnology,toggle,emc,spacing,frameworks,mergers,filtration,gpa,cpus,incremental,corr,sbin,scalable,ji,intra,wetland,olson,methodologies,fremont,someday,sha,exporter,mri,hum,ifdef,killers,multicultural,lasers,dataset,savers,powerpc,steelers,enhances,fucks,relational,graffiti,cassettes,pussies,does
nt,tiff,cnc,refrigeration,houghton,countdown,decker,natl,extern,enron,codec,broadcasts,checksum,directional,breeders,lethal,decals,macs,archival,seismic,baccarat,mommy,teenager,smokers,declining,lineup,hotspot,bellevue,hj,req,gigabit,worksheet,allocate,aftermath,roach,continuum,feng,pep,nylons,chipset,msnbc,hillary,factual,carisoprodol,tutoring,spectroscopy,gemstone,psc,phonephone,unregistered,moto,gonzalez,dior,pops,osha,goldberg,preteen,bonding,insurers,prototypes,proactive,issuer,sponsoring,malaysian,easton,sentencing,bulldogs,worthwhile,ideology,cervical,tallahassee,userpic,attribution,acta,yep,iec,differs,starters,uml,bur,kris,sizeof,spi,regs,shinedown,standby,arin,unisex,wallets,identifiable,ethanol,cannabis,rsvp,dynamically,grenadines,constr,subtitle,librarians,manson,autocad,powerbook,swinger,infiniti,ppl,williamsburg,supp,snyder,budgeting,backpacks,resale,mikes,scalar,unresolved,hep,seiko,electromagnetic,arial,tos,zoofilia,hcl,validated,sco,annotate,joomla,helix,sx,env,biomass,phs,hierarchical,lesions,financed,surnames,reconditioned,allergic,rk,abn,eliminates,addict,matte,melanie,secunia,metering,genetically,zebra,runway,admits,chennai,ions,asshole,faroe,glendale,speedway,sweatshirts,yay,activex,logon,recruiter,popcorn,espanol,disadvantaged,trong,niue,ux,supermarket,mfr,boo,hmmm,genomic,helpdesk,refuses,afb,adhd,avian,exe,visas,matrices,anyways,xtreme,etiology,tcl,mellon,webmd,personalised,hospice,zerodegrees,qos,exhibitor,sportswear,recap,toddlers,astro,chanel,jabber,hgh,hx,rotate,fema,subwoofer,amortization,neurology,ack,radiator,competencies,hotspots,trainee,nielsen,podcasting,centennial,tuna,bluegrass,wipe,acronyms,autographed,loader,latency,themed,messy,dmc,ments,empowerment,replacements,subtitles,gcse,acupuncture,workload,highlighting,grassroots,gentoo,redevelopment,cellphone,sax,triggered,frontgate,routinely,asc,uploading,managerial,nsu,celine,finepix,wks,tonnes,hypermail,thunderbird,investigative,letras,bylaws,wmv,lao,facesitting,breastfeeding,mccartney,anglo,kathryn,randomized,motivational,gratuite,gerry,kappa,neuroscience,blender,blaster,remediation,decoder,genocide,heathrow,indy,pantera,sidebar,authored,snoop,winery,rbi,photon,overlay,rusty,pharma,fayetteville,champaign,fyi,xc,pakistani,ics,apa,bitches,urbana,diagnose,secsg,franco,announcing,trivium,amature,showroom,cx,swarovski,liter,akon,brendan,condosaver,amex,classicvacations,blackpool,fh,inuyasha,nominees,cuz,viewsonic,dryers,fujifilm,ams,hallmark,counterparts,paced,engl,asians,seether,milestones,parkinson,mclean,checkboxes,lobbying,mgm,cinemas,islander,encoder,importers,impressum,phe,maroon,kontakt,ers,kawasaki,licences,bose,fountains,clones,crossover,situ,specificity,runoff,osteoporosis,approvals,bea,jukebox,nexus,cancers,tango,melting,garner,aba,karate,qb,optimizing,switchfoot,coldplay,vioxx,tty,bsc,celexa,guitarist,symmetric,kuala,bbb,geeks,jg,repec,insightful,unrated,diva,adsense,exemptions,integrates,csa,bookstores,cimel,hvac,leica,agendas,nws,busch,armani,bipolar,menopause,inbound,shortlist,gainesville,tiava,eclectic,headphone,regimes,readme,binder,xemacs,helicopters,ngc,intercontinental,workspace,customizable,softcover,realtime,electrons,subsystem,appl,kinetic,caffeine,xf,nib,httpd,slac,calorie,graphite,stroller,bowel,sweaters,mafia,futuna,predictable,susceptible,insest,skyline,sulfur,scams,lipid,tao,quot,ritz,networked,localhost,cabling,stills,perimeter,biased,cardiology,playoff,sti,chiang,payload,merrill,oldsmobile,grilled,misty,conserved,searchsearch,rewrite,vending,keygen,janeiro,heh,transexuals,prentice
,cumbria,diaz,vegan,congressman,recombinant,ubuntu,superstar,closeout,corel,kayaking,synergy,eta,backpacking,accidentally,bonded,sticking,dudley,osama,oprah,inflatable,beers,glassware,amc,kos,coursework,kayak,mayotte,repetitive,gears,orbital,musicals,lithuanian,amatuer,profiling,reps,hn,sequencing,panoramic,deskjet,rhino,polynomial,tau,nsa,stakeholder,signifies,stochastic,psu,santana,kidding,swansea,airmail,problematic,roadmap,ogg,lesbo,farrell,acknowledgements,tnt,skincare,heroin,mandated,workbook,xslt,hogan,omg,sulfate,timeshare,oldies,complaining,debra,cdrom,cle,thrillers,fortran,timeless,spouses,vv,ninety,tyr,cues,bioinformatics,chung,subpart,scheduler,hypnosis,kat,cornerstone,recycle,sos,lsu,gao,applicability,volatility,uid,hoteles,fav,disneyland,umd,gdb,bro,offs,listserv,fab,cond,tokelau,conformance,diecast,bittorrent,frankie,oa,iu,vf,alprazolam,collaborate,positives,hunk,allocations,lymphoma,rpc,freebies,frontline,thb,tele,imap,winamp,stoke,idg,polymers,grills,phat,zz,escrow,lumpur,dds,infospace,surfers,kauai,licensors,cpc,stresses,webhosting,peoria,peek,alr,ipsec,bournemouth,sudoku,undef,campground,sars,cme,predictive,vlan,aquaculture,sendmail,redesign,nitro,jackpot,cortex,entitlement,secs,mixers,accountancy,policing,michaels,ecc,kj,similarities,kv,hipaa,neutron,duluth,dogg,folklore,dimm,acoustics,pensacola,crs,condominium,wildcats,exhibitors,ssi,redwood,invoices,tyres,westwood,gly,estonian,bomber,songwriter,shania,coaster,typedef,strippers,macmillan,aac,woodworking,cbd,pricerunner,afl,catalytic,bethesda,privatization,sourceforge,sanford,membranes,testosterone,nunavut,biochemical,lennon,suitability,lara,kx,invitational,handcrafted,aftermarket,fellowships,freeway,digitally,hatchback,rfp,coa,subclass,rutgers,sampled,deploying,interacting,roanoke,treadmill,fiberglass,osaka,personalize,broncos,jorge,classifications,diggs,rafting,sle,jv,safaris,contaminants,scr,mitch,mailer,liners,asheville,quinta,kristin,bistro,lw,voodoo,caching,volts,excalibur,bots,sinatra,interpersonal,traumatic,ringer,zipper,meds,briefings,siblings,adversely,pitcairn,pdb,onboard,nucleic,telecoms,hehe,celeron,lynne,invariant,challenger,redistributed,uptake,newsweek,geared,svc,prada,tycoon,maxtor,plone,dcp,biochem,pte,ors,compactflash,antibiotic,vanderbilt,cps,overweight,metasearch,taliban,maureen,trekking,coordinators,digi,shoreline,westin,middleware,mips,roundtable,dementia,levine,ripencc,shoppy,filesystem,pow,docking,guidebook,atreyu,kylie,pilates,backstreet,packers,localized,lic,docume,xy,fte,stl,yd,archiving,disconnect,multilingual,gsa,immunization,ciara,cumming,interviewing,categorized,cmos,transmissions,receivable,ronnie,implant,playlists,thematic,brentwood,correctional,katz,jojo,buffers,talkback,servings,kobe,baylor,otc,frustrating,ssa,zeta,dinnerware,sclerosis,emotionally,carbohydrate,estrogen,odbc,ipods,openbsd,federated,shui,rockford,staging,statistic,torino,schizophrenia,predators,mpi,adhesives,inventories,uf,brokeback,dumping,ow,econ,footjob,warez,magenta,tagging,overly,triggers,constructs,impedance,dragonfly,underoath,refundable,hbo,billboard,huang,sportsbook,layered,neurological,subs,watchdog,starbucks,ibook,viability,kh,filler,smiley,genomics,yi,yum,researched,copiers,ovarian,airplanes,cello,wlan,sweepstakes,antigens,midtown,stabilization,kinetics,cocos,impacted,rumsfeld,beanie,thurs,spaced,freq,segmentation,soaps,courthouse,entrepreneurial,lebanese,psycho,maharashtra,ricoh,nrc,chavez,asst,overload,vikings,kanye,bootstrap,wtf,humane,scm,travelocity,fno,twink,nortel,koh,affiliations,pussycat,appropriated
,escherichia,mallorca,reversible,spd,oj,unclassified,bookshelf,htdocs,fps,initialization,expat,raider,farmington,timers,enrolment,glibc,lawmakers,larson,photosmart,centrally,acl,luv,dealership,eyewear,bakersfield,decal,addictive,clarinet,fiona,vn,gigabyte,dbz,rainforest,federally,macos,multinational,pornstars,nope,evo,aspirin,spoilers,machining,malibu,gatwick,shaun,redundancy,emo,detox,skateboard,automate,drosophila,branson,ortho,appraisals,flashes,lakewood,drupal,prac,carers,kramer,usaid,idc,keypad,richland,microbial,adc,caregivers,quark,zyban,electronica,mitochondrial,grinder,angie,octet,wj,cre,dinosaurs,mccoy,vibe,snapshots,ubc,meth,trendy,inpatient,filming,fread,backend,cartier,ageing,containment,keynes,protections,aliases,maximizing,handsfree,tomcat,walmart,interestingly,jules,ernie,elem,organisers,pissed,nite,mckenzie,lenox,darussalam,genital,mcse,cajun,csu,algebraic,astm,kristen,fsa,sgd,chromatography,overdose,nad,gallagher,mueller,cao,ladyboys,orgasms,plantronics,ftd,freezers,ibiza,reese,digimon,gastrointestinal,inspiron,pagerank,asm,smb,contrib,blu,matlab,netware,bse,megapixels,retriever,svalbard,pixar,dhtml,winme,func,gamespy,standalone,antitrust,equine,bros,proto,jared,tehran,dal,anesthesia,filemaker,libtool,wrongful,signage,psy,encode,admins,moc,dau,alvin,accolades,raton,stefani,infertility,servlet,collage,aces,depeche,benchmarking,xxl,teleflora,bankruptcies,gauges,blueprint,mccain,spiderman,bridging,flick,datum,canceled,empowering,ymca,facilitator,bos,macworld,wwf,galveston,rockville,banff,smc,lq,serv,ipo,tek,ipc,timestamp,musica,bib,stevie,rivera,dermatology,sandbox,mdt,pinkworld,cambridgeshire,premiership,luton,conftest,recursive,registerregister,fluorescence,kosher,additives,marketed,mandrake,camper,cpr,liquidity,lasik,galactic,merchandising,ombudsman,registrant,firefighters,placements,ih,elec,levin,academia,amiga,descriptor,pimp,gimp,cyclic,swimsuit,morphology,versace,printprinter,condom,westerns,dodgers,litre,correlations,textual,handsets,gandhi,inks,diarrhea,seahawks,mondays,insertions,itk,kms,couture,ativan,summarize,savesave,laminated,citrix,backups,turismo,animalsex,mayhem,washers,grep,xeon,polymerase,optimisation,easyshare,cvsroot,joplin,dialup,nx,thn,afro,biosynthesis,prosecutors,alloys,getaways,miquelon,wonderland,zine,conn,truman,jin,asynchronous,carla,messageslog,clearinghouse,dwi,facilitates,specialised,ramones,everquest,bernstein,skis,calc,marketers,itc,lipstick,brennan,kpx,saturation,stamford,alamo,comcast,hyderabad,attn,spaghetti,tues,boogie,abramoff,ean,fla,utilizes,lesbos,fasteners,sakai,lk,rajasthan,committing,inlog,laminate,earring,aggregator,datatype,postnuke,ergonomic,dma,sme,kp,refills,ibis,yyyy,unidentified,atl,ims,tractors,vx,spp,coed,audiobooks,sheikh,gk,hernandez,kiwi,ohm,truste,acreage,mfc,fingerprint,sorority,audition,mca,plano,nmr,lortab,leveraging,psychotherapy,mso,htm,stokes,lakers,ats,saxophone,cocktails,steroid,communicator,horticulture,dhs,resets,util,ordinator,bono,acronym,veritas,breathtaking,streamline,crowne,brunch,pundit,figurine,mutants,cyberspace,expiry,exif,goldman,msu,inning,fries,initialize,tlc,sybase,foundry,toxicology,mpls,bodybuilding,fta,nostalgia,acetate,pls,bmx,saratoga,terminator,badminton,cyan,cory,stacey,serif,portability,fsb,yearbook,lubricants,cns,hv,alameda,aerosol,mlm,clemson,goin,philly,coolers,multilateral,costello,audited,galore,aloha,dehydrogenase,aq,gx,postfix,fj,altavista,exponential,shi,gev,secretarial,todays,toaster,cater,omb,bac,kart,cpl,sbs,putin,questionnaires,profileprofile,serials,equivalence,vaughn,aviv,c
ondominiums,schematic,liposuction,swf,apoptosis,pneumatic,sniper,vertices,additive,professionalism,libertarian,rus,washable,normalized,uninstall,scopes,fundraiser,troll,teamwork,auditions,refrigerators,redirected,middletown,widgets,ontology,timberland,mags,videogames,concluding,vallarta,chopper,pinball,pharmacists,surcharge,tbd,ipb,latvian,asu,installs,malware,tsn,nguyen,horsepower,algae,sarbanes,alcoholism,bdd,csc,maximal,prenatal,documenting,scooby,moby,leds,mcbride,scorecard,gln,beirut,conditioners,culturally,ilug,janitorial,propane,appendices,collagen,gj,nigerian,ect,sto,makeover,esc,dragonball,chow,stp,cookbooks,spoiler,ari,avr,lamborghini,polarized,baroque,ppt,jihad,sharepoint,cts,abit,abnormalities,qtr,blogshares,motorsport,septic,citroen,gz,predicts,palmone,expedited,curricula,wmd,pms,raped,configurable,denon,sloan,flawed,cfs,checkpoint,rosenberg,ffi,iriver,callaway,tcm,dorm,lakeside,marquette,interconnection,gilmore,prc,taxis,hates,gamefaqs,cookers,ultraviolet,afc,haitian,dialing,unicef,identifiers,mentors,steiner,licensure,tammy,tz,dcs,soybean,affirmed,posix,brewers,mci,retractable,quickbooks,townhouse,stormwater,sgi,coco,pipelines,rudy,tia,congrats,msds,arafat,srl,splitter,wai,standardization,lakeland,thiscategory,classy,acxiom,triathlon,kbytes,thx,textured,doppler,entropy,snooker,unleashed,lux,nairobi,importer,isl,orioles,rotor,theres,ttl,dreamy,backstage,qq,lubbock,suvs,bmp,gasket,firearm,dss,bam,closures,participatory,micron,budgetary,pcos,ssk,pantie,bombers,spongebob,markus,ideological,wellbutrin,rheumatoid,swindon,cabernet,sek,dsm,understandable,shea,doctorate,binaries,slovenian,showdown,simone,spc,potentials,tempe,hklm,cores,borrowers,osx,bouvet,multifunction,nifty,unveils,skeletal,dems,oahu,rollover,infos,lds,thanx,anthrax,shockwave,westlife,bpm,tamiflu,touchdown,planar,adequacy,iomega,xa,fetisch,eastman,franchising,coppermine,ged,ecard,ue,kn,ferries,faqfaq,muller,fudge,extractor,usergroupsusergroups,svenska,pcg,myocardial,everytime,callback,encompasses,sander,conductivity,atc,vicki,danville,sedona,skateboarding,lexisnexis,deepthroat,outback,reiki,biopsy,peptides,awakenings,pim,sediments,appraiser,smp,gaussian,hustler,tensions,linkages,separator,schultz,adr,concordia,recon,fileplanet,royals,globalisation,borland,pastel,nottinghamshire,strollers,uninsured,picasso,mcgill,discriminatory,headquartered,travelodge,empower,hurley,pedals,teak,bitmap,migraine,sli,enum,lamar,aes,methane,pager,snp,aclu,westchester,nimh,quilting,campgrounds,adm,densities,isd,tional,turnaround,navigational,stargate,saskatoon,cen,minh,fingertips,sba,rockwell,vl,pepsi,rea,oversized,snr,sibling,ecs,burberry,nrs,cfa,inhibit,pps,screenplay,unabridged,ntp,endpoint,labelling,synchronous,heartland,cafeteria,outfitters,opp,homelessness,opengl,efficiencies,blowout,tickboxes,oversee,thresholds,isnt,waveform,deficits,flair,applegate,whitewater,tableware,bernie,workgroup,clement,cli,robotic,mana,mississauga,dialysis,filmed,staten,carole,schwarzenegger,summarizes,sludge,crypto,christensen,heavyweight,lps,zach,pdp,phantomnode,comptroller,scalability,creatine,embl,minimizing,gpo,dq,relativity,mojo,econo,shapiro,rituals,pq,ub,epoxy,watercolor,uncensored,trainees,tori,effluent,infousa,storytelling,polarization,bombings,smes,ionamin,fuckin,charlottesville,xu,aniston,barred,equities,feeders,jboss,mobil,scrolling,diode,kaufman,aloe,buckinghamshire,medford,underlined,whores,gemstones,bmi,viewpoints,exim,appalachian,dealings,phillies,ramblings,janis,centric,optionally,nightclub,geophysical,fictional,golfing,rubin,handlers,to
peka,openoffice,bugzilla,linus,taco,mcsg,humboldt,scarves,mla,repertoire,emeritus,macroeconomic,gundam,adaptec,tailed,voyer,hostname,excl,bx,arr,typo,merchantability,autodesk,jn,winged,attacker,catcher,haynes,siyabona,inverter,abi,motivate,mackay,bridgeport,assessor,fullerton,cpp,blockbuster,dz,amarillo,pixmania,pathfinder,bonsai,windshield,tomtom,spf,croydon,convection,jdbc,debugger,boing,ancillary,pointless,alibris,factoring,gyms,inhalation,faucet,bitpipe,arguably,techs,electives,walkman,midget,quan,commissioning,experimentation,saltwater,cpi,nis,wacky,sgml,anemia,biting,reits,savanna,crn,travestis,mmf,cancellations,paging,coe,nudists,fac,asean,airsoft,bontril,proliant,keeling,zh,accesses,jive,bullshit,casper,libstdc,xpress,datasets,webdesign,nicotine,comeback,gannett,curricular,downtime,takeover,lolitas,thessalonians,upto,joaquin,transistor,spotting,wagering,everest,disregard,hanger,outkast,pitbull,rtf,fairview,hires,alienware,mainframe,indo,compilers,guinness,heartbeat,blazer,timezone,merck,tanya,bmc,eia,colleen,bbbonline,participates,syndicated,lexicon,integers,zirconia,shortages,plumbers,jfk,raf,igor,hama,patton,pei,surfer,diapers,eas,waco,physiol,adp,outbound,breakout,fakes,stderr,kev,fomit,injections,remortgage,yogurt,complies,workaround,polytechnic,uber,shoppe,berlios,csr,penthouse,synthase,pistons,emule,sauvignon,bayer,carrera,dvb,cation,scientology,cdma,maxi,msm,rac,feminism,topps,webinar,dewalt,turnout,bruins,clamps,firefly,tabletop,monoclonal,wholesaler,typekey,partnering,mage,sqrt,israelis,cdp,headlights,monophonic,proquest,sergio,swapping,mev,particulate,bedfordshire,rockport,nist,negotiable,subcategories,quarterback,sudbury,hectares,upscale,scrabble,sdn,mta,docbook,kiosk,firstgov,hoodie,hoodia,payout,clinically,metacritic,obligated,decoding,presenters,teal,epstein,weblogic,ity,covington,esd,interconnect,chinatown,mindless,purifier,kz,greedy,rodgers,gloryhole,suppl,hotjobs,downing,gnd,libc,societal,astros,halogen,wyndham,osu,tuesdays,utp,superpages,coaxial,jpy,liam,sesso,arabidopsis,argv,hanoi,ccm,faucets,ballistic,payouts,rockin,supermarkets,bmg,nacional,csv,telstra,contraception,polaroid,underage,cardio,timeshares,atk,qi,logger,kool,oki,birding,detainees,indi,lymph,barrie,pollutant,closeouts,tolkien,undp,jbl,weekday,homecoming,increments,kurdish,chromium,mccormick,pcm,confrontation,shreveport,grower,frederic,unpredictable,dtd,capacitor,burnett,hilfiger,mda,litres,moroccan,nightwish,hess,wheaton,motorized,subgroup,chevelle,vets,assays,ramon,longhorn,backdrop,aerobic,vgroup,thursdays,dansk,tenerife,mayen,oldmedline,dunlop,caa,modernization,xe,fourier,businessman,watersports,lucent,commuter,orthopedic,hhs,tyrosine,shenzhen,initiating,grabs,erickson,marlin,casserole,canoeing,cca,ophthalmology,geile,clubhouse,licensees,evaluates,svg,protesters,fernandez,mvc,sleazydream,patti,mz,sennheiser,sheehan,maven,commute,staged,transgender,customizing,subroutine,pong,hertz,myr,bridgewater,firefighter,propulsion,westfield,catastrophic,fuckers,blower,tata,giclee,groovy,reusable,actuarial,helpline,erectile,timeliness,obstetrics,chaired,agri,repay,prognosis,colombian,pandemic,mpc,fob,dimage,fetus,determinants,durango,noncommercial,opteron,superannuation,ifs,haas,wimbledon,documentaries,mpa,rao,remake,arp,braille,physiopathology,seperate,econpapers,arxiv,pax,kalamazoo,taj,sinus,maverick,anabolic,allegra,lexar,videotape,educ,amplification,larsen,huron,snippets,conserv,dustin,wsop,composites,wolverhampton,banning,cpt,gauteng,ftc,watertown,pathogens,mft,uefa,jacking,radiohead,ooh,subsections,defin
ately,bod,yin,tiki,homepages,handouts,cpm,marvelous,bop,asnblock,stretches,biloxi,indymedia,clapton,beyonce,smf,nabble,intracellular,infoworld,boyz,waltham,geisha,dblp,briefcase,mcmahon,cq,mcgregor,modal,marlboro,grafton,phishing,addendum,foia,kirsten,yorker,memberlistmemberlist,gam,intravenous,ashcroft,loren,newsfeed,carbs,yakima,realtones,xtc,vdata,interpro,engadget,tracey,wac,darfur,fragmentation,behavioural,kiev,paranormal,glossaries,sonyericsson,dex,emoticons,carbohydrates,hms,norwood,appetizers,webmin,stylesheet,goldstein,wnba,englewood,asf,hottie,stripper,pfc,adrenaline,mammalian,opted,meteorology,analyzes,pioneering,ctx,spreadsheets,regain,resize,medically,tweak,mmm,alicante,graders,shrek,universidad,tuners,slider,cymru,fprintf,irq,dads,sdl,ebusiness,hays,cyrus,courtroom,baht,relocating,synth,filthy,subchapter,ttf,optimizations,infocus,bellsouth,sweeney,aca,fpo,layup,laundering,fre,nazis,cumfiesta,newbies,mds,piles,vaginas,bezel,avatars,twiztid,facilitation,ncr,xb,voc,rts,applets,pdfs,cac,teh,undercover,substrates,evansville,joystick,knowledgebase,forrester,xoops,rican,uptime,dooyoo,spammers,nuclei,gupta,tummy,axial,aest,topographic,westport,majordomo,wednesdays,burgers,rai,watchlist,campers,phenotype,countrywide,affirm,directx,resistor,bhd,audubon,commentsblog,snowmobile,publ,cpg,subparagraph,weighting,rectal,mckinney,hershey,embryos,garages,sds,urology,aforementioned,rihanna,tackling,obese,melvin,collaborations,isolates,velcro,worksheets,avaya,srs,wigan,hua,abba,qd,orig,huskies,frey,iz,loyola,gartner,xda,strapon,chaser,astra,expasy,overdrive,ripley,phosphorylation,cfo,depletion,neonatal,qr,mclaren,rowling,vhf,flatbed,golfers,lira,technics,damien,clippers,spirited,gv,staa,recharge,openid,sassy,demux,ribosomal,tdk,filmmakers,transnational,paralegal,spokesperson,fha,teamed,preset,iptables,pocketpc,nox,jams,pancreatic,tran,manicures,sca,tls,prweb,holloway,cdrw,plz,nadu,underwriting,rulemaking,valentino,prolyte,millenium,collectable,stephan,aries,ramps,tackles,dsa,walden,catchment,targus,tactic,ess,partitioning,voicemail,acct,shimano,lingere,parentheses,contextual,qwest,jira,cerevisiae,dyson,toxins,camaro,cryptography,signalling,daycare,murakami,merriam,scorpio,attr,emp,ultrasonic,ashford,intergovernmental,paranoid,dino,xvid,dmoz,ivtools,barron,snorkeling,chilean,avs,suny,gifs,qualifier,hannover,fungal,ligand,aust,peoplesoft,freelists,coastline,omit,flamingo,deformation,orf,pfizer,assembler,renovations,genbank,broadcasters,employability,noodles,retardation,supervising,freeport,lyme,corning,prov,dishnetwork,amg,claremont,moo,cpe,childs,bizkit,blogosphere,endocrine,resp,carlsbad,ammo,bling,chars,mcguire,utilisation,rulings,sst,geophysics,slater,broccoli,foreach,oakwood,mcgee,kissimmee,linker,tetris,tds,synchronized,hsbc,shellfish,astoria,trajectory,epsilon,knowles,astrophysics,hansard,lai,authorisation,vampires,relocate,nerd,dac,glazing,provisioning,mnt,expandable,maserati,bender,reliably,fas,sendo,hasbro,corba,polski,multidisciplinary,ventricular,petersen,bans,macquarie,pta,poy,mao,transferable,yummy,momma,lehigh,concordance,greenberg,trish,electrodes,svcd,cron,darth,cramer,yup,ching,melanoma,thug,yugoslav,occ,cpan,bizjournalshire,tco,shaver,grammy,fibrosis,opel,hummingbird,ported,eeo,polyethylene,parametric,awarding,dkk,superbowl,sse,haskell,flatware,skid,eyeglasses,fenton,polaris,formulations,bgp,parenthood,latinos,artworks,doherty,dnc,bci,allegheny,arenas,aaaa,compressors,exclusives,lounges,consultative,lst,ais,conveyor,normative,surg,rst,longtime,ecm,mckay,spe,solver,ani,lacie,solve
nts,kudos,jens,creams,poo,handbooks,agm,shawnee,crowley,butalbital,artifact,mdot,coldwell,qs,depts,veterinarian,merseyside,cso,krona,disseminate,puget,coasters,geologic,fleetwood,feldman,endocrinology,replicas,polygon,mcg,kwazulu,servo,riparian,guelph,tenuate,curator,jaime,mower,gamestats,lvl,faxing,meyers,testsuite,stressful,extranet,remastered,teac,neg,rma,eastwood,handspring,gerber,duran,aquarius,stencil,srp,scifi,redirection,showcases,hmv,refinery,abort,drs,schroeder,indent,chardonnay,removals,antrim,accelerating,guesthouse,bz,insiders,duvet,decode,looney,brigham,mts,jewelers,juneau,dilution,veterinarians,colourful,grids,sightings,binutils,spacer,microprocessor,deloitte,claiborne,clie,cdm,spills,assistive,chronograph,refunded,sunnyvale,spamcop,lovin,embracing,minimise,salinity,nbsp,specialising,handout,routledge,ramirez,haiku,paisley,telemarketing,cutoff,visuals,ccs,breads,seg,martina,mclaughlin,headlight,kemp,sla,pipermail,sonneries,clinicians,entertainers,tripp,peterthoeny,blockers,stash,jamaican,semen,endogenous,memorex,showtime,narcotics,oceanfront,flange,realplayer,mcc,mpaa,gogh,allentown,romero,bnwt,predefined,buzznet,melodic,isi,naics,transgenic,axim,brookfield,endorsements,viscosity,cve,bengals,estimator,cls,concurrently,leafs,electrician,mayfield,ftse,samui,bleach,unauthorised,wolverine,individualized,ecn,raffle,shredder,embedding,hydrology,mascot,lube,launcher,mech,primers,caregiver,lupus,sachs,qtek,oy,twn,keane,gator,memberlist,utd,nordstrom,roseville,dishwashers,walla,remixes,cozumel,replicate,taped,mcgrath,biometric,incubation,aggregates,wrangler,asymmetric,cytochrome,xfm,sps,shure,mcs,donating,antec,giveaway,cmc,alyssa,cnt,renter,vmware,patel,honeywell,nightclubs,barrington,luxor,caterers,capacitors,rockefeller,checkbox,itineraries,reagents,christoph,walkers,eek,ensembl,weekdays,computations,wineries,vdc,booker,mattel,diversification,wsdl,matic,xyz,antioxidant,esrb,archos,semesters,naruto,storyline,melrose,streamlined,analysing,airway,iconv,commas,vicky,helvetica,ssp,submitter,cambria,icp,manifestation,subsets,blazers,jupitermedia,merritt,triad,webpages,yp,clinique,fitch,charting,ugm,fixation,bsa,lenovo,alamos,leach,gravitational,cyrillic,prevacid,designee,sunni,netflix,monoxide,groupee,hardin,colorectal,outage,chunky,raptor,ima,coulter,iain,mtn,pbx,quantify,dmesg,elfwood,substitutions,lancome,galleria,inv,hillsborough,booklets,pln,cin,msp,gluten,spanked,orthopaedic,medi,nrt,obispo,minogue,turbines,notepad,crappy,golfer,afs,receivables,scripps,livermore,cirque,ost,marxism,escondido,diffraction,aha,outlining,subtract,bosnian,hydration,havent,preferential,dre,interns,quotas,methodological,aarp,gettysburg,iseries,menlo,walkthrough,bikinis,aopen,bookcrossing,addicts,epithelial,drastically,clarks,groupware,matchmaking,dict,descriptors,aeronautics,radiography,norsk,nps,afr,expr,ejb,refereed,afi,toxin,poynter,filmmaker,grounding,smartphones,calvert,fiduciary,bayesian,saccharomyces,cfp,humps,osi,zimmerman,javier,romantics,trimmer,bookkeeping,hmo,hikes,kickoff,magick,hillsboro,blm,fractal,mtg,guildford,twill,therapeutics,disruptive,kicker,protease,abrams,moreno,newsforge,timex,duffy,racers,cma,pairing,kirkland,gujarat,dkny,catfish,doubletree,brink,transex,tdd,hotpoint,anthologies,retirees,dcc,btu,investigates,chelmsford,anonymity,gotham,lyle,pinot,responsiveness,gazetteer,jacobson,kda,imitrex,monash,binghamton,connolly,homology,rpms,psychedelic,gyn,rhinestone,ely,quadratic,philharmonic,dynamical,cantonese,quran,turnovr,keychain,shakers,inhibited,lexical,openssl,ugg,mathematica,kara
chi,missoula,abilene,fdid,snes,swat,pune,trashy,expended,webct,pvr,handycam,zn,strategically,dms,anus,dnr,deputies,emergent,erika,authenticate,aligning,nautilus,doulton,rtp,dracula,umm,modding,eap,shaman,letra,mandriva,seti,extracellular,jaipur,stockport,eiffel,plywood,dnp,morbidity,wimax,effexor,binders,custodial,combi,integrator,sonnerie,teri,sectoral,trombone,postsecondary,rbd,ambulatory,lookin,xff,camouflage,beckham,dispensers,firebird,qu,showbiz,hbox,waikiki,lng,pds,antiqua,boxers,asics,barbeque,workouts,ini,mrc,seamlessly,ncc,girlfriends,songbook,hepatic,copeland,swanson,aquifer,ldl,pgs,xga,svensk,stereotypes,marlins,shelly,exiting,saginaw,polyurethane,seks,textus,johansson,spraying,hamburger,reactivity,lieberman,windchill,storefront,eof,codeine,tetex,cheerleading,wellbeing,pkwy,hairdryer,punitive,exon,outsource,thier,siebel,captions,kf,chromosomes,emailing,manic,novotel,ndp,transmitters,nicola,minidv,collaborating,tuxedo,receptus,michelin,bicycling,itt,blueberry,schumacher,socioeconomic,hamster,bushnell,ergonomics,finalize,lumens,sudanese,softpedia,iff,faceplate,packer,ibs,broward,globus,pir,reco,softcore,referencing,typ,guangzhou,nader,militants,resins,cougar,montrose,surreal,irradiation,redesigned,raster,credential,checklists,quirky,oscillator,finalists,encrypt,mgt,sneakers,incontinence,pajamas,murdoch,dali,lubricant,quests,mgr,outsourced,jody,plasmid,schiavo,unbeatable,upstate,lymphocytes,repayments,transsexuals,fueled,mex,xanga,sverige,extrait,pelvic,monochrome,activating,antioxidants,gynecology,mythtv,probabilistic,cooperating,calibrated,phased,godzilla,eweek,airbus,simplex,webhome,aerobics,sabrina,condor,gated,gaap,sasha,ebayer,hmc,bitrate,karnataka,amish,ffm,duh,hyperlinks,clitoris,hse,cribs,reliant,subcontractor,fendi,giveaways,wah,psych,hydrochloride,magnification,twelfth,proponents,priceline,ecco,backpackers,kohler,irb,initialized,ava,silverado,amr,ecu,psychiatrist,lauder,soldering,phono,crd,daryl,trp,lehman,daihatsu,grantee,enhancer,anglers,rottweiler,filefront,visualize,psd,adb,hoses,bidpay,ias,turntable,screenings,pivotal,pai,heuer,fic,nix,lineno,fdi,provo,checkins,plating,lycra,planck,yugioh,reactors,npc,kingsley,careerbuilder,gillette,fluoride,stacking,cochran,suomi,sissy,trang,calculates,thunderstorms,cip,transcriptional,finalized,referees,deerfield,lsc,cochrane,eldorado,esmtp,conservancy,otrs,omim,dielectric,anand,electrophoresis,sprinkler,imbalance,cine,scarlett,xen,novak,backcountry,artistdirect,outboard,pitches,scc,lockheed,raj,iana,elmo,unmatched,scranton,ixus,pinpoint,gabbana,neumann,outta,dieting,andhra,ralf,appraisers,xenon,hybridization,anh,abercrombie,trax,otherosfs,ssc,danbury,nofx,sharma,rockers,palliative,recieve,cufflinks,queues,relisted,beep,dunedin,remanufactured,staffed,lightspeed,grilling,stalin,kaye,bps,camo,shoutbox,toms,homeschool,ccg,lifehouse,windsurfing,pattaya,relocated,untreated,mkdir,riaa,divisional,chihuahua,mcconnell,resell,chandigarh,centrino,osbourne,burnout,classpath,designations,spl,microwaves,coliseum,ephedra,spawning,endothelial,citrate,eduardo,snowman,edmonds,potty,microbiol,shooters,norwalk,bacillus,fk,cla,spooky,belleville,venezuelan,cbr,colby,pab,hom,subpoena,hons,interpretive,bareback,extender,glucosamine,proj,modesto,designjet,typhoon,launchcast,referrer,zhejiang,ricci,superhero,tooling,tomography,berman,vocalist,tidbits,cystic,pacifica,kostenlos,anniversaries,infrastructures,littleton,commenters,cali,fairway,postdoctoral,prs,fairchild,ssb,spinner,evanston,homeopathic,ordinarily,hines,cpd,braking,ece,platelet,messageboard,setba
ck,recipezaar,installers,subcategory,markov,factbook,tuple,fibromyalgia,rootsweb,culver,bratz,bucharest,ntl,lacoste,renters,timberlake,zack,markham,gels,iframes,thinkgeek,nafta,advertisment,mountaineering,screwdriver,hutch,beckett,homeschooling,dealerships,sakura,byu,jupiterweb,phosphatase,mahal,killings,robyn,adirondack,casablanca,sdp,pulaski,mantra,sourced,carousel,mpumalanga,thermostat,infarction,polypropylene,mailboxes,southend,maxell,tundra,vars,youngstown,farmland,skater,iep,imho,disrupt,rampage,fink,jurassic,gpg,gnupg,aliasing,comix,solves,hiroshima,jiang,oscars,boosting,knownsite,macarthur,powerhouse,deodorant,youre,compulsive,perky,reinforcing,extensible,mtb,catheter,practicum,photocopy,zipcode,mcpherson,saharan,pixma,hubbell,lesbienne,timeframe,disarmament,aed,actin,interviewer,vms,wno,dbi,waikato,syslog,orr,gastroenterology,travelmate,composting,mackie,choi,uva,fga,oceanography,vastly,stardust,radiological,commando,bathtub,urdu,aedst,greer,motorway,repositories,freaky,guangdong,merlot,civ,spielberg,lesley,thom,phoneid,salinas,legged,unilateral,dsn,shri,aegis,colloquium,matrox,vk,springsteen,uhf,fatalities,supplementation,embodied,altec,mohammad,verbose,marbella,sth,iterator,recieved,slc,cfl,deterministic,nci,predictor,salmonella,nga,nantucket,viewable,subnet,maximise,lotr,isn,chalets,reimbursed,lau,watermark,totes,mohamed,dyslexia,hubble,thugs,organics,dearborn,feds,yiddish,dopamine,multiplier,winzip,sacd,payoff,spv,sonar,monticello,flasher,subcontractors,evangelism,abortions,lesion,akira,progesterone,ethyl,earthlink,caramel,immunodeficiency,washburn,xtra,capitalized,ceos,maint,pancreas,octopus,xena,neuro,ara,receptionist,cessna,tru,zombies,cambodian,interagency,activision,synchronize,jenn,juegos,titties,tay,hornets,crossfire,ankara,spandex,hdmi,tamara,ctc,capcom,cato,peachtree,handyman,aeg,ethic,harlan,taxon,lcs,indefinite,slackware,cougars,earch,ambience,genet,photopost,uo,infor,neuronal,carrollton,checkers,torrance,yuma,spokeswoman,baccalaureate,tripods,logistic,middlesbrough,personalization,enema,easement,goalie,darkroom,hydrocarbons,gpm,hoh,hla,donaldson,tiscover,recor,mori,adi,rockland,uniqueness,hfs,cascading,metros,hangers,broadcaster,musculus,degraded,topo,viewcvs,eisenhower,flashlights,myyahoo,rosenthal,affordability,latham,jailed,depp,grapefruit,trna,motorbikes,verdana,bonita,nippon,decorators,dwl,jizz,pendleton,psoriasis,mavericks,dianne,earnhardt,amtrak,resid,tostring,lessee,goodyear,utica,overclocking,kitchenaid,cbt,peacekeeping,oti,interferon,aas,selectable,chechnya,rory,woodbridge,jas,intersections,sma,capitalization,epi,responder,qv,thoracic,phaser,forensics,infiltration,serine,bing,schemas,orthogonal,ohms,boosts,stabilized,wordperfect,msgs,zhou,selenium,grinders,mpn,cse,assn,punches,masturbate,parachute,glider,chesney,taos,tong,lotions,adrenal,sixties,booting,cunts,dri,ozzy,elearning,zx,valuations,kidman,jpn,postoperative,cytology,nye,biennial,ifndef,bq,circuitry,cdw,robb,kinja,tweaks,readership,northstar,dif,worthington,groundbreaking,transducer,serotonin,complements,isc,params,radiators,beagle,cadmium,bodoni,speedo,detachable,simplifies,sleeveless,motorists,tbsp,waivers,forsyth,ricerca,agilent,plumper,uterine,apartheid,bnc,businessweek,morphological,windham,ellington,ria,cdi,polio,clp,sharm,alvarez,regatta,chatroom,polarity,overrides,riff,widths,dest,attenuation,kluwer,martins,italiana,telford,shuman,grapevine,russo,daunting,topples,futuristic,autofocus,chai,obsessive,transplants,referrers,junkie,admitting,alsa,galactica,wkh,rotational,withdrawals,pageviews,ha
rtman,finalist,pornographic,armageddon,smallville,selectively,albans,fallout,brownsville,galeria,stalker,kathmandu,nyu,kristina,dps,icmp,sophistication,wrt,messed,oceanside,foxpro,taiwanese,officejet,helens,ppg,sym,combos,cloned,fulham,dahl,pla,nfc,mathews,bestseller,enrique,minidisc,downside,malvinas,honcode,reissue,striker,memos,tensor,whitehead,whoa,brookings,accomodations,integra,laredo,nntp,logiciel,jaguars,mga,tracer,frist,lsd,synthesizer,ejaculating,biodiesel,mcleod,waldorf,microfilm,lear,subsidized,simons,optimizer,zire,pituitary,sow,repeater,teamxbox,bytecode,mccall,wiz,autopsy,joltsearch,ym,itv,colo,ying,bce,inode,glenwood,allstate,horticultural,hahaha,spamming,ssn,wartime,mou,hpv,jain,geriatric,mayan,navman,futon,grannies,hairstyles,nays,webspace,rds,mellitus,multiples,cryptographic,disparate,boardwalk,ineligible,homeopathy,entrants,rallies,simplification,abb,insolvency,roleplaying,affective,wilma,compusa,histogram,wheelchairs,usaf,pennington,lesbiana,liberalization,insensitive,greenpeace,genotype,contaminant,informa,collaborators,malvern,proxies,rewind,issuers,sinh,kerberos,schoolgirls,hilo,stratton,idx,astronaut,instituto,lowry,constipation,aec,sheryl,nashua,ikea,oswego,gbr,koi,sues,cba,mckenna,eudora,candida,sildenafil,adjusts,sqft,pickups,squaretrade,chandra,cheesecake,oth,porting,lubrication,shootout,racine,webserver,vnu,fragmented,chevron,reinsurance,slated,tera,guantanamo,reina,energizer,clarksville,vandalism,acpi,acetaminophen,wolfram,ofthe,contraceptive,necrosis,iva,bonanza,lumbar,disparities,umass,flamenco,osprey,flammable,biometrics,buspar,wasnt,nds,softwares,dbm,alchemist,marr,ssw,mcdonalds,hormonal,vh,calender,distro,virgo,rink,jesolo,unrealistic,rhonda,pov,pings,pcp,inxs,desy,teaser,impairments,courageous,rho,promos,transceiver,warhammer,iterative,catered,callahan,neuron,xlibmesa,pulsar,enewsletter,dav,pedagogy,bcc,afrikaans,ecb,cinematic,ugh,malik,tshirts,fellowes,illus,telefon,maguire,nlm,numeracy,caviar,popups,sleepwear,quads,grady,kelsey,enforceable,bouncy,vcrs,retinal,sponsorships,textrm,screenwriter,vendio,otago,ducati,allele,sylvania,optio,purifiers,commuting,hiphop,kato,kama,bcs,keating,eczema,northland,icu,veg,roadster,confetti,fv,raptors,irda,veggie,dharma,chameleon,hooper,luciano,grp,abrasive,henti,koruna,edp,ensembles,backpacker,bainbridge,scs,comfy,assuring,gettext,registries,eradication,herefordshire,ectaco,doh,jodi,quintet,groupwise,ambiance,chun,damian,bakeries,dmr,fucker,polka,wiper,wrappers,giochi,iterations,svs,ntfs,namespaces,mismatch,fdic,icd,vj,oxides,qualifiers,battered,wellesley,smokey,passwd,vacuums,falun,precip,lagos,rapper,hooters,calligraphy,advantageous,mustek,monique,fearless,ortiz,pref,morningstar,recessed,fmt,palladium,totaled,levitt,vd,shipper,darryl,hobo,nys,merrell,cra,sly,reductase,raul,shenandoah,harnesses,wtc,loma,oshkosh,multivariate,geil,kitchenware,unigene,lans,immunoglobulin,silverstone,uniden,telechargement,remstats,unitary,getnetwise,hospitalization,clubbing,microelectronics,observational,waverly,crashers,schwab,deregulation,vba,carpentry,steinberg,sweetie,mideast,hispanics,podium,paranoia,faceted,sito,gecko,fullscreen,interchangeable,rollins,scp,hst,starship,miele,seeded,cyclists,fey,cmt,nurturing,enzymology,amadeus,usm,galapagos,uconn,picker,xls,mulder,lesbicas,dialer,mooney,syntactic,envision,jetta,downey,codex,lsb,userid,cosmology,noodle,gromit,sargent,bangle,humping,donnie,privatisation,tofu,rq,unhcr,battlestar,intuit,adoptive,cda,minimized,partnered,twat,filibuster,glamorgan,adwords,tulane,usp,facet,behaviours,rednec
k,imax,xpath,synthesized,encapsulation,samsonite,accordion,rooney,minimally,webpreferences,skoda,matchups,ucc,mailings,ono,beachfront,cem,crosswords,pubchem,integrative,kelowna,embed,gurus,allotted,shutterfly,gerhard,watersheds,trimester,clickable,spyder,electricians,nexium,capricorn,dipped,perm,rte,spectrometry,snippet,pha,permeability,waukesha,igg,scart,wsu,normalization,skillet,neoprene,vlc,offeror,thermo,huber,jarrett,farechase,maintainers,maarten,ginseng,blackout,detergent,rosetta,grenade,occured,karin,lana,fontana,kang,crafting,ivillage,mowers,bratislava,policymakers,sienna,watford,misco,givenchy,reimburse,esperanto,modalities,pcc,lighters,shutting,endemic,spr,carly,hydrologic,stansted,nep,huddersfield,aimee,davey,csp,helpsearchmemberscalendar,ait,transduction,silverman,clarifying,aortic,drc,hoa,starcraft,martens,ficken,structuring,konami,lipids,jurisdictional,desi,cellphones,cordoba,xj,sheppard,dpkg,folsom,triggering,mapa,aip,rackmount,binocular,eda,specialise,rar,remortgages,mckinley,hanks,dosing,strobe,waffle,detectable,pmi,arrowhead,nigga,mcfarlane,paycheck,sweeper,freelancers,seinfeld,tdm,shen,responders,keepsake,birthdate,gettin,upbeat,ayes,amenity,donuts,salty,interacial,cuisinart,nautica,estradiol,hanes,noticias,gmp,schaefer,prototyping,mth,zeros,sporty,tumour,fpic,pdc,atpase,pooled,bora,shu,stabilize,subwoofers,tcs,clueless,sofitel,woodruff,southport,walkthroughs,radiotherapy,minifig,transfusion,sams,zend,newtown,mcmillan,csf,lyn,witt,mcd,unep,newsflash,recombination,messing,budgeted,slogans,flashback,photometry,sutter,inr,knicks,ingestion,mindset,banda,adulthood,inject,prolog,dunk,goofy,mcintyre,aga,guilford,raglan,photonics,cdf,celtics,heterosexual,mappings,jel,snip,fascism,galerias,audiovisual,diagnosing,neutrino,wouldnt,mq,codecs,certifying,dvp,traduzca,csb,subj,asymptotic,isotope,moblog,locales,preventative,brampton,temperate,lott,srv,meier,crore,deserving,banco,diagnoses,thermaltake,ultracet,cortical,itchy,glaucoma,homosexuals,mhc,estee,wysiwyg,oversees,odp,categorised,thelist,diss,cta,diamondbacks,nzd,subtype,psx,thessaloniki,dmv,leafstaff,literate,ayp,bikers,harcourt,bubba,mutt,orwell,mietwagen,bakeware,cleanser,lonsdale,velocities,renewals,tsx,dnl,mtu,salford,ephedrine,longview,closeup,venous,hereunder,ouch,teflon,cys,debadmin,cleans,fpga,everton,rosters,herbicide,marlene,futura,smd,cheddar,ql,tucows,regex,bukake,chs,mcclellan,gopher,distal,zar,frommer,joss,shortfall,harmonica,geothermal,texmf,atlases,kohl,lorazepam,hosp,lewiston,stowe,fluke,khi,estes,hdr,caches,stomp,acidic,anc,doin,tld,gangster,deliverables,censored,fascist,lido,matchbox,trl,businessmen,bpo,incubator,experiential,eraser,jordanian,jiwire,libra,rtl,iea,uniprot,statystyki,pkgsrc,nonprofits,desnudos,czk,ethylene,slows,opm,inhibits,exploratory,spectrometer,outsole,lista,tmc,inset,polynomials,elegans,openers,shasta,dob,inet,cov,fallon,sidekick,tcb,dmca,rewriting,bahama,idl,loretta,lingvosoft,dax,allocating,newell,juveniles,gamermetrics,lcds,ortholog,tasmanian,hydrocarbon,lobbyist,kelvin,secondhand,xo,cheatscodesguides,mdl,clientele,technica,gratuito,hts,arkon,hort,bureaucratic,cooperatives,raceway,sopranos,hotties,gq,terrell,yc,closings,registrars,strlen,faye,cto,lakeview,ospf,tunneling,methamphetamine,murals,bangs,asic,knockout,radon,avantgo,asl,obi,timelines,roget,cristina,visio,autoimmune,coder,replicated,pom,timetables,kline,anorexia,errno,workplaces,harpercollins,clk,heartburn,empathy,ica,motivating,clockwise,frisco,mitzvah,chong,bashing,boosters,cyl,grupo,mikhail,denominator,changeset,cec,jovencita
s,texttt,islamabad,freestanding,resilient,eyewitness,spartanburg,hippo,trung,tenancy,offsite,realaudio,clements,dogsex,ticketing,heterogeneity,bodied,dudes,maytag,norco,altos,sleeved,overs,watercraft,scully,cellulose,cathode,monographs,nra,digitized,rotated,gaia,motown,pryor,sato,greeley,ccr,agro,ramos,quizilla,citibank,scotty,pvp,meridien,taxa,brunettes,bic,irl,mfa,endo,unhelpful,microorganisms,twister,krakow,sequoia,emt,activator,incredibles,familial,marquee,resilience,thermodynamics,seton,makita,subgroups,catchy,aia,tig,synaptic,bobcats,zappa,eec,chicas,swahili,nlp,dzwonki,enrolling,commercialization,smt,cataloging,snowboards,sami,tesla,elan,csd,ingrid,longman,unleaded,mesquite,kroner,frm,javadoc,hotbot,denali,inhibitory,phonics,dbs,refs,smh,thaliana,meningitis,motivations,rees,asteroid,donegal,endings,mwf,unlisted,philippians,conductive,sooo,echostar,microscopes,kenmore,reagent,achievable,dla,glamorous,interacts,litchfield,lavoro,hobbynutten,chomsky,venezia,yamamoto,zhu,interleukin,flashcards,homologene,interception,voltages,assignee,kip,bla,algarve,valance,stc,pisces,cpanel,orc,hemingway,gti,hdl,rendition,danmark,yun,sourcebook,hui,matador,smut,nac,dang,bradenton,meetups,bilbao,ewan,cwa,akai,deletes,adjudication,autoconf,rasmussen,bibliographies,milne,fsc,unplugged,ttc,currie,torvalds,neff,tailgate,hollis,lanier,overseeing,escalation,polymorphism,semitism,sevenfold,colocation,woodbury,tshirt,epidemiological,medic,grail,espana,horne,nostalgic,aldrich,tabled,farsi,excelsior,rial,greenspan,dhabi,chobe,tafe,pz,andrei,frazier,criminology,jeanette,constel,talkin,dup,syd,permittee,hangover,capitalize,fsu,motocross,boomers,wedgwood,mcdermott,youngs,lep,grossman,pecan,freshmeat,fnal,benzene,mcp,topper,ittoolbox,manny,arse,osteoarthritis,westlake,czechoslovakia,addictions,taxonomic,judo,mizuno,palmetto,telco,ltc,microarray,electrolux,elephantlist,sparked,qualcomm,whitaker,opc,connelly,conner,hospitalized,fec,opml,cana,ation,entitlements,wingate,healey,jabra,qmail,soybeans,awd,electrostatic,topological,coz,oversize,westinghouse,unk,reb,rios,craftsmanship,cic,pyle,seuss,cheetah,ldp,competed,fridges,hatchery,judgements,msr,zr,corbett,asx,curr,fingerprints,conv,cheesy,ahmedabad,dimlist,winfield,pinto,gallerys,jana,martindale,webstatistics,dhl,mays,risc,hcv,oboe,tzu,hurd,geotrack,kolkata,imation,hematology,expressway,steelhead,ahh,turntables,lindholm,clooney,facilitators,mcnamara,shiva,toners,kenyan,wynn,hsa,motorbike,niles,zippo,sergei,upfront,battlefront,gosh,fansite,colossians,addicting,gerd,copa,gtp,zlib,whitespace,tektronix,doesn,mccullough,cnr,microfiber,mdc,tsa,deployments,stearns,insurgency,boyer,behringer,akg,ttm,perceptual,fz,midlothian,follando,instr,ott,bsn,rambler,drywall,suzy,dekalb,sumo,topsites,hsc,tse,refurbishment,pfam,tdi,grassland,jeffery,councilman,swaps,unbranded,astronauts,lockers,lookups,attackers,actuator,reston,sftp,reinstall,lander,coby,methanol,miscellany,simplifying,slowdown,bridesmaid,transistors,marys,colgate,lousy,pharm,foreseeable,nutritionists,techweb,berkley,resistors,blondie,drwxr,cfc,isu,stm,villanova,iw,tif,cbi,cesar,heuristic,archivist,gallup,valtrex,usn,antimicrobial,biologist,cobol,homolog,fruity,stratus,fips,urea,bumpers,lumix,wildcard,rvs,desnudas,plextor,oxidative,brits,healy,pliers,kayaks,ibanez,marxist,couldnt,naperville,diplomas,fieldwork,damping,immunol,regan,wwwroot,bootleg,intellectuals,winslow,minis,rhs,leftist,tequila,limoges,wildwood,oop,germantown,bergman,gmac,pulitzer,tapered,mollige,toothbrush,delegations,plutonium,factsheet,squarepants,subsurfa
ce,guadalupe,halliburton,underscore,borg,glutamine,slutty,mcphee,doa,herbicides,usgenweb,inscribed,chainsaw,tablature,fertilization,glitch,gearbox,stang,alejandro,tensile,varchar,intercom,ase,osg,mckee,envisaged,splice,splicing,campfire,cardbus,hubby,graphing,biologists,improv,hempstead,exilim,xlr,debuts,esi,diskette,ubs,commend,contender,southland,spie,globals,diaspora,anu,moratorium,safes,goodnight,alcoholics,asme,gatlinburg,cai,pharmacol,swe,xorg,newsquest,wavelengths,unclaimed,racquet,cout,cytoplasmic,qaida,kpmg,lanarkshire,steakhouse,stubs,solarium,sedo,fillmore,shox,greenhouses,spotlights,perks,harlow,morrissey,igp,lutz,capacitance,birthstone,primitives,bong,lingual,unframed,iter,vibes,tmdl,programa,republication,zap,veneto,zhao,hippie,acyclovir,benoit,organizes,unaudited,rz,summertime,airbag,lal,sweetwater,bjc,cfm,internationale,krystal,expansions,gms,correlate,linkout,poc,pittsburg,bylaw,kenyon,trims,epiphany,pny,devin,viewfinder,homewood,mcrae,hind,renaming,plainfield,maxon,sprintf,armagh,livechat,pdr,bhp,lyman,notfound,pho,pathogen,zagreb,gayle,ust,overwrite,revitalization,camry,postmodern,jayne,hci,kuhn,typos,glutamate,melton,oneworld,realtone,mikey,telephoto,pooling,jy,drury,ctw,tbs,sct,custer,borderline,surgeries,lobbyists,sfo,zionist,gaskets,photoblog,cushing,nonstop,hummel,corgi,ellie,citigroup,seasonally,uci,bizwomen,dti,malkin,adbrite,psychosocial,butthole,ellsworth,cline,backlog,thema,filmmaking,wwi,townhomes,usf,instapundit,mcmaster,bayside,thinkcentre,cea,biophys,hodgkin,vhosts,laughlin,congresses,electrically,ophthalmic,yz,prong,unreleased,ipa,chaplin,dfw,histology,gilman,klamath,atrial,equalizer,vbscript,helmut,lynda,vax,yak,silt,councilmember,endorses,expos,cherish,aap,undead,pto,critters,blob,kurds,ela,ical,macleod,devry,rahman,fundamentalist,subtraction,superstars,chmod,leveling,piggy,stadiums,playable,uz,sunos,lancia,perf,interconnected,tunning,whitepaper,platt,lexis,virology,csm,purcell,vidal,svcs,subsystems,oxfam,johnstown,beading,robustness,ifn,interplay,ayurveda,mainline,folic,vallejo,ratchet,cee,yl,yee,wicca,cygnus,depiction,jpl,tiered,optima,seward,photons,transactional,lhc,doggy,anodized,exxon,hurdle,donnelly,metastatic,encyclopaedia,errata,divas,ong,trey,thankyou,alerting,insofar,smileys,surrogate,breathable,differed,dickies,gonzo,programmatic,trs,teammates,barrymore,ddd,barracuda,accesskey,appellants,usergroups,initiates,pwd,mation,aiwa,whiting,grizzlies,okidata,methadone,offsets,tryin,jodie,jdk,tallinn,descarga,monterrey,harrogate,lotteries,bozeman,coauthor,cybershot,airflow,thur,oper,stn,unattached,maher,karlsruhe,yuri,cheung,honeymooners,cheaptickets,howie,dieter,centerpiece,mplayer,unwind,outings,crotch,wavelet,nothin,pathogenesis,diodes,realestate,reinstatement,botox,nge,dipole,cleo,norge,kata,tangled,giga,walsall,burnaby,lilo,adf,majorca,agribusiness,validator,jax,pixie,proofing,clits,keyring,vehicular,workbench,deph,landscaped,aziz,lula,nucl,farber,impala,commenter,celsius,flicks,hardwear,prefixes,racquetball,endl,flavours,pundits,unset,murano,optimised,bariatric,hitchhiker,isotopes,entrez,erich,conduction,grabber,orch,peridot,produc,skechers,pacers,salvatore,nts,rbc,neurosci,parton,apec,centerville,mcl,ebuyer,dermatitis,roxio,nagoya,sfc,snowfall,sss,fundraisers,fecal,vorbis,hazzard,lbp,gorman,validating,healthday,newsstand,dossier,psion,tcc,corbin,songwriting,ecg,hinton,nighttime,fluxes,kombat,finders,dictated,darlene,westcott,dca,lua,lpg,opti,proximal,canciones,irix,qp,peroxide,bryn,erm,rfi,outages,complemented,finley,thanh,backlash,gallo,agence,z
s,kjv,jonny,biblio,qm,opacity,userland,townsville,turing,veggies,centenary,barclays,eid,drexel,pedagogical,lockhart,fishnet,combinatorial,unintended,raman,rochdale,prnewswire,sthn,smog,ucl,poa,mics,punjabi,prem,katalog,kettering,hayek,brookline,montpelier,titty,ntt,fart,oxidase,qw,caterer,pregnancies,fiori,dateline,stdout,unassigned,adriana,lyndon,groupings,mems,midterm,campsite,dropdown,marketer,huntingdon,jcpenney,gelatin,qvc,adenosine,milliseconds,swatch,redefine,backdoor,jazeera,envisioned,pws,extrem,automating,sempron,cursors,divert,phnom,tbc,kanji,vod,recreate,smackdown,dropout,jrst,fallujah,lockout,moron,tnf,townhouses,horrific,abacus,lifeline,gto,torquay,dao,conjugate,winch,elektra,webtrends,shes,sabotage,blueprints,limos,fraunhofer,warhol,suppressor,dogpile,birt,rensselaer,jocks,unzip,floss,sarge,endnote,leland,telugu,midwifery,huff,pornos,primates,rmi,tangerine,amoxicillin,graz,basingstoke,crawler,angled,comin,longhorns,doha,ebsco,lynchburg,overriding,wilshire,ard,wachovia,groff,ects,lok,invicta,dongle,ecumenical,tanaka,internacional,kwan,cdl,archiv,placid,lenin,marsha,gradients,ritalin,retrieves,ferrous,dhaka,zillion,chino,ltr,caveat,gangbangs,toiletries,bedrock,clio,zines,multipart,forklift,repurchase,orthopedics,wsw,vnc,nfpa,dnf,badgers,chp,kinh,appetizer,disbursement,weblinks,telemetry,consumable,winn,depressive,stabilizer,ovary,rune,accrual,creatively,amateure,abd,interfaith,cay,automata,northwood,payers,gritty,dewitt,rect,ipx,sebring,reborn,bia,lagrange,treadmills,bebop,streamlining,trainings,seeding,ulysses,industrialized,botanic,bronco,moodle,chased,cti,intermediaries,tei,rotations,knoppix,montessori,biomed,murine,entomology,rodent,paradigms,lms,putter,fonda,recursion,flops,initiator,hsu,pobox,zeiss,ferc,tanf,sunscreen,llvm,antidepressants,decentralized,freaking,whittier,elmira,bassist,oakville,skaters,luminosity,emulators,toefl,keychains,karat,modis,ginny,egan,posh,bangles,stereos,submittal,bnib,moh,mink,simulators,nagar,zorro,ecran,ealing,ozark,pfeiffer,miers,vickers,interactivity,corso,constructors,doj,ipm,rnd,jama,lsi,malfunction,magma,smithfield,gtr,canucks,hammersmith,sdi,cricos,blum,parkland,pcbs,werewolf,wnw,midwestern,ezboard,charisma,chilli,iac,suspensions,nss,smi,malnutrition,logcheck,layton,gaines,inbred,intercultural,skateboards,mainboard,goshen,functionally,rabies,catalysts,datetime,readability,dakar,dspace,cappuccino,modulus,krause,cuisines,maclean,tuscaloosa,boosted,sprayed,gearing,glutathione,adoptions,tweaking,angina,geeky,rnb,coupler,lexapro,aig,paisapay,zanussi,minimizes,hillsdale,balboa,penh,wainwright,agc,guadalajara,pinellas,umts,zappos,daimler,spo,tadalafil,everglades,chipping,montage,geelong,ionization,broome,biases,sprawl,marantz,alfredo,haunt,hedging,insulating,mcclure,vbr,qed,waterfowl,adress,reacting,virtualization,itat,collide,syst,mankato,segregated,ests,avengers,technologist,pigments,impacting,lamont,aquariums,rigs,arginine,moot,pleasanton,televised,giftshealth,acd,simplistic,hepa,amphibians,encapsulated,injector,kessler,gardenjewelrykids,leung,edo,impl,grained,relatos,newsday,gmat,dani,announcer,barnsley,cyclobenzaprine,polycarbonate,dvm,marlow,thq,osce,hackett,divider,cortez,associative,cmo,rsync,minivan,victorinox,chimp,flashcoders,giraffe,pia,stroud,lefty,cmg,westside,heres,azimuth,logistical,firenze,okavango,jansen,tween,payback,hydraulics,endpoints,perrin,quantification,coolant,nanaimo,yahooligans,prilosec,hutchison,parsed,shamrock,schmitt,korg,warmers,newt,frontend,itanium,alleles,weiner,ola,halftime,frye,albright,wmf,clemente,handwri
tten,whsle,launceston,wembley,sandman,mejores,scoops,dwg,truetype,eigenvalues,airbrush,ppb,comms,regexp,quickstart,beaverton,trucker,willamette,chiropractors,tyco,mirroring,massively,aeronautical,lasalle,pwr,wordlet,hanford,plac,exhibitionism,riser,redux,gaim,audiobook,compensatory,couplings,jeezy,monsanto,cleric,rfq,contactos,esri,equiv,macrophages,yao,npt,computes,pickett,oid,charismatic,lda,teleconference,mma,whitepapers,polycom,tux,asymmetry,xpass,cfd,barbour,tijuana,niv,hamiltonian,cdg,algebras,quotient,wildcat,inlay,peta,paco,avocado,octets,dubuque,evaluator,gid,jumpers,edmunds,lerner,manifolds,awg,napoli,kristy,variances,pki,objectivity,sistema,massager,incubated,feedster,federer,turnovers,bev,eai,changers,frs,hereto,osc,clinician,alltel,gss,curacao,rapporteur,arcserve,gump,powerline,aspell,avp,safeguarding,paxton,herbie,yabb,chromosomal,hickman,runescape,salesperson,superfamily,tupac,cassini,tobin,zoos,activates,hibernate,ning,extremists,montego,rohs,cyclical,cytokines,improvisation,mmorpg,toured,tpc,flatts,cmf,archiver,rainer,rsc,covariance,bobble,vargas,gulfport,airfield,flipping,disrupted,restocking,lgbt,extremetech,citrine,neoplasm,rethinking,xfn,orientations,calumet,pellet,doggie,inflow,msw,lymphocyte,weinberg,saigon,whiteboard,wic,brody,invertebrates,elliptic,ffa,agonist,hyperion,partypoker,rockingham,sandler,schweiz,grundig,rethink,musculoskeletal,aggies,prereq,nikita,aetna,truckers,giro,laserdisc,kaspersky,dor,determinant,morpheus,ayers,junkies,ccna,jacquard,assesses,okinawa,autoscan,quantified,pnp,uppsala,distortions,subclasses,glo,condolences,hitter,livelihoods,psf,cala,telluride,apnea,mkt,floodplain,valera,wenger,crusader,backlinks,alphabetic,delonghi,tailoring,shavers,mcdonnell,aborted,blenders,symphonic,asker,huffman,alistair,navarro,modernity,wep,uab,olp,booties,cancels,newsblog,gangsta,mgp,foodservice,teton,newline,prioritize,clashes,crohn,bao,quicklinks,ethos,hauppauge,solenoid,stis,underdog,fredericton,tep,bextra,copywriting,technol,mdr,asteroids,continous,hplc,ovulation,doggystyle,quasar,euthanasia,schulz,okanagan,liters,tarrant,blacklist,clermont,rooftop,ebert,goldfish,witherspoon,slimline,animator,barbra,irreversible,flanagan,encyclopedias,csiro,downtempo,campsites,graco,lighthouses,xg,adt,hemoglobin,tung,svga,postpartum,condi,yoda,jst,dalai,xn,nytimes,kenzo,alden,trampoline,zi,restricts,gees,intakes,dogfart,swearing,ith,montel,ubbcode,yw,ninemsn,lgpl,jsf,psychotic,allyn,higgs,pulsed,ignite,hornet,atypical,contraceptives,slimming,dispatcher,devoid,jms,maricopa,mbs,northfield,idf,elites,fifo,correlates,casters,heisse,easygals,mandalay,haircare,climbers,atty,madera,calibex,mailbag,smartmedia,vilnius,dbl,doping,postwar,strat,bsp,barebone,thrombosis,smarty,whitley,lse,windermere,curtin,dilemmas,cci,gwynedd,edwardian,hppa,saunas,horowitz,cna,undergrad,mocha,escada,knockers,jitter,supernova,loughborough,directtv,feminization,extremist,tuttle,aoc,medway,hobbit,hetatm,multipurpose,dword,herbalife,ocala,cohesive,bjorn,dutton,eich,tonne,lifebook,caster,critiquer,glycol,manicure,medial,neopets,accesories,faxed,bloomsbury,mccabe,ennis,colossal,karting,mcdaniel,aci,brio,baskerville,syndromes,kinney,northridge,acr,emea,trimble,webinars,triples,boutiques,freeview,gro,screener,janine,hanukkah,caf,adsorption,sro,underwriters,foxx,ppi,noc,brunton,mendocino,pima,actuators,internationalization,wht,pixies,pancake,transmembrane,photostream,guerrero,firth,hathaway,emf,beatty,andersson,lunchtime,miro,slams,looping,crates,undated,takahashi,ramadan,lowercase,technologically,anaerobic,sate
lite,pioneered,tabloid,pred,solubility,troubleshoot,etf,hatcher,coders,insecticides,electrolyte,watanabe,firestone,writeshield,sph,descargar,letterhead,polypeptide,velour,bachelorette,nurs,geospatial,zoned,pubic,pizzeria,mirc,henning,acf,bae,nitrous,airspace,santorini,vdr,tms,convertor,brahms,genomes,workable,ordinate,seminal,rodents,ytd,xin,precursors,relevancy,koala,discus,giftware,realistically,hol,polska,loci,nanotech,subunits,awsome,hula,laramie,toothpaste,maxine,mennonite,subtitled,qms,maidstone,abr,sda,jcb,wpa,fastener,ctf,foxy,sexiest,jupiterimages,categorization,inclusions,fosters,conc,transsexuel,limbaugh,cassie,altman,lethbridge,peng,fillers,symposia,nia,templeton,stds,hav,typography,ebitda,eliminator,accu,saf,gardenjewelrykidsmore,gazebo,preprint,htc,naxos,bobbi,cocker,steph,protonix,systemax,retry,radford,implantation,telex,humberside,globalspec,gsi,kofi,musharraf,detoxification,ree,mcnally,pma,aureus,informationweek,chm,bonneville,hpc,beltway,epicor,arrl,iscsi,grosse,dfi,penang,zippered,simi,brownies,lessor,kinases,panelists,charlene,autistic,riu,equalization,corvallis,reused,volokh,vari,fordham,hydroxy,technologists,snd,dempsey,httpdocs,speakerphone,reissues,shalom,khmer,recordable,dlt,dredging,dtv,extrusion,rtn,preggo,defamation,theron,proteomics,spawned,cep,phendimetrazine,wiener,theorems,samplers,rfa,pasco,hilbert,tamworth,itmj,msd,etfs,cde,praha,zona,landry,crackdown,lifespan,maybach,cysteine,responsibly,slideshows,aceh,techtarget,geotechnical,fantasia,camisole,atoll,shredders,gags,rips,futurama,hari,ironman,ducts,marmot,remand,hawkes,spoof,spammer,presets,separations,penicillin,amman,davos,maturation,internals,bungalows,beckinsale,refractive,grader,ecd,transducers,ctxt,doxygen,rtd,akc,cgc,intercollegiate,zithromax,onkyo,niosh,rainier,furman,newsfeeds,larkin,biztalk,snapper,hefty,ipr,valdosta,ulead,delaney,hairless,lactation,innsbruck,offbeat,teenie,protons,machined,holman,eviction,dic,pio,regionally,thurman,canaria,showcasing,afa,certifies,primes,renton,lambeth,frappr,liturgical,easements,aida,openafs,assword,rving,exogenous,sram,sault,trolls,flor,rfe,oleg,smo,analyzers,scorer,swami,oilers,nik,mandela,listers,ordinated,arlene,dividers,recoverable,gators,intraday,cruces,hollister,enews,lactose,gifford,competitively,rockstar,hampstead,chrono,nahum,raja,nextlast,xinhua,ltl,lofts,feral,neurosurgery,ringgit,ukranian,parmesan,kiosks,pnt,hooking,wip,rawlings,physiotherapy,wrexham,billabong,prepayment,jonesboro,bangers,handgun,miscategorized,itp,desoto,innovator,mitochondria,mewn,sername,usmc,amicus,vijay,redirecting,gma,shih,cervix,biblia,cosby,lufthansa,msnshopping,sewerage,ele,mantis,alerted,lsp,intron,bri,remodel,carpal,natalia,cjk,specialises,condiments,adventist,eggplant,coun,ctv,wycombe,monaghan,blogarama,undocumented,esb,vaccinations,gutierrez,bernd,needham,inuit,wordnet,wedi,keyes,photocopying,tca,avn,dressage,cafepress,phylogenetic,kurtz,morbid,inno,refresher,freakonomics,impreza,cheeky,arco,proponent,brasileiro,kar,rojo,perscription,aic,streisand,eastside,bioethics,redo,piranha,rps,cmu,uncompressed,vps,pseudomonas,sotheby,avionics,minimization,ascot,linearly,dolan,titleist,genesee,grays,fdc,psychiatrists,bom,multiplex,srt,bradbury,babysitting,asd,beehive,aeon,livin,leblanc,shorty,injecting,discontinuity,littlewoods,enquirer,downturn,fission,modulator,spybot,hrc,worldview,choreography,sfx,nth,buffering,denison,killarney,scoping,srm,mammography,epc,nepalese,communicable,enzymatic,melanogaster,extravaganza,kamloops,spss,tftp,rotherham,underestimate,hana,mycareer,pra,cool
ey,gratuitement,eriksson,schaumburg,exponentially,chechen,carribean,bunnies,choppers,psyc,pedersen,earphones,outflow,scarab,toasters,skiers,eax,jamal,raunchy,biologically,nbr,ptc,qe,zyrtec,riyadh,pell,quicksearch,coates,octane,mtl,krabi,funders,apj,kal,fai,ccp,environmentalists,fatah,ifa,ackerman,gbc,soooo,soapbox,newberry,deanna,bestellen,elongation,webcrawler,wanking,ofsted,yb,dortmund,boardroom,nico,taping,mro,atleast,somatic,fcs,niki,malloc,lanzarote,slump,nerds,laude,mec,simulating,enrol,bts,cflags,xps,datafieldname,wycliffe,dda,apts,aikido,slo,batches,dap,ssr,kournikova,moshe,fsbo,shippers,mtc,cav,rrr,wildflowers,polygons,delimited,noncompliance,upi,sna,vidsvidsvids,herts,bellagio,webapp,haryana,eeg,dlls,babysitter,linotype,produkte,lesbica,pes,mediators,hone,riggs,jockeys,seater,brightstor,deliverable,sanding,buffered,orton,indesign,lakeshore,ctl,aland,clarins,pelham,huf,ronin,comps,mgi,greco,kontakte,edema,leaderboard,mce,hsv,geocities,argc,palos,ori,carotid,citi,squish,cny,gorham,calphalon,blasen,midwives,nara,nab,netbeans,cyclones,tapety,snowflake,blackhawk,weinstein,sterilization,assessors,chenille,dehydration,haircut,fhwa,misconceptions,alternet,undeclared,bari,songwriters,tolerances,incarceration,hierarchies,redondo,lactating,aquamarine,yg,edm,sedimentation,optometry,mobilize,attendee,bmd,dialogs,rpt,viktor,trajectories,federico,openvms,ppo,pag,precio,leapfrog,thermoplastic,sexchat,kingman,deterrent,ghraib,duplicating,tuba,encodes,garamond,cirrus,alanis,kilometer,ballarat,wacom,nsta,actionscript,ivf,modifiers,hijack,thomasville,accorded,fryer,namco,xmms,dammit,produkter,motorhome,ade,mfrs,editable,greats,milosevic,marcy,boron,creighton,wolfenstein,bolivian,rowbox,pauls,phobia,superfund,vcc,sadler,piercings,riffs,briana,geronimo,tetra,freakin,alb,retrofit,cytokine,stylesheets,coalitions,tactile,cinematography,vivitar,wannabe,blogwise,amador,skier,storyteller,bpa,pelicula,ischemia,fms,comput,wristbands,livecams,hibiscus,rheumatology,edn,somers,cray,iol,waterbury,selectivity,carlow,maxx,haggai,demonstrators,raiser,sanger,mullen,periphery,predictors,woodwind,snl,modblog,repo,burnley,antispyware,sumter,rcd,woodside,tylenol,megabytes,backlight,naturist,zephaniah,airbags,plethora,cabriolet,yh,retiree,atol,sonet,anthropological,mikasa,iverson,cae,buckeye,dollhouse,stereotype,uship,ubisoft,escalade,breakaway,produkt,sealants,montclair,dinghy,gnus,melia,feedbacks,concurrency,healthgrades,hoya,revista,lrc,flied,tvr,joliet,ped,chappell,wollongong,peo,blowers,doubleday,guidant,remodeled,eea,bcp,situational,nasd,chakra,dfa,jammu,wetsuits,edc,birkenstock,vivendi,emulsion,fielder,sorta,courseware,biosphere,skb,plumpers,muschi,qcd,ollie,gurgaon,rwxr,federalism,gizmodo,laminating,coltrane,colitis,unincorporated,liang,blogged,cryogenic,antispam,homologous,hassles,symptomatic,rtc,trademanager,bipartisan,rhodium,exchanger,preseason,januar,bumble,intimidating,randi,placenta,abbotsford,upn,dulles,brainstorming,wea,dougherty,sarcoma,sniffer,rotorua,bahasa,iona,bioscience,tricia,residuals,gforge,copd,homie,leesburg,afm,xref,flashpoint,mobygames,cortland,mailers,tented,nicholls,skew,mahoney,infoplease,budd,acn,hollands,muni,modernism,elizabethtown,dunhill,eee,didn,guidebooks,scotts,wye,wsj,biosciences,macgregor,atms,habakkuk,depaul,binge,cyst,hexadecimal,scissor,progra,smyth,mott,jazzy,headboard,diflucan,bronson,standardised,cations,cics,ecole,centos,hysterectomy,housings,wrc,movado,mcdonough,krista,pharmacokinetics,chantal,morristown,riverview,loopback,torsion,ultrastructure,lucida,leftover,sykes,anecd
otal,rheims,integrators,unlv,arboretum,sharealike,lowepro,erc,ischemic,illustrators,plugging,macbook,bjp,arent,vignette,qf,homebrew,altoona,pheromone,fireball,decorator,franken,netpbm,antalya,harmonious,nne,recordkeeping,modernisation,myx,sdr,muskegon,daley,modality,liberalisation,utilise,arturo,appellee,granules,multidimensional,rollout,homegrown,datamonitor,reinforces,dirham,leahy,myc,esophageal,kira,approximations,forzieri,intermediates,kgs,albumin,grantees,loveland,maloney,sativa,paramedic,trademarked,edgewood,stressing,potable,limpopo,intensities,oncogene,antidepressant,ballpark,powys,orca,mascara,proline,molina,nema,wipers,snoopy,informationen,esf,riverdale,unleash,juelz,bls,noarch,koss,captioned,paq,summarizing,ucsd,gleason,baritone,independant,chlamydia,relativistic,rotors,driscoll,andalucia,mulher,bagels,subliminal,insecticide,segal,spline,undisclosed,noni,letterman,almeria,bryson,wtb,towson,htaccess,malayalam,crue,loo,pinoy,pallets,uplink,sheboygan,terrence,ghc,gateshead,probationary,abducted,warlock,breakup,fiche,juror,bowden,goggle,metabolites,brainstorm,smu,ahl,bateman,egcs,chirac,museo,coffeehouse,scitech,gcn,trolling,elmore,grads,lz,andi,localpref,kayla,ccl,smeg,donut,libido,fuselage,diabetics,ballerina,crp,morgantown,paseo,ptsd,redheads,curran,diam,ragnarok,hkd,summarised,jx,caitlin,conscientious,bandai,hobs,eft,endometriosis,cushioning,mcneil,belvedere,nar,acetyl,boomer,perinatal,idm,automake,multichannel,petr,daredevil,corcoran,mrp,holliday,daimlerchrysler,bowes,mcgowan,agfa,mep,goss,mulch,jvm,harwood,ranma,marinas,mobipocket,streptococcus,murcia,landfills,mcknight,edd,baud,mcfarland,designline,undies,prepay,kodiak,printout,nonresident,marysville,curso,palmos,dorsey,roo,soulful,websearch,infotrac,mpgs,fouls,openssh,bravenet,etsi,serendipity,tq,sequentially,yogi,landslide,howtos,skool,evolves,iberia,anakin,duffel,goodrich,subfamily,perennials,ary,matchmaker,sagittarius,locates,dysfunctional,maastricht,bulletproof,mcr,uga,stenosis,chg,recentchanges,abrasion,eindhoven,opportunistic,pcl,analogs,bba,hillcrest,cantor,econometric,trafford,opie,cro,elkhart,ringers,diced,fairgrounds,cuyahoga,plt,cartons,mustangs,enc,addons,wstrict,gow,pharmacological,headwear,paediatric,genitals,hendricks,ivr,telemedicine,judi,icom,academically,chilton,cbo,amaya,flickrblog,fulbright,foaf,cllr,xh,fulltext,centrum,tecra,kinks,unisys,preschools,mcallen,contoured,aberdeenshire,icm,schenectady,schematics,dojo,eserver,nin,interfacing,borrowings,hrt,heparin,universiteit,hardcopy,connective,nihon,oso,adkins,dunlap,nsc,irr,clonazepam,wikiname,gaithersburg,biophysics,chromatin,mathis,bulova,roxanne,fca,drg,refurb,wasteland,plotter,findlay,cymraeg,alc,meek,phonebook,doodle,arb,wabash,chronologically,wms,whitfield,mchenry,eide,assy,dusseldorf,mmol,shabbat,nclb,accommodates,cmi,stacker,msf,touchdowns,plasmas,barbell,awk,bibs,sneaky,smarts,lankan,synthetase,lightwave,alignments,coached,jac,framingham,opensource,restroom,videography,lcr,spatially,doanh,preprocessor,cohn,aon,marginally,ocs,bak,cavalli,ddc,grunge,invoicing,bigtits,carney,braintree,southside,vca,flipped,cabrera,mindy,surfaced,glam,cowgirl,loginlogin,mtr,nakamura,layoffs,matures,cty,apm,iggy,margarine,sneaker,glycoprotein,gcs,queued,sab,hydroxide,hanley,cellulite,hwang,mtd,mcqueen,passat,fluff,shifter,cartography,firstprevious,vito,predicates,bcl,douay,zeitgeist,nickelodeon,dru,apar,tending,hernia,preisvergleich,britton,stabilizing,socom,wsis,anil,midsize,pullover,lpn,hoodwinked,photoes,beastie,yucca,harvester,emmett,shay,obstructive,pacman,retroacti
ve,briefed,bebe,krusell,clickz,kermit,gizmo,atherosclerosis,demography,migraines,wallingford,newborns,ljubljana,restarted,rnc,meow,thayer,kilograms,packager,populate,pembrokeshire,arcane,impractical,tcg,decentralization,honeymoons,authoritarian,alu,judaica,tropicana,tyan,cardholder,peavey,gothenburg,geocaching,ident,fluoxetine,tipton,teva,lsa,effortlessly,failover,cysts,primetime,kenosha,kokomo,penney,snorkel,amin,iridium,dwyer,conserving,toppers,cfg,tvc,alternator,nysgrc,underwriter,springhill,panhandle,joann,isoform,borden,bombed,elt,halton,guaranteeing,fasta,gonzaga,boobies,nadine,breitling,nutr,ingersoll,sandia,pacs,azur,helms,beos,srcdir,sherpa,tuff,ligands,smalltalk,sorghum,nucleotides,mmv,ebi,sbd,lmao,enhancers,collaborated,produ,lila,slotted,nnw,fila,decking,boz,accelerators,howstuffworks,neighbourhoods,michal,rab,hideaway,dwayne,coda,cyanide,kostenlose,grotesk,marek,interlibrary,provenance,sra,sog,zinkle,fanfare,mapper,boyce,mlk,dystrophy,infomation,footballs,emailemail,bathurst,fof,duracell,feinstein,magnavox,evra,servlets,tss,neill,epithelium,thc,webbing,bef,jaya,mame,ppe,emusic,tso,epp,glencoe,untested,overviews,affleck,flinders,informationhide,hearst,verifies,reverb,kays,commuters,rcp,welivetogether,crit,sdm,durbin,riken,canceling,brookhaven,gauss,artistry,phpnuke,falkirk,pitts,dtp,kwon,rubric,headlamp,operand,kristi,yasmin,gnl,acdbvertex,illini,macho,ningbo,staphylococcus,busting,foss,gfp,yhoo,sloane,wooster,delong,mdi,nilsson,substring,gac,smelly,gallatin,hangar,ephemera,heli,choo,testicular,miramar,wearable,carling,buildup,weaponry,swann,lian,landline,entrees,corpora,priv,geeklog,antiviral,profiler,lodi,minimalist,wolverines,bbcode,protagonist,rata,freephone,plm,raytheon,refseq,kingfisher,numark,moline,esac,takers,gts,amana,worldcom,hiroyuki,procter,pragma,winkler,walleye,icf,bagel,asbury,alpharetta,syncmaster,wists,xfx,wicklow,tsr,baer,yf,cmr,chil,leftfield,lettings,walkway,coos,petrochemical,fia,chula,zalman,carer,humankind,cmms,hawley,inverters,mccormack,pdu,faceplates,yeats,motorhomes,cie,icts,mcmurray,zucchini,lanai,pwc,chiral,fermi,newsreader,multiculturalism,cuddly,listinfo,shp,primedia,chl,estrada,pricey,shekel,apn,diocesan,readout,clarifies,klm,dimes,revlon,dtr,cranky,paparazzi,zheng,merida,bambi,interceptor,rox,jamster,noritake,banding,nonstick,origami,marketwatch,yeti,arf,umbilical,linz,donates,foursome,lawrenceville,azul,springdale,moisturizing,loeb,isr,huston,gatos,disqualification,suunto,angiotensin,spitfire,wfp,realnetworks,summation,plame,querying,gpc,autonomic,fq,pathname,novartis,ufos,manatee,qh,restructure,larval,zeu,socal,resettlement,mistakenly,radiative,drapes,intimately,koreans,realy,womans,groin,greenway,spamassassin,mata,gigagalleries,algerian,frat,egullet,electrics,joni,stencils,reinventing,reqs,latte,shaolin,shopped,beattie,hrm,hypnotherapy,muppet,abp,checkpoints,tpa,derechos,pieter,timesselect,viacom,strcmp,kardon,sideshow,classifier,westbrook,repro,moser,studi,sdf,colonialism,supermicro,scorers,sitcom,pastries,aldo,azim,authorizations,holsters,neuropathy,backorder,humphreys,metroid,vcs,nikkor,mcf,jacobsen,conjugated,lcc,unethical,vacances,whos,asr,alphanumeric,grumpy,fixedhf,holm,sirens,lfs,benelux,caters,slp,prasad,kirkpatrick,jamahiriya,tol,coagulation,girly,bnp,archdiocese,orbiter,edgewater,lem,keyless,repatriation,tortilla,dissociation,industrie,watercolour,ucb,waite,madsen,mnh,opticians,nop,newmap,mse,bottleneck,regressions,linton,sio,buckeyes,bodywork,applique,jewell,gef,hornby,redefined,empowers,informix,tots,goalkeeper,startseite,blurb,fe
edburner,dominatrix,norcross,compiles,bancorp,encoders,pmp,boomerang,temecula,ghg,structurally,caveats,homeownership,birdie,disseminating,lanyard,horst,interlock,pagers,esophagus,ocz,sexshow,jackpots,optometrists,zak,krueger,hickey,erode,unlicensed,termite,ibuprofen,drugstore,audiology,gannon,integrals,fremantle,lysine,sizzling,macroeconomics,tors,thule,gtx,eeprom,kaleidoscope,dmitry,thawte,busters,officemax,absorber,nessus,imager,cebu,kannada,sailboat,hectare,netball,furl,holographic,defra,salaam,respirator,countertop,gla,installments,hogg,partying,weatherford,sav,exited,crispy,coffees,knowhere,sequin,bendigo,unis,bandwagon,janssen,myst,polymerization,byval,nozzles,labview,snitz,rpi,hcc,unbelievably,pasting,butyl,ppd,forested,unrivaled,roadways,varna,maidenhead,almanacs,gfx,randomness,middlebury,muon,ringo,svr,caliper,lmb,woolf,innovators,anode,microprocessors,tps,stk,siting,misinformation,aneurysm,closeups,kinsey,prp,cnbc,eroded,tris,lonnie,hartlepool,bol,alastair,agr,fafsa,javac,uclibc,fodor,afrikaanse,colognes,contestant,snell,prescreened,believable,anesthesiology,elmhurst,misha,melatonin,bongo,rmb,mdf,terr,xw,bloke,avc,oxnard,cess,cedex,electrochemical,brevard,brw,brenner,slalom,waterhouse,calif,acces,aquatics,cari,lurker,buffett,chews,hoodies,phony,vila,fsf,gmake,nikko,grasslands,monolithic,polifoniczne,bugtraq,cpage,engr,subcontract,prophylaxis,texinfo,ings,cotswold,guillermo,unstructured,boop,hitman,tla,mercier,restated,nukes,duplicator,mehta,macomb,fundamentalism,australasian,isk,rerun,moda,segmented,cranberries,leas,pleated,handshake,digests,innovate,goode,erisa,jeb,dismantling,ferrell,hellometro,leavenworth,snowmobiling,fora,fdr,gaba,vfs,dlc,byers,codon,webnotify,sfr,pylori,loomis,acidity,gershwin,formaldehyde,welder,cyp,kendra,switcher,ocaml,goldie,mab,gooshing,mockingbird,ponte,xlt,hogwarts,juicer,lloyds,echelon,gabba,arranger,umbro,metallurgy,baa,neq,liteon,queuing,vsize,shiite,valuing,argon,coheed,hooray,flightplan,carefree,souza,kershaw,millar,biotin,salter,testicles,morph,econometrics,remo,msec,marconi,ote,receiverdvb,expatriate,tantra,codified,ncs,overlays,thingy,comforters,conservatories,ruskin,dpf,cyndi,germination,lipoprotein,ayurvedic,planetarium,tribeca,bihar,keenan,discos,eastbourne,robles,gianni,dxf,homebuyers,nogroup,freescale,wiccan,sess,merrimack,groton,billboards,searcher,uttar,mailinglist,metacrawler,priser,osceola,bioterrorism,tourmaline,leatherman,microns,unifying,anaesthesia,videogame,aws,dtc,chc,intranets,escalating,bluebird,iucn,gls,mahjong,interstellar,kenton,underestimated,groupsex,loudspeakers,flexi,vst,junctions,redman,transferase,bvlgari,hampden,nls,selby,wausau,stoppers,snowshoeing,uppercase,cirrhosis,publib,metrology,connexion,stoneware,moncton,traci,krumble,pathogenic,rasmus,raritan,riverfront,humanist,usefull,pompano,skewed,cleary,nepa,ludacris,sequenced,xiao,teaming,flatshare,aromas,positional,alesis,glycine,vee,breakthroughs,cashback,throwback,charlestown,nexrad,gestation,powering,magee,osnews,logins,sadism,emb,muncie,panoramas,plenum,ato,aotearoa,foro,hydrolysis,flac,labia,immunizations,existential,umc,sweaty,segond,addis,beasley,breached,rounder,rectum,nha,perched,jah,dsr,lta,videoconferencing,cytoplasm,makin,sedimentary,laurier,aachen,wnd,olney,massimo,chlorophyll,scop,shipyard,centering,manley,sunroof,dvorak,etch,answerer,briefcases,gwent,bogart,amit,kaufen,untranslated,raffles,reconnect,teeny,benthic,mcmanus,infotech,carlin,lithograph,ure,stoner,repost,iras,resurfacing,kelli,spitzer,jae,dunne,hyperbolic,pstn,bisque,anzeigen,standoff,westbur
y,solano,kailua,acoustical,photovoltaic,orchestras,redline,reggaeton,qstring,declan,tama,wank,virol,iy,solvers,linuxworld,canadiens,rockabilly,smokin,tumours,loudspeaker,handicapping,tatu,evangelion,excretion,breakage,negra,horsham,jing,petro,notations,midgets,comprar,homemaker,neverwinter,ddt,categorize,geophys,loa,tga,foreskin,jornada,inetpub,premierguide,reflexology,sophos,helphelp,foundries,registrants,sweats,atvs,capstone,adecco,sensei,publicized,transessuale,federalist,objectweb,portrays,postgres,fesseln,hidalgo,prosthetic,kristine,microfiche,dce,watergate,setbacks,karan,cdata,kfc,grandview,amerisuites,aural,gatekeeper,heinemann,decommissioning,nq,gestion,thermodynamic,patrice,profiled,disambiguation,mmmm,bittersweet,mul,gustavo,isolating,xine,bigfoot,nrw,mycobacterium,yamada,coldwater,whitehouse,cultivars,santorum,mugabe,margo,rundown,carbondale,gizmos,effingham,beastility,agus,ucd,dowling,mitac,steels,oakdale,nda,mystique,cortislim,oes,disp,loaders,trouser,oai,hoboken,sepia,differentials,sabi,dancehall,sarajevo,brava,underscores,roadshow,fbo,sabah,russel,nephrology,squamous,mvn,wz,malden,mita,orissa,ise,vfr,chianti,minsk,coffey,domestically,qantas,brandi,artefacts,solihull,tation,tchaikovsky,refineries,ronan,pricewaterhousecoopers,swimsuits,automates,wylie,whomever,sidelines,shaffer,toolbars,preservatives,wagga,kenai,bobs,mortensen,unplanned,characterisation,ppa,mip,peering,fopen,vgn,wmissing,csn,rudd,bourke,pelvis,goodmans,potluck,ioffer,cial,davidoff,creamer,tsc,gfs,contax,columbine,portables,fledged,aquinas,kidz,edonkey,hourglass,pagetop,paloma,gunmen,disables,ssangyong,antiretroviral,moschino,hoyt,okc,lockport,pittsfield,pollack,hoyle,arousal,inhibiting,reo,mammary,trampolines,hillman,trimmers,bridgestone,muvo,wcities,boi,diddy,conveyancing,apl,echinacea,rok,phish,frigidaire,oxo,hah,halibut,penrith,brno,silverware,teoma,rcra,mlo,ideologies,feminists,fff,sculpted,uq,rta,embo,rollin,contraindications,einai,ssrn,oup,rebuttal,underside,alumnus,archeology,preise,ontologies,fenders,frisbee,hmmmm,tipo,hyperactivity,seagull,nanotubes,polos,bonaire,hehehe,fim,reece,elsif,spinners,annealing,maximizes,pld,ctp,eurasia,dickey,ako,carpeting,yorkers,ltte,eukaryotic,bexley,sions,bremer,marisa,frustrations,delgado,resection,dioxin,islamist,brant,hss,kubrick,fft,touchscreen,layoff,facelift,decoded,gry,shitty,dodger,ihs,lessig,zaf,revell,sched,rpgs,euphoria,acuity,popper,lockdown,nsp,transmittal,heatsink,assholes,hayman,novi,equilibria,requester,allrecipes,serialized,hangzhou,bjork,stringer,nanjing,milligrams,jab,snohomish,strathclyde,yoko,intramural,curated,finalised,tania,cdd,gund,tascam,noam,hardstyle,arun,cga,waistband,fibroblasts,leandro,metastasis,userpics,greenbelt,leuven,printk,reachable,pss,radioactivity,caine,gyfer,boch,howdy,cocksucking,marlon,timmy,liga,gregorian,reorder,aerosols,archeological,logarithmic,sexape,robby,completions,yearning,transporters,sandalwood,megs,idp,rapidshare,tsb,omnibook,gamepro,bca,decontamination,tamiya,euclidean,salina,woodford,formalism,aching,nbs,audigy,libexec,eyepiece,bibl,bobcat,freehand,guo,ltsn,itil,nugent,esr,sce,killeen,jamming,applicator,icrc,mezzanine,meghan,cupertino,logfile,zed,humidifier,padilla,susanne,collapses,yung,longwood,krw,mainstay,descr,dtm,atcc,tasman,accessoires,mucosa,dachshund,zf,syringes,breakpoint,telus,stoney,nepali,regimens,wok,canola,slicing,reproducible,experi,skydiving,sof,bogota,discogs,datagram,videographers,cag,nicks,platelets,trannies,pamper,nineties,bracknell,disinfection,perfusion,postseason,tigerdirect,smoothie,punishe
r,tabbed,tcu,alene,lismore,coquitlam,auctioneers,somethin,daniela,dials,enhydra,kyrgyz,iia,bianchi,iata,zim,buscador,roadrunner,blackhawks,jsr,misfits,quiksilver,nwn,sqlite,siu,tarantino,addi,jkt,buyout,replays,wcs,adrenergic,bottling,caldera,baseman,botanicals,techie,farr,vtech,donde,beyer,versiontracker,pse,hashcode,tradeshow,lewisville,aster,transparencies,bloomingdale,northrop,revo,overkill,nlrb,lazio,enr,diag,chiapas,freedict,disponible,morissette,effortless,hydroelectric,cranial,hindsight,orientated,abrasives,fpc,brl,vpns,feingold,thunderbirds,dha,wot,geog,harrah,wxga,nmfs,boynton,cashing,spousal,abusers,twinlab,vick,aml,sodimm,copley,mallard,twikipreferences,airman,configurator,clc,neurobiology,diamante,dreamworks,corsets,dowd,escrituras,bureaucrats,songtext,wham,phpgroupware,cyclin,conyers,youll,kowloon,fairytale,pickens,bybel,mln,wres,barm,amplitudes,nmap,nvq,ocd,ryu,microcontroller,premiered,institutionalized,hamm,gyno,bhopal,circulatory,centerline,chairmen,guerlain,pedo,hussain,portlet,proscar,histone,opioid,totalling,pyobject,translational,lehmann,keaton,elkins,jamison,interstitial,inest,tanzanite,helical,redlands,sagradas,fondue,windscreen,adderall,othello,supersonic,pocatello,maniacs,sysadmin,foothill,earmarked,highspeed,uncheck,rapes,vlad,cif,photosynthesis,junit,remotes,epo,mcm,ucf,nacl,sfa,empirically,dfes,addon,pon,feelin,callmanager,deteriorating,statenvertaling,cypriot,entert,fascia,woburn,jalan,fryers,cally,layering,geriatrics,picky,conley,boces,barth,lvm,mooring,mcdonell,expats,bizarr,loadavg,perla,micheal,bok,friendster,endoscopy,msx,buzzwords,lumen,airwaves,jagger,setups,inman,schindler,limewire,drawstring,midrange,frodo,superpower,recliner,trisha,trium,utm,grimsby,wyeth,urs,kds,adjuster,impeccable,shari,marketplaces,tefl,sudo,technische,characterizing,gawker,gagging,cyclist,atg,generics,richey,magneto,crunchy,teletext,drwxrwxr,crabtree,underfull,hemscott,webmasterworld,objc,musicmatch,sealant,timberwolves,harriers,shangri,robo,roto,mnem,nnn,aidan,fidel,executables,concertos,vob,extracurricular,haverhill,squirters,hbp,tonal,atr,ashtray,gpu,payton,psychoanalysis,hesitant,poco,nedstat,rcmp,microchip,eroticos,fea,kors,susquehanna,userinfo,modulo,antler,bangladeshi,desking,nikolai,nuys,ludhiana,rdr,spankings,chatrooms,pretreatment,brittney,jer,tianjin,qj,winnebago,mcfadden,notecards,tix,murfreesboro,quaternary,subtracted,tropez,mcgovern,olivetti,hikers,vivaldi,cuties,lnb,gilchrist,preheat,bernadette,microdrive,rookies,overton,potpourri,neiman,seb,sigs,jarhead,momo,uzbek,ttt,dubya,signatory,cim,energized,brite,shs,minimums,needlepoint,deng,camargo,oems,bolle,webrings,ehrlich,azz,firefighting,icalendar,disallow,exch,mclachlan,zaragoza,brixton,efi,kilo,tcmseq,moisturizer,suonerie,remanded,empresa,shoebox,disagrees,lowdown,trove,filer,apologetics,englisch,texarkana,threonine,metart,siti,encephalitis,tomatometer,arias,kenner,anamorphic,subspace,cleats,ifp,circ,pressured,peppermill,sml,clarifications,zionism,pti,retin,klicken,disjoint,ema,openldap,koenig,carats,hijacked,tch,burlingame,checkbook,candice,coworkers,eno,karla,cus,gio,statm,haifa,reincarnation,budweiser,heuristics,tunisian,hologram,macular,eral,refinishing,chia,celestron,leyland,reloading,hombre,munch,basf,rolleyes,bidirectional,ahhh,chica,starfish,kurdistan,boro,heartbreak,preps,irina,mylar,congestive,dmd,schilling,twikivariables,battleground,tectonic,equate,corbis,inflatables,naacp,pathologist,minnetonka,langston,memoriam,underserved,rectifi,elmwood,fukuoka,glbt,rsi,parr,pob,ods,welles,gujarati,sportsline,leno,he
althwise,vrml,sida,azres,sapporo,jscript,predictability,pajama,paddlesports,adenocarcinoma,toning,gestational,kravitz,ptcldy,snowball,adl,travelogues,crl,zocor,ecotourism,leadtek,hkcu,morehead,niro,fueling,orthopaedics,crayons,tikes,revamped,olap,curfew,hamlin,brandeis,bree,stylistic,corneal,beckman,crusher,riva,prefs,militaria,marshfield,elo,swank,matisse,villeroy,proactively,mccarty,zas,acdbcircle,horney,modeler,progressives,grosvenor,linger,creationism,dork,claritin,psychosis,fei,firsthand,gigi,cranston,hayley,ags,muted,turbidity,mountable,kiki,vz,avondale,oceanographic,zzz,tsg,epl,nonzero,iwork,scavenger,touted,candace,kava,kronos,adjuvant,tyneside,travolta,sari,preventable,bumpy,aleph,lga,conroy,mastermind,vaccinated,coburn,rawk,acceptability,stryker,surcharges,noticeboard,chapin,permutation,colpo,ucsc,mulligan,fod,ketchup,alimony,tng,viscous,skk,cmm,unambiguous,emphysema,epistemology,grantham,avila,solana,toolkits,soloist,rejuvenation,chn,jse,anaconda,bsnl,carfax,leveraged,wega,scanjet,ibc,meng,burley,efa,freesex,plasmids,steffen,xz,woofer,lada,hinckley,millimeter,snape,rollercoaster,tdc,connery,newswatch,roundups,keylogger,parka,scouse,unists,timo,hea,spock,ffs,bmj,farrar,decompression,draco,mika,galena,msft,inactivation,metafilter,mbna,lymphatic,ofc,gian,berks,hdv,wirral,boxset,ashrae,ilford,allman,kroon,gmo,sdc,builtin,lisboa,coc,rollback,westgate,thd,bobo,crockpot,weaning,snowshoe,hijackthis,backside,fetchmail,candlewood,angelfire,ucsf,painkiller,nutty,fenway,restrooms,myeloma,scallops,osteopathic,vividly,rmit,countermeasures,ofertas,gwinnett,dirs,duvall,wildflower,stackable,greensburg,barebones,merino,stooges,chatsworth,jello,mtime,barium,toric,looting,kiefer,agg,mauro,shearer,decca,hydrophobic,unsw,millard,btn,terraserver,returnable,ohs,resuscitation,cancelling,rns,nrg,stratification,oliveira,cahill,grumman,webdav,adagio,sunburst,ayumi,sev,zt,bela,swt,startups,ranting,udaipur,tonya,erupted,ghostscript,meltdown,rainwater,gellar,alm,vy,cnrs,redefining,shar,vesicles,piccolo,scalia,resizing,showrooms,verifiable,lobo,nunn,boyds,havens,bacterium,zb,sideline,bushing,ligament,penpals,translocation,costco,serialization,wst,playgrounds,universidade,fong,hbs,zips,ntot,eigenvalue,conductance,albemarle,mudd,dvs,niels,explodes,lindy,coimbatore,panzer,audioscrobbler,keri,soviets,tweeter,poncho,sids,faerie,oooh,oceana,ayn,wakeboarding,stinger,yuba,chipsets,anastacia,collapsing,yaoi,gwyneth,kuwaiti,jalbum,storageworks,duplicators,cubicle,rana,winfrey,avanti,iop,blige,papaya,auger,macclesfield,mongoose,crossfade,instrumentals,iconic,sulfide,dawg,mahler,maurer,auschwitz,gambit,accom,stb,uxbridge,baan,baumatic,slt,landis,fredrick,jogger,occlusion,jz,charlize,covent,reinvestment,ssdasdas,chatterbox,neutrons,fss,silo,polystyrene,amon,jodhpur,intelligencer,dundas,netmag,molokai,pluralism,kobayashi,tetanus,bcd,neuromuscular,fkq,caribe,iit,nphase,multifamily,timres,nrcs,farnham,coors,execs,hauser,citeseer,hiker,manuf,strategist,electroclash,outlays,ktm,zloty,osmosis,mojave,renova,hsp,soothe,mariposa,bir,advancements,franck,bock,fsm,leary,slurry,ker,dte,soulmates,marissa,sga,beretta,chiropractor,vibrational,sandusky,obsidian,dressers,winger,endeavours,argonne,runnin,bfi,gaye,colfax,logics,camedia,ctd,optimise,ernesto,voeg,adamson,coeds,subdirectories,asain,guilder,comparator,sealer,sleazy,onstage,todas,waterproofing,devlin,riel,pinky,lewisham,mints,wdm,avocent,invertebrate,brea,rebellious,carnitine,trib,webex,pairings,guesthouses,yikes,exorcism,grilles,mim,cultivar,orson,teammate,idn,hrvatska,sequencer,g
randparent,demonic,wonka,prezzo,opto,collaboratively,oberlin,nrl,gorda,newburgh,alcoa,mums,facs,lossless,mmp,beasteality,imbalances,andean,superconducting,spectroscopic,armpit,dect,mew,worsening,symp,igf,metalworking,groundhog,clomid,ginkgo,decedent,dimethyl,retval,openurl,baku,telescopic,vespa,phasing,lactate,poughkeepsie,dodson,monorail,bookworm,enero,sabbatical,ced,skeptic,backlit,smr,kentech,lamette,gita,itm,ath,hennepin,foucault,onshore,acls,pwm,florals,millimeters,krauss,asca,wicks,pathologists,fanfiction,pathol,toxics,ipcc,kinesiology,potions,tern,squirts,delmar,storybook,grenades,rls,etrex,contrasted,opting,hauled,taupe,renta,grd,odeo,jiangsu,osd,hookup,myron,atb,ctg,doreen,altima,keepsakes,seawater,ecko,zarqawi,contenders,conveyors,accenture,iagora,haier,crutchfield,fulfills,rota,kelso,petaluma,ifrs,servicios,printmaking,miata,julianne,dotnet,reconstructive,metcalf,vicksburg,gri,bookshelves,supermodels,glycerol,wiseman,sliders,carhartt,redford,itemized,rsp,defamatory,eir,matheson,amalfi,currentversion,renminbi,yap,mangas,bottlenecks,pyrex,huffington,sculpting,sedans,dpt,hoobastank,launchers,finishers,psychologically,ssm,schaeffer,northside,interdependence,microfinance,droplets,inducted,fos,uninitialized,conor,repercussions,woking,longmont,medion,monika,hydrological,runes,hobbyhuren,ents,ortega,breweries,landon,burrell,forecaster,quickie,stephane,parabolic,boreal,bankroll,bioassay,martinsville,ldem,interventional,teensex,tabulation,joop,creampies,trier,arbitrage,dogwood,convergent,enviar,hutt,majoring,techwr,glitches,dugg,qwerty,equivalency,rela,sedation,quik,rosemont,xk,harmonics,devi,highschool,orvis,centimeters,lavatory,destructor,accelerates,opts,relocations,wilco,tricare,beckley,ryde,januari,kee,blacksburg,anova,midfielder,tornadoes,nand,ladd,docklands,mgs,tanzanian,padi,msl,clamav,megastore,xander,eon,winelands,syllabi,elif,lorne,noida,visalia,mykonos,wcc,krieger,safeway,sheri,prosite,wikis,mozzarella,glenda,uta,dqg,waterville,yonkers,republish,endoscopic,dilbert,vfd,transen,konqueror,feliz,biscayne,sexocean,debconf,disproportionately,taskbar,libero,synchrotron,tet,memorize,marquez,williston,muppets,volumetric,umpires,shuttles,jumpstart,motogp,hyperplasia,nber,donahue,parodies,prado,legit,humax,scrapped,ingo,dillard,orphanage,disruptions,erasure,preamp,pde,mcallister,ziegler,loewe,dowload,msb,iptv,bondi,freelancer,felton,dpp,umax,radars,dmg,materiel,megadeth,cooperstown,sdh,staffers,mawr,daw,comptia,teddies,upsilon,sizable,coenzyme,enzo,afterlife,mather,ncurses,harddrive,cml,counterpoint,batesville,skywalker,franke,takashi,wristband,jimenez,esque,chiller,barra,ales,worthing,zna,jonathon,psr,sump,breadcrumb,sucrose,amro,portege,neogeo,renewables,filipina,sgs,mbas,ihop,cortisol,banshee,supersedes,bullseye,prezzi,rbs,pacino,cajon,downloader,seabrook,leif,jrr,iwc,taranaki,chronically,merkel,megaman,setq,preschoolers,vcl,unenforceable,lto,busi,noone,rotc,fisheye,oaxaca,gerontology,microsano,predation,gaas,kilimanjaro,exacerbated,emr,infestation,yarra,volker,linearity,huey,aerials,stylist,porosity,schofield,alam,sprayer,tirol,sfu,gliders,corby,wenatchee,prognostic,unregulated,mult,pittman,bbl,hadith,ots,kdelibs,jayhawks,teesside,rav,lobos,reportable,dickerson,carotene,filesystems,enrollees,cena,sanjay,compaction,juicers,gemm,methionine,lala,toplist,holyoke,dewpoint,rdiff,osp,delimiter,forsaken,richfield,hangout,striptease,jhi,amf,sonicwall,burgeoning,unicast,amnesia,cipro,cherie,klip,libxt,menswear,inthevip,wrenches,actuate,capote,cvd,flexeril,molar,databank,montevideo,sunglass,lhs,
kassel,followings,shipley,accretion,asha,bullpen,mamas,schreiber,gnc,dysplasia,freeroll,efl,igs,utopian,kota,iden,dil,wia,sosa,negril,hyped,epidermal,autopilot,garza,decrypt,batik,crain,subd,utilising,dsu,fermanagh,idr,interoperable,mam,delano,sonja,plex,compat,replaceable,forint,nudism,netcom,formulary,irvin,galery,hounslow,fosamax,striping,excavating,recoveries,mrsa,mainstreaming,awt,hola,hoody,dci,geri,seasonings,marcelo,pantech,fcp,scaricare,roxbury,clamping,whiplash,dildoes,takeoff,wiggle,truely,henna,cartesian,gamezone,yank,llewellyn,shag,asymmetrical,universitat,williamstown,trolleys,interlocking,doped,headband,internetweek,outperform,ncp,harmonization,hamid,differentiating,hitters,konrad,wickets,restarting,bcm,xilinx,wideband,tmobile,rocha,pbox,aea,stevenage,moorhead,directorio,restructured,aerodynamic,hopewell,evaluative,zuma,annuaire,subtracting,bram,kuna,logbook,xor,louth,pict,truetones,gabor,rotates,ezcontentobjecttreenode,leanne,bgcolor,rescues,wim,corsa,causality,tiling,ethnographic,waffles,doubly,fandango,powermac,catalysis,annexes,lisle,pushj,naylor,wrongdoing,paducah,gunter,iranians,aat,commandos,abcd,repeatable,deh,epiphone,scf,weekender,milner,schott,welders,semifinals,quantization,surfacing,vegetarians,hagerstown,polyclonal,transponder,gottlieb,withdrawl,geneid,tierney,glock,guatemalan,iguana,glaring,cifras,salman,choker,ecologically,scoreboards,mohr,dpa,spaceship,digimax,moremi,btc,technologie,tunica,powerbuilder,aorta,unconfirmed,dimitri,degenerative,delve,torrey,celica,beloit,nir,substr,lowrance,ballantine,crimp,bss,mousepad,umbria,oregano,rashid,microtek,geary,boaters,soyo,visualisation,brianna,handlebars,weightloss,interconnects,playtime,enrollments,gyllenhaal,criticality,geoscience,mhonarc,golive,deville,meh,moseley,spacers,unido,deferral,hersh,hilliard,vlsi,keegan,feces,uy,bute,activewear,transcriptions,metered,bugfixes,cami,interna,quintessential,babycenter,gardena,cultura,stockpile,psychics,pediatr,williamsport,westlaw,hetero,meteorite,extruded,lakh,starware,phage,laszlo,hernando,vogt,wolfpack,lags,eldridge,wray,hajj,edirectory,longstanding,knitwear,apocalyptic,fatties,darmstadt,mco,ucsb,fillings,marti,aberystwyth,infineon,fdd,inflows,tmpl,estuarine,lita,nubuck,socialization,estock,mbit,valign,caving,vec,alkyl,artichoke,leasehold,directgov,ubiquitin,fuerteventura,hairdressing,dhhs,fecha,nio,wsi,quigley,yellowpages,pretec,biomechanics,microcomputer,discipleship,hella,womack,magnifier,acdbtext,pitney,esters,haan,ofcom,ablation,nutcracker,dosages,prn,zm,dfs,multiplexing,indentation,hazmat,eac,dalhousie,ahem,retardant,shankar,overheads,southfield,iee,gnustep,spm,azkaban,dermal,metar,sizeable,aftershave,lahaina,earners,tenderloin,dji,ipp,chee,hamburgers,oliva,gaultier,cios,margie,nms,wandsworth,caltech,stapleton,gsc,francophone,sqm,xoxo,coord,mocking,nri,serengeti,raccoon,shrinkage,prd,uris,hamsters,codphentermine,thrashers,calibrate,gilmour,rambo,cleburne,serrano,niacin,strawberrynet,wesson,ormond,oxycontin,bibliographical,wynne,glyph,nagios,marinated,marko,sfas,genotypes,conde,alford,madurai,evacuees,urbanization,kilgore,unwired,elseif,pneumoniae,skyscraper,ebags,gnn,tooled,intermec,charlottetown,submersible,condensate,matchup,undefeated,krs,movin,kino,vidio,photographing,pocono,footjobs,trackers,kinkade,unify,dissident,sperry,iframe,tur,commu,xterm,swapped,stent,vermillion,angiography,areaconnect,brockton,daz,abcdefghijklmnopqrstuvwxyz,dunst,livonia,specialisation,nsi,walgreens,plasticity,crux,nhra,armband,leamington,mosley,iga,stemmed,appleby,grayscale,labonte,le
k,cartoonist,flotation,geol,deterrence,cardin,aardvark,cosmological,dothan,isotopic,hadleionov,langford,ssg,understated,obit,unt,randomised,amphetamine,shia,grout,reba,wrx,rsgi,bharat,sls,slg,kilometre,tristar,gippsland,pastels,stallions,paramedics,fishbase,rolla,curie,bootable,skit,sourcewatch,decimals,boe,catania,countertops,paola,elwood,hocking,prerelease,seqtype,femoral,anz,visceral,fructose,edta,silverstein,broderick,zooming,hamasaki,keswick,extinguisher,subpoenas,spiele,rincon,pll,donny,vitale,fledgling,boinc,traversal,bagder,erick,kcal,midfield,hypersensitivity,redshift,glaser,sado,cusco,imagemagick,uic,fernandes,prosthesis,jsc,omron,alberghi,electricals,kelp,taker,placeholder,moulton,yall,npdes,massages,catalist,metarating,tupelo,syriana,batt,dbms,asb,videotapes,backseat,kauffman,manipulations,accomodate,tioga,aylesbury,submenu,kwacha,chondroitin,sandpiper,vamp,overarching,janes,selectors,condoleezza,internationals,estuaries,schulze,osti,paleontology,emporio,stepper,reykjavik,waterskiing,renfrewshire,superheroes,marg,leftovers,mariano,bangboat,guestrooms,urethane,stoughton,paphos,sprinklers,accum,bms,datsun,sainsbury,chefmoz,helo,yvette,procmail,midsole,ayuda,geochemistry,reflectivity,moog,anth,durand,linea,butterworth,datagrid,metetra,rodrigues,apprenticeships,oncol,dop,asymptomatic,retails,offroad,simpletech,gandalf,minot,evidentiary,kpa,whelan,synthesize,doan,localisation,laparoscopic,pem,hotelguide,bayview,overridden,sorensen,hinds,managment,racially,stinky,riverton,expertly,mgc,langkawi,ftpd,colloidal,guarantor,imperialist,suc,veneers,reaffirmed,zambezi,tibia,raquel,wpt,kiddie,tulare,venturi,sundries,linebacker,danzig,neurol,beanies,irreducible,trixie,ridgeway,henckels,srb,verifier,dimensionname,eurasian,galbraith,pesky,underwire,salvia,aep,radioshack,sportstar,alana,upd,duma,osh,ddbj,stah,scripted,ated,mutagenesis,posada,vocalists,tiburon,lpc,geiger,cmyk,everlast,obits,jekyll,sportsbooks,andaman,hallam,spoofing,rockhampton,reauthorization,poolside,xiamen,trc,pita,chopard,skeptics,nast,motorist,kwik,peritoneal,jaffe,freebie,harare,tunbridge,spycam,lowes,lineto,ncaab,publicize,neohapsis,sanibel,bulimia,newquay,intros,ladybug,analyser,armando,conwy,algorithmic,rectifier,banknotes,aem,bookshot,bassoon,scrapbooks,hydropower,clearances,denominational,dominguez,meas,tamron,dfid,vlans,spreader,deu,otolaryngology,ezines,vbseo,snowmobiles,oca,phen,educa,lagrangian,dubrovnik,idt,eases,hippocampus,crim,repeaters,longoria,matsushita,reimbursements,kotor,encodings,yuen,eqs,eca,actionable,gangbangsquad,cornea,overfull,southgate,minibar,kitchenette,ols,liberian,tuc,hth,repairers,liczniki,rcc,numerology,armitage,brac,barware,corsi,normalize,gsp,bcr,krt,buffs,tamoxifen,phenotypes,kinross,kieran,informatie,mccallum,triplet,geosciences,sonics,timmins,django,pllc,lotta,upg,nhtsa,swissprot,archaeologists,voss,pussys,moveto,tentacle,stx,iaudio,prednisone,salespeople,motility,dengue,gaiman,incineration,dumont,shanks,bissell,organza,centralised,unbreakable,supersized,depictions,wml,sexcams,kaffe,karim,aww,gtc,pbl,cael,separators,informatique,resetting,indepth,funnies,cumin,chicagoland,keystrokes,setters,inertial,payless,ona,pec,payee,cinematographer,preorder,oig,teenies,ppv,ventilator,annonces,camelbak,klear,micrograms,pediatrician,cymbal,convective,haymarket,nosed,bre,shogun,rescheduled,bala,sidestep,readline,preemption,microbiological,corticosteroids,pseudoephedrine,stockholder,engnet,quanta,sturgis,synapse,cwd,innostream,airplay,uppers,sib,pitman,bodrum,leathers,embossing,redirects,fuzz,roscommon,m
eryl,izmir,meticulous,multiplexer,menorca,dendritic,minima,wstnsand,naproxen,operands,mikael,conceptually,crichton,cct,nics,hardwoods,clarita,xfs,capping,parisian,humanism,hiroshi,hipster,accel,annualized,sandi,npa,becca,basildon,khoa,testis,uclinux,unusable,tigger,approximated,dhea,consulates,wonkette,versioning,breakdowns,dbh,periodontal,macmall,iphoto,uncredited,recordi,lacroix,rupiah,bullish,hippy,klik,northerner,xsd,mackintosh,kenney,fabricators,mutated,layne,moonstone,scilly,sheng,fsp,yk,strep,offical,hps,tampere,testo,synergies,fundamentalists,amyloid,emachines,understandably,icarus,appletalk,goff,dialed,geoxtrack,bemidji,harcore,intermodal,spx,catalunya,baymont,niall,mitts,rik,nappy,diario,khalid,fuchsia,chowhound,muscat,ffff,kmart,handover,knott,butterfield,hialeah,finney,salamander,driveways,ummm,ayres,lukas,cavan,aswell,skippy,marginalized,sooners,cityguide,maritimes,permanente,texaco,bookmakers,speci,hgtv,contacto,mbc,marston,newsline,coverages,bap,specialities,loca,systematics,renderer,matsui,rework,snowmass,deq,rosh,coffs,cleansers,acu,webby,footbed,inicio,moretrade,apogee,allergens,worsen,mlc,applica,tankers,whopping,issey,rtr,bes,cust,brookes,anim,tull,informatica,computeractive,finline,permissionrole,quickcam,shunt,rodeway,scrollbar,breen,voyuerweb,mbe,kenshin,dpm,clackamas,synch,patten,leppard,allis,estimators,functionalities,rmt,downes,koffice,evidences,mux,dbx,fetishes,isaacs,outrigger,enclave,fibrillation,licorice,statically,ipl,dixons,goldmine,lhasa,developmentally,ziggy,ingles,senders,steamy,atf,madhya,marinade,passwort,extinguishers,stratosphere,tbilisi,updater,geico,fld,cabos,companys,tinputimage,ggg,nicaraguan,icn,wanganui,sconces,insulator,endometrial,mohan,hegemony,focussing,gallerie,bioperl,eprint,tennant,ebp,tryptophan,checkin,gilroy,extensibility,aei,qg,mcculloch,thang,lorem,seng,bianco,salma,consortia,asimov,renato,bungee,murdock,hokkaido,alternates,brdrs,configures,multilevel,mvs,pce,albertson,renoir,getclass,perthshire,mucus,suspenders,realtek,morons,dismantle,pharos,obp,zovirax,twikiguest,reimplemented,eavesdropping,orgs,numerator,gds,nme,resurgence,metastases,gino,timings,mecha,carburetor,merges,lightboxes,icra,jeopardize,ltp,loews,fanlisting,flet,bds,hyland,experian,screenwriting,svp,keyrings,hca,hdc,hydrolase,koa,mobilized,accutane,zonealarm,sexkontakte,canaveral,flagler,someplace,vcard,antibacterial,rund,extremism,edgy,fluctuate,tasked,nagpur,funroll,tema,flips,petsmart,libuclibc,chaney,aventis,macrophage,palmas,useable,ferndale,saipan,councilor,tcr,myinfo,jellyfish,newington,reissued,mpv,noa,airconditioning,wiggles,bho,synths,kennesaw,rubbermaid,spector,medica,ayer,incumbents,ashok,vern,writable,usepa,reflectance,mobo,bunn,chiba,uint,tgb,yj,coliform,selena,olmsted,broomfield,darpa,nonpoint,realignment,undermines,ferreira,sasl,defibrillators,kraus,certs,nwa,jstor,aarhus,supercomputer,bouncer,phenol,jigs,loudoun,lifetimes,grundy,histamine,byline,mbox,mustafa,bedlam,ioexception,abdel,bothell,synergistic,aur,lippincott,maplewood,tillman,maints,rhp,handball,shandong,cch,stylized,folate,lenoir,manitou,cytometry,goofs,wokingham,connors,musc,ripon,nypd,plexus,systolic,hyman,unreachable,deepak,desarrollo,tian,jisc,merc,covina,noonan,ufc,modernist,waring,janie,fams,yasser,weathering,totalitarian,putters,waypoint,prx,interrelated,delray,lifedrive,santander,southbound,solidworks,cronin,averatec,huren,patios,firebox,synopses,venta,sadr,tuples,brdrnone,diarrhoea,sonatas,barbecues,walther,deadwood,mancini,rpmlib,milpitas,commonsense,bsi,piii,romford,emporia,digidesi
gn,violators,phrasebook,reconfiguration,sledding,lakefront,excision,traceability,yangon,booktitle,lemony,recursively,ney,kilda,auctioned,hennessy,basset,antwerpen,paltrow,rda,limiter,imtoo,jmp,cornwell,dah,blueberries,notting,comprehensively,amar,deftones,apg,zyxel,kno,limelight,schmid,alg,bme,solis,cdx,mju,hoosiers,criss,glynn,aerotek,unmet,toa,competes,olathe,ciw,compositional,sez,trig,taylormade,catawba,mbytes,ordinal,tth,inglewood,gila,magnitudes,downed,firstname,metairie,polluting,wellcome,pedicure,duplexes,edgewall,webchanges,backplane,daschle,transceivers,disrupting,biodegradable,spore,meps,phpmyadmin,bloodrayne,tessa,unrealized,hei,artistas,roomate,acetone,alanine,elko,dvdrw,spt,ries,inthe,blitzkrieg,nickels,banbury,igm,snf,optra,choctaw,issaquah,interactively,fredrik,aventura,ewa,dpic,mufflers,quarks,refactoring,monrovia,forman,marrakech,optoma,walkways,heineken,shelbyville,oxidized,bugfix,sharif,bloodstream,yx,underpinning,resistivity,hollinger,conformal,racquets,sherri,dbd,nevermind,moa,tenchi,potters,detergents,cheri,bombardier,subsp,cytotoxic,frag,eseminars,colophon,morin,ico,tatum,unforgiven,thesauri,gaffney,harrell,toowoomba,friendfinder,uts,bootsnall,relais,allocates,freecom,yoo,kabbalah,dgs,punks,chorley,ivanov,unannotated,endian,dari,patchy,haters,mutex,worldnow,giuliani,hina,millennia,pathophysiology,frith,pao,doran,remixed,hypoxia,newyork,penile,hemi,positron,metallurgical,ordinating,caregiving,molybdenum,easley,plo,psn,hexagonal,throated,contravention,bacteriol,healers,superbike,biosafety,binomial,engels,staybridge,mullet,canfield,hardball,orem,scholl,renovate,dvdr,phenterminebuy,metformin,actuary,addressbook,xquery,csl,purdy,rattus,xian,latches,ardmore,cosmetology,emitter,wif,grils,yom,ralston,estados,begining,apartamentos,sassoon,tna,hotlog,duquesne,oclug,formatter,rhinestones,shootings,splitters,gdm,pizzas,contig,whittaker,trafic,winders,walkie,adorama,uucp,postmarked,devolution,avion,innes,reunification,izumi,caenorhabditis,moderating,gadsden,cthulhu,eurostar,dooley,diebold,unsaturated,hotsync,ryerson,bfd,nonexistent,liquidated,decoders,validates,dae,jackman,biophysical,mendes,lasagna,landers,belton,qing,docu,tapas,calla,curriculums,supermodel,rezoning,schumer,exclusivity,motivates,debuted,lifeguard,chrissy,havasu,kei,danforth,kilmarnock,bignaturals,hendersonville,poweredge,sequels,licensor,pantone,granby,laboratoire,headteacher,viajes,etosha,ndc,coexistence,leona,dpr,brownfield,aguilar,supervises,orthologs,pataki,redistricting,jil,amritsar,lpi,pram,acqua,mekong,anesthetic,dsi,maduras,pfi,paperless,perc,fansites,sherbrooke,egyptienne,hyn,anisotropy,heaton,rennie,sno,redox,cladding,seaworld,hotlist,trumbull,retransmission,luau,tiscali,overlaps,meticulously,sitka,ucs,lsr,hellboy,jakub,hanselman,rangemaster,interceptions,rrc,dyna,appt,nonviolent,evangelicals,cunny,goddamn,wolfowitz,epping,accra,bimbo,jamboree,multicolor,tritium,ptfe,leaching,sauer,cricinfo,isomorphism,lsat,estab,stockbridge,invariants,jillian,islip,egp,didier,capistrano,yardage,neve,enviro,gte,bodybuilders,ranchers,bremerton,wbc,radii,schwinn,expander,regt,referer,electrolysis,signatories,wetsuit,flatrate,vendita,nazionale,peroxidase,folkestone,angkor,delcampe,taylors,rahul,mmr,zp,vserver,neurologic,chd,opac,cmv,macabre,neurontin,popeye,gruber,excerpted,spotter,pyongyang,hmos,beltonen,chamonix,recycler,declarative,semaphore,dprk,carmarthenshire,tristate,standardize,recyclable,knickers,overloading,angioplasty,fanboy,sharapova,moen,irin,deseret,eastbay,bfa,androgen,parkes,kilogram,pacemaker,duarte,evalua
tors,tarball,nears,kapoor,pah,allard,mog,tures,standout,lll,holley,ogs,ptt,sfs,transamerica,bdrm,comparability,buckhead,industrialization,cabana,mbr,yoshi,skokie,catwalk,homesite,pecos,stinson,blurry,etrust,minibus,coty,denby,openbook,unfunded,jobsite,dls,levinson,kasey,disbursed,cristian,ballooning,nats,antineoplastic,amplify,shitting,coden,congressmen,dft,xsp,strapless,qualitatively,struc,whitefish,flourished,ejection,puyallup,bonham,miu,cosplay,gazduire,dodgy,parasitology,thymus,handlebar,sanborn,beale,lesbianism,locators,belive,mnogosearch,aoa,childress,pppoe,phytoplankton,wireline,handpainted,suprise,neath,casseroles,generational,coppola,burrito,sandton,spylog,biltmore,coriander,edtv,chopra,streamflow,montoya,lesbien,manipulative,hypnotize,liaisons,backers,evocative,mcclelland,centerfold,burch,chesterton,warlord,guage,powerball,snider,creuset,wildland,oster,conti,sichuan,wrigley,bollinger,sensitivities,offshoring,uiq,bayes,vipix,amphibian,substation,optically,ceasefire,haag,alj,swartz,nanoparticles,affine,sitios,woot,obo,uname,employmentnew,sepa,asrock,hijacking,blurbs,downsizing,subcutaneous,creatinine,factorization,netbios,fleshlight,reliever,ender,indenture,arlen,trailblazer,coney,avenida,ern,shocker,barnstable,ioctl,bronte,refrigerant,caterham,bajar,movei,barkley,datacenter,presidio,transfection,fung,legg,moyer,roux,rectangles,caseload,catharines,pdx,wget,collaborator,cruzer,eeoc,tnc,cnw,sausalito,clas,xenopus,reflectors,endorsing,qingdao,kiwanis,onlinephentermine,replicator,assertive,aldershot,weirdness,oblast,townhall,sunnyside,datos,pham,glycogen,tain,selangor,detainee,brd,hoosier,balearic,toluene,jini,tubal,longford,johansen,photocopies,haccp,narconon,dyno,blakely,klonopin,photonic,kyiv,tami,hijackers,buell,informazioni,mccracken,ultrasonography,cale,alyson,taupo,possum,milligan,rosacea,transgendered,thos,toxicological,mackey,ristorante,obama,dvc,jermaine,platypus,breakbeat,karina,jang,thereunder,kink,winton,holla,multilayer,strcpy,xzibit,mohair,chore,agb,prt,abm,kgb,preemptive,guzman,subcontracting,counterterrorism,communicators,embodiments,sociedad,taskforce,gatineau,pertussis,concentrator,astrophysical,apap,pairwise,nagy,hofstra,kbs,filmstrip,shortcake,hsm,chilliwack,bidorbuy,tetracycline,lovett,motorhead,salam,hofmann,paramilitary,flipper,eyeball,outfitter,rsl,minden,hardwick,immunological,wifes,phenyl,telefax,giao,famously,hattiesburg,telematics,tsai,maier,lca,bossier,franchisees,falco,armin,ique,controllable,surfactant,telecommuting,culvert,prescriptive,wcag,hott,spanner,mchugh,firehouse,currys,diadora,laporte,wgbh,telekom,puri,factsheets,karts,orthodontic,visors,leste,lithography,bonobo,hamptons,proofreading,rmx,evokes,jdm,dehydrated,whyte,interop,initializing,manfrotto,waveguide,pnc,aussies,murtha,reinhard,permaculture,suburbia,kamal,catwoman,optimally,darko,windstar,polymorphisms,sexist,mdm,embryology,styrene,alumnae,inducible,riesling,triage,ees,krugman,mrt,mazatlan,silencer,foreclosed,chernobyl,rigby,allergen,crystallography,frosting,gallbladder,photogallery,nightwear,sconce,vgc,drivetrain,skelton,ovaries,mamob,phenterminecheap,daddies,impressionist,tourisme,hpi,clif,fairways,watercolors,klipsch,tekken,lactic,bydd,katana,ameriquest,boson,culo,milled,mcarthur,analgesic,mya,btec,geez,crocheted,acetylcholine,modblogs,pud,firsts,ferrets,enlight,wop,twas,menzies,agonists,eisner,staroffice,acg,photometric,fokus,ntc,buzzer,tok,trams,vickie,tinnitus,vectra,benidorm,gerrard,marketworks,libertarians,downers,kevlar,sequestration,yoshida,inositol,praia,follicle,itemsshow,brunner
,indore,inspectorate,ultralight,toutputimage,saudis,octal,debilitating,twd,keypress,notifyall,hdf,corrs,turku,centrifuge,curators,multipoint,quang,marla,mths,caffe,projective,fandom,cws,kao,debacle,argh,tts,plantings,landmines,kes,sdd,khaled,kimmel,famc,tva,arbitrators,deakin,instock,gilligan,unh,unpossible,waldron,kihei,daq,bronchial,emg,nanoscale,hmong,brownfields,emmylou,antcn,unilaterally,hypoglycemia,sodomy,bukakke,bigpond,famosas,nsync,zd,revaluation,conditionally,moira,tenured,padd,amato,debentures,rfcs,acyl,rehoboth,lmc,dht,drucker,lmi,tham,cigna,dlr,nifl,sealy,axa,carrey,ige,dde,foy,evesham,mcneill,manitowoc,baguette,haves,erections,overpriced,grantor,sux,orbiting,soares,gsl,ihep,resubmit,bader,gymboree,kyo,yunnan,miyake,rah,saggy,subtypes,moultrie,vasquez,iogear,merch,uplinked,cognos,northbound,cardigans,ket,rasa,taglines,usernames,gpsmap,ngn,midweek,pirelli,rialto,tvw,durations,bustle,trawl,shredding,reiner,risers,taekwondo,ebxml,unedited,inhaler,granularity,albatross,pez,formalized,retraining,naa,nervosa,jit,catv,certificated,spicer,karsten,surfboard,scl,garfunkel,handguns,ideograph,papillon,dmn,citywide,stingray,bmo,toscana,analsex,larsson,franchisee,puente,epr,twikiusers,tustin,physik,savute,slinky,cubase,weatherproof,parkplatz,roadsidethoughts,oxy,pthread,postmenopausal,mixtape,tuxedos,fujian,batters,gogo,nca,minivans,yerevan,duffle,scraper,posner,bwv,technet,sdsu,decl,lombardi,musi,unger,gophers,brando,ksc,multifunctional,noes,relist,webjay,vtr,haworth,transfected,dockers,swg,screwdrivers,tir,guitarists,manta,christa,sff,moffat,surfboards,deteriorate,compo,roos,eesti,caulfield,midpoint,orland,malagasy,shoplocal,standardisation,matlock,nair,polymorphic,emd,phenomenology,substantiated,slk,phong,bandera,cred,lorry,recaps,fet,resolver,kagan,chiu,anthropologist,opcode,jugg,revamp,herbarium,grb,readonly,arista,barcelo,unknowns,kean,coq,cpo,brosnan,chamomile,tgf,mobilizing,anya,allo,geddes,wayland,cerro,methylation,ecol,clanlib,jayson,prostatic,uj,metcalfe,oppenheimer,mcclintock,android,primaries,converges,lation,anisotropic,voorraad,ucr,mxn,ambrosia,springboard,rubella,eisenberg,bif,constitutive,vesa,signoff,guggenheim,sapphic,killington,otr,intec,xem,instawares,kearns,showcased,summerfield,cooperatively,oshawa,targa,triplets,hec,billionaire,leucine,jobless,slingshot,cutout,disgruntled,coker,selinux,crosslinks,resurrected,skyscrapers,spamalot,sfp,noob,crb,moviefone,beecher,goog,mdgs,democratization,biostatistics,sakaiproject,cilantro,equ,xilisoft,zc,terracotta,garvey,harford,pcie,dartford,dicaprio,rosso,onlinebuy,gilliam,certiorari,walkin,contributory,applescript,esol,giggles,suture,jacobi,fark,autoblog,glaxosmithkline,dof,sextoys,tice,accor,buford,uspto,balfour,calipers,penalized,pyruvate,loggers,envi,kissinger,rmc,whew,orchestrated,conformational,choreographer,mcsa,impressionism,bucknell,martino,cranbrook,taz,ocp,subdomain,precios,simcoe,abnormality,varicose,newtonian,genova,libor,infomatics,hyannis,howland,federations,syed,urination,bewertung,broadcom,cautionary,escalate,spotters,kucinich,noosa,sider,mitral,dafa,verdes,inproceedings,crestwood,takingitglobal,dmz,antisocial,baz,gangsters,daemons,foundational,probs,huntley,kanpur,uah,elven,isotropic,adodb,enlaces,edelman,rubinstein,flier,griswold,ome,carcinogenic,micr,rrna,goverment,mercado,lum,dekker,supercharged,magicyellow,primavera,timescale,fico,overwritten,marcinho,kor,erb,keanu,edina,perle,lebron,terminally,bundaberg,lbo,breyer,kochi,pirated,leavers,vpl,pubsulike,aquifers,nittany,dakine,rescuers,amsoil,revitalize,messageboa
rds,lakeville,apotheon,eukaryota,permeable,rsm,lastname,pxi,faxless,napalm,annuncio,usmle,racetrack,atenolol,riveting,cbbc,absorbers,xseries,biweekly,parkside,rez,hows,posi,derailed,shoebuy,ashworth,keira,meadville,skynyrd,threechannel,fid,rua,monologues,subroutines,subspecies,penton,eoc,figleaves,bab,ketchikan,immagini,shafer,qca,broiler,ctn,lickers,akbar,cbl,skimpy,fisa,reflexive,drool,godin,exchangers,interbase,sepsis,appli,boxdata,laing,oscillators,choline,doolittle,trikes,pdm,joerg,removers,grisham,diffuser,indesit,rouble,kamasutra,camila,belo,zac,postnatal,koizumi,tallied,ikezoe,niggas,lorain,tko,keying,ballpoint,kq,lupin,eidos,computerised,maf,rsv,munson,ftm,munoz,hbv,jeffersonville,willfully,orienteering,eoe,cavs,humphries,puss,ngs,podiatry,truffle,taka,beal,kalahari,blockage,hallo,abo,recv,obstet,bulma,chicos,cliche,sadc,tolar,screenname,chlorinated,hypothesized,upbringing,fmc,newry,zonal,defun,unsustainable,maas,ghostbusters,interdependent,rockwood,dbe,asda,civics,literals,unanticipated,seminoles,plist,tabulated,workloads,chemo,vhdl,pretrial,fermilab,hotplug,rotator,krups,myosin,mtx,carpool,honky,matsumoto,armpits,clug,gasolina,caruso,fsh,joysticks,visualized,bosworth,soic,clitoral,bers,carsten,riverwalk,convertibles,literotica,pgm,ringetoner,tpm,floorplan,oscilloscope,getz,mgd,dictators,levees,annandale,hillel,jeffries,pacheco,slacker,miva,sns,gca,xchange,kraftwerk,bandana,pentecostal,extrapolation,fennel,telemark,spg,quy,datasheets,smit,flywheel,futons,interviewees,mosfet,maryville,oskar,ital,quarkxpress,nondiscrimination,republika,icici,fixings,leith,kickboxing,deming,deactivated,caliente,oligonucleotide,crtc,golgi,channeling,stopwatch,maroc,lemieux,subscript,starfleet,odi,substandard,phenterminephentermine,phoned,ncl,gmtime,convener,becuase,dailies,dansguardian,miramax,busta,maury,cng,jizzshot,moya,nackt,commercialisation,cunni,cardinality,machado,insurances,qn,tinting,epidemiologic,isset,burnie,bushings,radionuclide,typeface,changeover,jian,termites,dotnetnuke,decryption,etnies,subsec,cxx,grinnell,alexei,helly,protestors,signings,parnell,gretna,guida,abl,farscape,hdtvs,sde,cyborg,yanks,hematopoietic,clot,imprints,opensolaris,inflationary,elie,traceroute,fgm,cuddle,workbooks,fallback,permutations,downer,abelian,cabela,transferee,quantitatively,sheepdog,cameraman,pinochet,replicating,tci,slashes,streetpilot,renovating,paralympic,dwarves,cakewalk,pyro,phenterminediscount,tye,bna,uwa,stinks,trx,behav,blackfoot,kuo,schaffer,kemper,glycemic,plesk,slicer,joshi,realtytrac,sandburg,dnb,nwi,reza,operable,wargames,guerrillas,saito,tce,fullsize,auc,anzac,kulkarni,rabbis,mendelssohn,investigational,photojournalism,anaal,christiansen,centaur,rubio,transando,rapist,ert,pratchett,climatology,baise,labtec,prioritization,pinhole,hdpe,bioengineering,dirac,mcu,alveolar,westmeath,lewinsky,webx,acco,soya,moz,exorcist,biofeedback,atrios,honduran,seaview,douche,rsh,soundcard,resistive,sylvain,chubb,snooper,atn,dbase,katja,icr,firepower,agu,ges,cissp,mangalore,laois,ime,unmodified,keystroke,zell,parkersburg,yoon,gillmor,joyner,vinnie,ccf,grocers,simulates,flathead,castellano,sigia,vesting,misspelled,prono,headcount,panache,inu,hallelujah,joes,cayuga,nob,tpb,glug,zodb,gubernatorial,goran,bauhaus,sarawak,sparky,sebastien,wirelessly,wpi,sysop,factored,eula,ohh,bsb,polymeric,salivary,mfi,ftaa,async,dnd,kristian,circadian,analgesics,flintshire,prakash,productos,phenotypic,pelagic,agronomy,vss,aironet,weightlifting,yugo,audiophile,unidos,motorcycling,raine,testbed,pediatricians,fingerprinting,bunbury,taski
ng,gmd,emulated,tweaked,phonological,barco,gomes,osf,faridabad,aprs,snappy,opa,colonic,jeroen,qin,zircon,svt,dansko,caspase,encinitas,tuo,remoting,ploy,achat,freefind,spellings,canopus,dme,gaulle,maplin,dutchess,wattage,puke,distinfo,leia,expeditionary,amortized,truckee,albury,humanistic,travelogue,triglycerides,gstreamer,leavitt,shotguns,discounting,etoys,thirties,swipe,dionne,ebscohost,tns,geoquote,upkeep,truncation,gdi,bausch,pomeroy,harrods,downgrade,roomates,biliary,dumpster,universalist,acdbarc,ywca,oceanview,fazendo,shayne,tomy,resized,yorkie,qx,matteo,shanahan,japonica,froogle,rehnquist,megabyte,ginsberg,vivienne,penticton,inseam,csh,pressurized,sld,faves,edf,massagers,ente,timesheet,anniston,sigur,toughbook,histological,clays,pcx,suzie,honeycomb,denier,udo,etcetera,reopening,herrmann,ifr,quantifying,qigong,cbn,kurzweil,chanukah,programas,fumbles,jobseekers,nitrite,catchers,mouser,rrs,knysna,arti,andrey,textarea,weis,pesto,ilm,ponderosa,kroatien,transitioning,whoops,catamaran,preoperative,cbe,verilog,helios,qz,wheelbase,narayan,voyforums,csg,unctad,monomer,refueling,ilife,biennium,coho,pellepennan,quartile,anwar,infobank,hexagon,ceu,geodetic,anda,emporis,ahmadinejad,lubes,consensual,altimeter,nmi,psm,lawler,sharpener,stellenbosch,soundex,setenv,mpt,goldfinger,asahi,ascorbic,himachal,dichotomy,communigate,covalent,cantrell,tarpon,bluffton,radix,orthologous,taichi,borealis,nerf,rosedale,policyholders,nst,racecourse,extraterrestrial,kok,servicemen,starwood,asco,nui,phylogeny,jis,tiesto,ameri,plankton,pkt,seamus,sublets,unthreaded,microstrategy,cleanups,fitchburg,flowchart,tacky,sauk,supercomputing,antiwar,illawarra,benetton,menopausal,workgroups,relive,ketchum,nieuws,mirago,reproducibility,abalone,ashmore,ssx,eachother,gsx,juggs,ded,geometries,petzl,edie,quirks,sbe,bundy,pina,crayola,acceptor,iri,precondition,padova,indica,roddick,teasers,beveled,consumerism,flr,yeovil,boneless,intracranial,kbd,tatoo,gameday,solute,tupperware,ridgefield,gce,quadro,mumps,trucos,mopar,haggis,electromechanical,styli,whipple,fpm,arcata,perego,guwahati,loudon,legolas,rockaway,exhibitionist,woolley,msps,toolset,ferragamo,bott,godiva,nsn,vfw,masculinity,schrader,bld,lightfoot,capitalizing,rucker,browsed,hcg,freenet,bundling,cannondale,mcat,blt,mencken,commerical,dagenham,codename,nesgc,profess,rearrange,warfarin,stdin,rohan,overheating,condon,inflate,npd,gunnison,hhh,sfmt,devonport,copywriter,bodybuilder,poss,psigate,ecp,airforce,fleischer,atmel,rasta,ravel,jupiterresearch,flycatcher,cusack,jenni,gbps,bombshell,llbean,arnie,subdomains,kale,pcd,shemp,findtech,huck,vouyer,horrendous,complainants,addy,ehs,fabricating,mmo,verdate,cyberpunk,enotes,pecans,ababa,whitehorse,barak,juke,schnauzer,hairdressers,prioritized,rainforests,exo,rabin,workday,eared,earphone,passaic,vme,hypermedia,udb,jinx,illiteracy,carcinogens,offres,addressee,thefreedictionary,informants,tics,sublimation,harnessing,extenders,fishman,hmi,tsk,inj,wvu,zimmermann,dupage,belarusian,maia,lynyrd,messianic,mexicana,generalist,gastronomy,ugs,huckleberry,ridgewood,pii,dua,phan,lightsaber,vivanco,catheters,azerbaijani,whitmore,footy,joinery,wasatch,octagon,equates,sorenson,eames,tacos,misspellings,trivandrum,kingsville,magnetics,rce,halide,metabolite,clo,genders,headgear,gretzky,harming,insole,colvin,kano,thurrock,cardstock,journaling,univers,aragorn,principled,namibian,slacks,mcsd,wmp,fairmount,physica,subtropical,sager,trk,bowflex,subcommittees,jia,ramesh,sitepoint,prawn,phylum,mephisto,prf,mundial,waveforms,algal,schafer,riddell,gimmicks,reparations,
injectable,sher,trondheim,mhs,libwww,phenix,tlv,rena,tcpdump,quinlan,ecampus,kaya,ethically,sity,fkk,freeradius,nmh,puffin,freeride,ahern,shaper,locksmiths,lichfield,cheater,tora,hsi,bootcamp,torus,mondeo,cotta,oac,evi,jre,vignettes,aculaser,waxman,raping,oryza,leashes,babydoll,srgb,practicality,winer,thon,battelle,inp,europcar,pancreatitis,americus,immunohistochemistry,woodlawn,filigree,forecasted,bypassing,chock,chocolat,messier,gravis,edson,nathalie,calendario,blenheim,clarksburg,trigonometry,virusscan,flanges,bowlers,tsi,ipos,harlingen,keypads,sosui,campanile,vassar,regress,ghosh,iab,hao,ntu,ivey,techdirt,pmt,minutemen,pias,celiac,hough,ingested,hypothyroidism,boyfriends,jeong,equifax,baroda,cybernetics,tissot,daf,prefered,rappers,discontinuation,mpe,elgar,cumulus,brltty,klan,goku,offsetting,airmen,halliwell,ionizing,angebote,morphy,bookmaker,curio,hookers,amalgam,notional,webactive,bechtel,zambian,reinhardt,bridgend,bendix,dists,magnetometer,populist,mimo,bsu,renfrew,hesperia,chautauqua,mnemonic,interviewers,garageband,invariance,meriden,aspartate,aramis,pleural,tsu,mediating,gabriele,resonator,provincetown,afx,surpluses,ertl,holger,castlevania,vaniqa,finisher,ead,quartets,heber,muschis,anthropogenic,thermos,macroscopic,torrington,gillingham,geopolitical,flaherty,varietal,assfucked,engle,gorillas,ihc,shatner,euc,juarez,helicobacter,epidural,luisa,teardrop,anion,glosspost,numeral,mdx,orthodontics,tabby,cyngor,onl,claddagh,abf,therm,myeloid,pugs,sprocket,roh,unilever,ctu,genomebrowser,sima,hants,maclaren,chairmans,yim,workflows,adn,ansel,dragostea,hrvatski,ayala,bfg,tonawanda,imovie,regionals,kami,jansport,fanfic,tasha,nikkei,snm,lynnwood,glucophage,bicentennial,arl,radiologic,kts,agosto,mineralogy,corsicana,harrier,sciencedirect,krugerpark,oireachtas,esposito,adjusters,olympiad,fname,iar,allende,ldc,sited,surry,strainer,paragliding,whitetail,pagemaker,astrid,tripled,gwar,atwater,overpayment,faeroe,wisenut,nagel,blatantly,chicano,chongqing,corporates,applicators,erasing,svetlana,fleer,bossa,deuces,fud,dalian,anycom,gunfire,mcnair,subtilis,hdi,percutaneous,cursos,cols,urth,northbrook,rmk,mgf,voli,leann,pixmaps,gigablast,metronome,blackman,fliers,rdbms,imprimir,grouper,negate,roessler,intrastate,manawatu,blass,ainsworth,denzel,tfl,moped,appointees,bunkers,refrigerate,ligase,otp,beleive,warlords,hatteras,symlink,almeida,blogcritics,cochlear,janelle,alphabets,atta,foldable,hydroponics,precast,univer,purest,fatboy,cei,westerners,camarillo,kelty,volunteerism,pdq,openacs,hor,newham,energie,radiographic,kinematics,errol,otabletest,isobaric,hba,gratuitos,innd,eads,personalise,tbl,fso,patenting,reciprocating,rto,subcellular,crosbie,harmonisation,dunfermline,janesville,egroupware,caritas,tsm,egf,roa,debhelper,nsaids,milt,burleson,pba,ragtime,adopters,impor,philo,backseatbangers,rushville,saitek,synthesizers,vulva,arapahoe,posey,minuteman,zinfandel,mayoral,fortis,medicina,gallary,honeys,pinus,interlink,greening,tesol,artnet,crw,bansko,brien,silvery,guevara,thinkin,sedu,automakers,igmp,overtake,semicolon,bubbly,edwardsville,ques,homebuyer,nodal,mpo,unbeaten,rawls,ocx,ork,sheeting,hallways,alzheimers,snooze,kestrel,nadh,americorps,prawns,nonpartisan,naps,domina,eldon,palomar,riedel,hoppers,onscreen,gdk,distillers,uploader,caltrans,tyra,cocksuckers,mtbe,hypertensive,xie,chinchilla,bucs,transformational,sailboats,heisman,grn,jct,exemplifies,arrhythmia,astrometric,workwear,tolstoy,asperger,koop,newydd,transpose,lpr,xray,ferrer,microeconomics,kafka,telly,grandstand,toyo,slurp,allocator,islas,ila,westland,
instantiated,lewisburg,stylists,blackwater,vivi,hippies,pul,larkspur,kea,lesben,motherwell,ahs,cappella,neocon,getname,coyle,rudi,departamento,winrar,mussel,britax,diwali,raines,dso,wyse,geourl,etheridge,docomo,webindex,accrediting,stapler,pheromones,woodson,imm,volcom,telewest,lcp,bisexuals,ozzie,kitsap,oic,cutest,hoon,mpp,cte,dymo,yolo,quinton,jorgensen,printouts,tempt,credentialing,scalloped,sealey,galvin,etudes,gurney,bluefly,schweitzer,jawa,geochemical,allegany,aldridge,digitizing,aki,organically,chatboard,lomb,uddi,yng,roleplay,pavillion,barstow,patna,rootkit,spearhead,leonid,sunnis,reticulum,dulcimer,unl,kalman,npl,coronal,rendell,transparently,mfs,freeform,gianfranco,tantric,reif,woodhouse,lifter,seymore,ogle,sayin,cpas,videographer,gpe,stallone,uams,pula,trudeau,buss,ouest,korner,fatherhood,debussy,qsl,reflexes,hlth,wyman,kingsport,gauthier,vadim,magnetization,trd,aitken,millers,titted,clerics,busses,trai,underpin,ajc,dumbledore,vinny,delicately,webroot,yip,producti,teksty,pullout,dmi,yellowcard,sbi,dmt,nce,birdhouse,bnd,neko,chillicothe,peacekeepers,schmitz,rimming,solent,propylene,supercross,zsh,multnomah,foxconn,fuelled,biohazard,horrifying,parque,toffee,fpl,riemann,horsesex,mahatma,mubarak,bachmann,caswell,chiron,hailey,pippin,nbp,ramallah,isoforms,dictyostelium,tauranga,hawkeyes,maxxum,eire,knowit,topanga,geller,parliamentarians,inadvertent,utes,boardman,denham,rofl,homophobia,winches,uptodate,centralia,eschaton,hoaxes,hillingdon,buble,hairspray,acdsee,offerte,urb,intellicast,minn,frc,antisense,pelosi,shader,gisborne,grafts,hillbilly,intifada,carina,fon,ehow,vpi,brunel,rtx,roald,externalities,metzger,balsamic,classically,calorimeter,necked,idiopathic,lileks,tahoma,ogc,unidirectional,westbound,layla,galeries,cabinetry,suarez,stipulates,towertalk,optimizes,serializable,universite,ald,ringsurf,toques,rayleigh,dropouts,fws,gamecocks,gazprom,braden,amet,sinusitis,rusk,fractals,depressants,clec,tryouts,rushmore,shel,adapts,farlex,emac,phl,remax,wizbang,endnotes,rodman,dissidents,iterate,conair,ember,vsa,neolithic,mgx,acuvue,vetoed,uruguayan,corrigan,libxml,etronics,simian,atmos,msk,iib,multimode,teensforcash,annu,sunbury,girardeau,dbg,morrisville,netmeeting,asso,estore,universes,ganglia,ghanaian,resonances,subjectivity,microarrays,easypic,abbeville,newsre,cobble,flightgear,spode,berea,mckinnon,bucky,plunger,xing,siggraph,bookends,klingon,moreland,lowery,histograms,moll,floorplans,netherland,frasier,rossignol,polyline,laroche,cytosol,disposals,xforms,mosul,motu,amersham,chordata,crafters,kingsbury,yoox,hyphen,dermalogica,moreton,glycoproteins,aristide,unsorted,rambus,ptf,scorsese,patricks,microwarehouse,bch,blyth,grampian,livedaily,nces,alizee,detain,andrzej,optimus,alfie,immunisation,pfaltzgraff,eyelets,swordfish,legals,hendry,homogeneity,hartland,recreated,leaded,hunan,supersonics,amstrad,vinaigrette,scd,mch,nintendogs,dvx,unreadable,plattsburgh,balsa,aya,brasserie,gcl,salton,paulson,dvdplayer,silverton,enduro,peepshow,givens,bristow,pecuniary,vintages,ozarks,johor,zia,mucosal,prehistory,histidine,mti,drape,tectonics,lorentz,distributive,sharps,seguridad,ghd,gilberto,doomsday,otters,gervais,mews,scarring,daydream,gooding,snicket,bicarbonate,boggs,wps,dietitian,itf,harriman,paprika,haviland,novato,dyn,hornsby,biden,disallowed,zahn,jordi,correo,frida,chappelle,resourcing,methuen,zoneinfo,adelphi,orbison,geffen,informatik,novella,brie,galeon,silos,lrwxrwxrwx,shortstop,cua,dordrecht,permissive,creston,prec,nco,nehru,bromwich,disposables,estrogens,mulholland,rui,haz,eol,odometer,tooltip
,ibb,mosby,druids,aggregators,herfirstbigcock,rti,arvada,fixme,rodger,tively,gizmondo,cucina,ivo,griddle,pricelist,juventus,conroe,multipliers,aparthotel,kitesurfing,couplers,aftershaves,rehabilitate,patina,scansoft,quadra,sousa,phonology,dunkin,deat,plasmodium,bums,undersea,aretha,lts,boxster,staf,bcg,overexpression,vanadium,wilkerson,riverboat,voa,kohn,bgl,jiu,ipi,contl,ottumwa,gynecologic,unstoppable,pedometer,shortfalls,ksa,bookmarking,ingham,yoder,esu,vbs,barbershop,drinkware,idiosyncratic,googlebot,floppies,tashkent,foxboro,allstar,hervey,fes,kilowatt,evga,nikos,tance,varian,mops,coughlin,commutative,lansdowne,bcbg,syrah,affx,angiogenesis,nicosia,nematode,kegg,pkr,enso,administratively,tma,capa,ronaldo,leverages,cco,cancerous,banderas,gmane,vq,gabriela,secretory,mmx,pinehurst,nro,reassessment,ippp,chillers,elbert,sunil,yuki,periodicity,trypsin,bursary,dependability,overdraft,deirdre,colonia,mycoplasma,lesbains,adelphia,scribner,aro,activites,uaw,frankel,cacti,bugaboo,palmdale,aeration,kita,muscletech,watersport,paf,nxt,uscg,yitp,gibb,gener,nak,unm,zhong,chowder,expatriates,centerpieces,freaked,curbs,tdp,gruppensex,triphosphate,acronis,wcw,prostaglandin,completo,darwinports,abiword,hippocampal,atlassian,technik,vineland,commentaires,ters,stuttering,forcefully,depo,edinburg,kwanzaa,kzsu,mascots,harrisonburg,cadbury,scoble,aor,conundrum,bullard,aiff,comedic,apical,synoptic,miyazaki,beryllium,disinfectant,sentra,joi,jokers,wci,piglet,wildcards,tresor,sketchbook,bbd,halliday,manolo,tifton,repre,hendrickson,windhoek,lomond,atapi,hbh,eccles,ofa,dcu,spatula,intergenerational,epub,cates,featurette,gotcha,kindersley,drifter,cvsnt,ogy,lagerfeld,lewin,youve,unaids,larue,stardom,assad,glenview,brantford,kelis,nola,lxr,toastmasters,appr,recs,ranchi,exotics,articulating,jiffy,goodall,gconf,verkaufen,scalextric,ryobi,qname,immerse,farris,joinwelcome,cce,wittenberg,capone,mtp,busines,rebounding,usborne,hirsute,prelim,prepress,rop,militias,ttd,commodores,ecnext,dbf,goldsboro,ashburn,roslyn,neverland,coolio,lindbergh,freeciv,indice,vertebral,ectopic,abcs,lge,bnl,coulomb,minton,oban,restatement,wakeboard,unscheduled,dbc,visser,clipland,thermocouple,masala,clt,drw,rosas,rdram,mcclain,maki,rosenbaum,eagan,slv,sunburn,pleistocene,nips,sfi,canisters,kas,waddell,solvency,lynette,plainview,fielded,blowfish,zyprexa,altrincham,workin,afton,topologies,touts,pino,xelibri,lora,mendez,undelete,samuels,rajesh,soros,unjustified,nfo,crf,digitale,sitcoms,analogues,leukaemia,ukulele,paperboard,fied,cobain,trillian,offaly,girlie,ilcs,friggin,wq,davinci,oxon,expressionengine,bains,rse,callbacks,cdv,hannity,replicates,sidewinder,queueing,slugger,humidifiers,desai,watermarks,hingis,vacanze,onenote,montebello,streetcar,stoker,fulcrum,sadistic,cassiopeia,corwin,qut,martingale,saucony,winslet,criticizes,baytown,synchronizing,reclassification,woohoo,htl,caithness,takeaway,timeouts,reit,dietz,devo,morgage,koo,ducky,bola,mdb,multimodal,recenter,hematite,hensley,asterix,hokies,blumenthal,multinationals,aag,debs,playin,emeril,mcalester,adria,shipman,burzi,incinerator,muenchen,convening,unorthodox,fibroblast,gloryholes,carrick,immersive,darmowe,catagory,glob,cisplatin,rpa,fertiliser,nuova,halstead,voids,vig,reinvent,pender,bellied,oilfield,afrique,ream,mila,roundtrip,mpl,kickin,hiatt,droid,addenda,restorations,boll,knightley,worksite,lcg,typename,aris,isv,doctype,balinese,sportster,dence,lesbi,saversoftware,bursaries,cuny,cardiopulmonary,biologic,wanadoo,shiatsu,homewares,dpc,qk,schizophrenic,unplug,albergo,pressroom,gingrich,basra,g
reenbrier,superoxide,porcine,oldfield,wxdxh,luder,shim,manx,understatement,geda,tormented,immanuel,whistleblower,hopi,idd,gol,bayswater,lyne,epox,kennewick,subtree,inshore,ibd,hepnames,benn,kettler,clots,reducer,naturists,lvd,flonase,sympa,hinsdale,trav,spina,meatballs,underrepresented,bpl,etb,brane,tightness,tracklisting,horizonte,rgd,concatenation,suffixes,kilmer,cloverdale,barbera,seascape,amdt,linings,horseradish,telepharmacy,itasca,varbusiness,paulsen,cortina,ides,hazelnut,ashfield,chaco,reintegration,pampering,boland,airtime,surrealism,imi,eit,clamshell,tonk,luminance,ixtapa,gryphon,ecos,cair,rochas,farnsworth,synchronisation,suresh,minnow,bloor,gumbo,faqforum,kunal,jossey,rci,upa,melamine,wonwinglo,episodic,xcel,jurys,descendents,ezmlm,twikiaccesscontrol,tonos,lated,montero,divisive,soci,guia,gastonia,inappropriately,valentina,lubricating,itworld,deca,branford,kody,accruals,epitope,jdj,crenshaw,perlman,medallions,rokr,usg,microtel,rsx,graff,jcsg,fds,cooney,whittle,gmthttp,rayburn,etat,suppressant,hecht,sportsnation,sso,ccnp,reworked,etl,catapult,vries,procurve,cbot,elitist,convoluted,iberian,optoelectronics,mailscanner,kazakh,stimulator,schoolchildren,commweb,thornhill,tweezers,lani,ouvir,filetype,bearcats,fanclub,boehringer,brasileira,webservices,kinematic,chemie,inoue,unsupervised,norvegicus,copycat,orrin,snooping,hashem,telesyn,mcb,imple,dorms,elist,laminates,ingalls,checksums,tandberg,iirc,mackinnon,roddy,margolis,erotaste,pimps,mcdougall,smg,mpx,fhm,travelzoo,thermally,teleconferencing,albino,cargill,hyd,visualizing,mothercare,sprinter,isomorphic,pepperdine,cvc,mahon,conjugation,macally,anklets,impasse,disinformation,beavis,delicatessens,intensively,echocardiography,pav,amok,riddick,sexism,ordinates,gallaries,baldur,elon,beasty,arty,leukocyte,chau,cotter,peptidase,fsi,postmodernism,osm,squeaky,silicate,alcohols,zydeco,testi,trujillo,predictably,weider,shareholding,giordano,cardiomyopathy,aprilia,mcnabb,lenz,homeencarta,disconnection,scada,spacetime,trb,awol,espa,bionic,batista,bookshops,feynman,captioning,sibelius,obstetric,marigold,ostsee,martel,hcfa,ino,ctm,whi,typesetting,ervin,chroma,steinbeck,pusy,biblioteca,neutrophils,dunbartonshire,lollipop,brash,avl,opi,declaratory,corus,elph,naf,htp,hydrate,ubb,littlefield,neutrinos,aso,bric,subways,tui,leominster,ncsa,snipsnap,negativity,arcview,picasa,tortillas,awww,dara,ragga,innova,doorbell,ebc,sgl,unsettling,snps,explicito,phila,bugger,persson,embolism,iip,silverplate,lats,ovc,roebuck,sbp,lipton,starling,coreldraw,haney,globemedia,adrenalin,murphys,nicklaus,yardley,afghani,tst,hrd,haulers,energize,prohibitive,sydd,nida,barcodes,dlink,includ,orgie,macnn,danni,imaged,sprayers,lindberg,filesharing,calibrations,atorvastatin,teague,vantec,lattices,cucamonga,warne,derwent,hospitls,flintstones,rotisserie,orcs,scallop,biostar,computationally,jobseeker,siem,sunbathing,ronda,npg,cerritos,kaz,chard,pershing,clotting,zhi,programm,singlet,morningside,simm,egr,hackensack,taf,kinshasa,availablity,lrd,lugs,kiddies,cpsc,hebert,asta,gato,cimarron,crowell,fanart,nagin,gfi,collapsible,helsing,haringey,phu,stes,prophylactic,rosenfeld,cityscape,tradeoff,sask,instill,ypsilanti,lifes,imate,firestorm,homestay,inept,peet,shiseido,steves,sascha,reconstructing,okt,droplet,dhe,lakota,revises,ipt,macrae,parlay,bdt,woodville,xlarge,proform,gothamist,coexist,advisement,fulltime,macosx,metra,cyg,turtleneck,aquos,hcs,tsar,isbl,gigabytes,triangulation,burleigh,anarchism,stabilizers,gbic,ciba,activa,cgt,terrance,smoothies,orsay,belling,bnsf,opps,representational,kag
ome,snark,woodard,malignancy,makati,cbm,bwi,farah,sitewide,newfound,collider,candi,lgf,boylston,swi,rizzo,wristwatch,owensboro,papas,subscribes,lah,wining,cies,ganesh,castleton,zippers,decaf,emphasises,cbp,crx,shakur,rso,euroffice,roush,caloric,plaintext,ofm,daniele,nucleoside,xsi,buttercup,oakes,searle,shuppan,lanyards,cushman,admissibility,courtenay,aspartame,sleuth,trudy,neem,magix,cosh,aurangabad,golding,ethnography,yamaguchi,bhs,bulkhead,kain,abta,herzegowina,minas,paradiso,cityscapes,oit,replenishment,autobytel,kroger,dexamethasone,strunk,yoghurt,nationalists,tfs,definable,bruin,psychoanalytic,reserva,nasser,simp,zmailer,birthing,collinsville,dimer,powells,abebooks,stemware,landsat,peebles,dewar,docked,burp,radioisotopes,obstetricians,vinson,efx,naia,idb,fahey,multisync,worley,oms,kerri,arith,democratically,datasource,mcelroy,cze,shopgenie,udev,nicol,camara,degas,benassi,prefabricated,gastro,accessor,meteorites,notts,lipoproteins,attleboro,parenteral,biosystems,cerebrovascular,fsn,bahraini,actuaries,delicatessen,rng,marianna,creatas,kidderminster,waukegan,antifungal,promulgate,mvr,socorro,maximized,bde,dlx,erythromycin,dtg,nady,leibniz,flix,cusp,homers,crandall,holcomb,beaulieu,tct,abington,pointy,hamradio,meso,monmouthshire,danvers,tpl,baptisms,backprevious,carnaval,recompile,mainboards,fclose,melodias,cliquez,doberman,installshield,fasb,estas,htpc,stover,cerruti,brainerd,oxycodone,istituto,revs,maha,compressive,wombat,antenne,patek,zippy,neteller,odeon,sbir,backslash,townhome,victorville,amityville,arpa,trannys,goers,chipper,gulfstream,modulate,xserver,infosec,agt,underwired,ambiguities,khai,norepinephrine,kundalini,elkton,carcassonne,saygrace,appending,marathi,songbooks,islamists,recursos,newcomb,stampa,newscast,vtp,stockwell,nederlandse,outtakes,boos,lavie,fina,retinopathy,deportes,tremont,barrio,buggies,zacks,exercisable,speedup,holl,efc,cibc,ontological,thinkstock,flashbacks,kennett,dentures,eckerd,xetra,stg,reimbursable,informit,cdbg,yeltsin,nitrates,aeruginosa,rpath,archaeologist,mitotic,generalised,outliers,sug,frac,cowon,semifinal,deactivate,studie,kazakstan,sva,citesummary,kubota,chroot,falciparum,shifters,undetected,mepis,caries,microstructure,ringwood,pleaser,compuserve,disassembly,miter,propositional,javaworld,ssd,writeups,hoskins,buytop,frome,talkie,loy,exxonmobil,emeryville,gamepad,metazoa,kml,maul,taoiseach,siskiyou,censuses,offseason,scienze,shelved,etd,carryover,fagan,jada,wholeheartedly,polyps,avast,northport,inelastic,puebla,idps,warrenton,traffickers,neckline,aerodynamics,eto,satcodx,leviathan,dfg,classico,harvmac,wrinkled,minimising,bifurcation,kimi,npcs,astrazeneca,poetics,jef,miniseries,yesterdays,dcm,issa,toxicol,libdir,angolan,waynesboro,relayed,fcst,ulcerative,bgs,airlift,downlink,endothelium,suppresses,weinberger,appointee,darcs,hashes,nuff,anza,borehole,flt,htdig,hain,nodules,bowdoin,tunable,memcpy,ucp,panelist,opr,transsexuelle,mailroom,nijmegen,medalist,ryman,gmos,recessive,putas,abou,encrypting,enola,rippers,steyn,redefinition,infield,reformat,atchison,yangtze,zw,peels,preterm,mindfulness,hwnd,stances,synapses,hashing,gere,lrg,unmounted,armoires,archetypes,behemoth,stereophonics,obsessions,piosenek,mhp,thrower,prana,trike,bmps,distillery,estudios,ceredigion,funnier,rickard,disengagement,gratuita,gifting,lpga,esse,maglite,iodide,bakker,hariri,digitization,fistula,campaigners,kel,acca,lauri,rockwall,kellysearch,crawfish,tigi,symbolizes,liverishome,thay,ecuadorian,injectors,natick,mornington,booklist,centrist,inria,torbay,femur,methotrexate,landslides,sep
aratist,jelinek,darwen,aung,outlooks,matrimonials,busybox,openview,lifeboat,hara,tuskegee,aly,ciprofloxacin,gul,reconfigure,ahn,instantiation,trw,spambayes,shelburne,programma,lbl,escalated,lucasarts,eastbound,grits,apoptotic,pulldown,redditch,trendnet,iupui,nsr,treehouse,payson,jaz,hedrick,lineman,streamlines,reengineering,cleaver,prodotti,inflight,tracksuit,polyphonics,skidmore,catia,overuse,mge,newsprint,visakhapatnam,miko,hemorrhoids,haulage,torrie,usergroup,poms,mostrar,convolution,endtime,maura,hefce,abbie,mfp,galician,golem,conifer,phenylalanine,wareham,nonpublic,henk,inversely,beebe,dancefloor,eyelet,immunologic,chengdu,beeswax,lanham,crosswalk,lecken,kitsch,scand,sweeteners,farnborough,jalandhar,publi,visioneer,sprints,reinhold,emptive,compa,hrk,faked,manilow,burnsville,banyan,opinionated,quirk,hnl,caterina,blinks,fiore,rationing,tellers,jrnl,waterborne,astron,nity,gree,tradeoffs,goldeneye,occuring,calientes,recomend,functor,trowbridge,niu,mmvi,obe,gyro,technews,shampoos,unfiltered,sabha,bundesliga,enix,communique,cantina,cafta,polyamide,selectmen,lncs,luge,necromancer,carcinomas,subcontinent,dodds,seaton,transcriptase,balmoral,specifier,subsidize,icl,galaxie,ldflags,hiya,nappies,crippling,xul,nti,aspherical,misheard,ecw,sundial,odom,flaky,schlesinger,kryptonite,typology,hydrangea,preamps,aesthetically,vrs,alvaro,htg,heston,ghia,sophomores,binh,allrefer,dcf,scarica,chorale,ooc,fredonia,tiaras,sdio,distr,dscp,cogeneration,flite,harddisk,kennedys,telefono,saleen,bosco,cyclase,dreamcatcher,csw,braddock,ethnically,wbt,morro,smurf,yeager,gelding,blurring,deva,fom,mastectomy,cassell,sarnia,jaundice,lastest,asterisks,nympho,jeffers,hyun,cooktop,fddi,aspergillus,agric,kdc,medics,mwh,photosite,gip,affirmations,variational,socializing,crankshaft,isls,mensaje,tagline,airframe,beater,preowned,dietetic,storedge,redacted,rittenhouse,stereotypical,klass,fpa,treks,victimization,parallax,zante,splices,imagenes,rete,akita,nonresidential,hellman,durex,robison,tof,lpd,seri,freetype,nexis,ldv,collegefuckfest,aiu,molloy,carcinogen,brs,catalyzed,heatwave,yv,spindles,herron,sita,watchtower,fabrizio,unmanaged,gtg,preteens,heme,renumbered,omr,cowell,hyip,crossbow,speciation,tfc,whidbey,betta,imt,emmet,jewelery,lumina,statistician,symmetries,observatories,bupropion,telligent,fungicide,aiptek,crosstalk,mello,deepsand,litas,haart,worx,coyne,adenovirus,hakim,countywide,gnucash,puree,stott,sdg,mandeville,portugese,maurizio,tachycardia,aja,eaa,warrick,cosine,veb,patong,ballina,summarise,accrington,rnas,haddon,xpc,swath,azeri,wta,ulf,kleen,cvm,meehan,jenifer,infiltrate,mapinfo,knightsbridge,renounce,jesper,blairsville,copilot,koontz,fma,northgate,phobias,metaframe,nutritionist,effector,bumsen,rcm,hairstyle,nesbitt,diuretics,cemetary,iap,discards,basie,discontinuous,iqbal,uncorrected,stillman,chloro,bighorn,heartbreaking,xxxvogue,leitrim,prg,justifications,gimmick,brasilia,recordin,abra,trn,zg,acrylics,recensione,fouled,wiretap,dvrs,vocs,moniker,scholes,sharpeners,calida,nse,calloway,tpicd,prods,hfc,ltda,snk,waypoints,nrm,underscored,herrick,starwars,smbs,unreported,phelan,guarani,tampon,easels,sxga,webform,artista,elkhorn,ventana,sublet,chiltern,antares,peaking,stichting,forall,menuitem,marshmallow,hawai,nfa,cals,seltzer,utep,homeostasis,swp,akamai,goodie,milkshake,thrasher,switchers,brussel,hartwell,aup,electrolytes,machu,unshaved,gor,ilya,maneuvering,gaby,softwood,ajay,croupier,hausa,compacts,similiar,elev,egos,rhinitis,dreamhack,aop,beastialty,whedon,microcontrollers,dreamhost,overcrowding,retractions,pinging,c
atheterization,holton,smears,jmd,melo,exons,mariachi,igi,bday,reseal,compositing,oskaloosa,coopers,psone,versione,storys,escher,hotfix,rmp,gaynor,biota,dossiers,arpt,winsor,hairdryers,axon,morrowind,puter,chubbyland,deflation,pdo,dreyfus,worsened,darlin,treme,reconstituted,aveda,legge,kasper,mugler,yorks,ddi,badlands,deploys,pols,internets,backstroke,resultados,spooner,musicmoz,toothbrushes,bugatti,abrahams,comentarios,brandywine,callaghan,diskettes,resonate,intellivision,castelle,advertises,fives,titusville,plas,royston,nace,digitaladvisor,adesso,geekbuddy,lipoic,hazelwood,gravatar,outfield,carcinogenesis,gdr,phenolic,incrementally,pqi,lenght,acompanhante,orm,terrapins,daria,vander,ccie,mathml,legalization,allendale,modernize,orl,gert,restarts,juris,brookside,streamer,rollei,accumulator,picchu,abril,crocus,zl,citizenry,accountemps,swenson,unfpa,ewido,centreville,alisa,kingsway,erlangen,offtopic,laundromat,redeemable,maxillofacial,slutsfree,glp,baumann,revolutionaries,chillin,cardomain,creamed,tarp,schering,aten,bikaner,chimpanzee,petco,flurries,rau,miki,meson,parathyroid,cmb,analgesia,nqa,theyre,elp,altera,jeddah,nannies,pawtucket,bimonthly,senna,wardrobes,surgically,nongovernmental,inge,rmdir,miso,itx,hydrostatic,attrib,cheaters,hagan,canlii,leong,koehler,clostridium,nerdy,mcnulty,megastores,imperatives,bpd,archetype,kkk,oren,halsey,artic,techworld,vnd,shamanism,numara,csx,reiserfs,roussillon,cheadle,crea,alcorn,ences,bowser,fizz,rationalize,karoo,unearth,biopsies,inconclusive,hookups,herrin,thermostats,canoscan,moldovan,jamiroquai,xerces,subclause,classname,makefiles,bettie,sheesh,birdwatching,speakeasy,harpers,hayashi,epitopes,drivel,blandford,foci,toppings,cantilever,biloba,pth,tweety,initializes,keck,fisica,macromolecular,eic,skagit,kimura,baca,pareto,lymphoid,apacer,forklifts,pvs,refuges,jal,habana,stateless,virtua,cerebellum,vtk,breville,statehood,dct,palgrave,bledsoe,insanely,inglese,aidable,bubblegum,aphex,wroclaw,rajkot,taxidermy,esubscribe,cartagena,juergen,itravel,pashmina,gustafson,jacqui,salim,barnum,anthropologists,glues,undercut,eci,cstv,watsonville,roaster,redbridge,hypertrophy,raza,duron,xserve,wobble,fergie,bohr,boilermakers,counterstrike,hinterland,sufi,milfcruiser,afdc,niggaz,housewarming,regenerative,corre,liquidators,clegg,bagless,bleachers,deodorants,bacteriophage,sheena,prez,brasileiros,transect,thumbshots,soloists,borges,sinusoidal,manpage,lazer,babys,crossovers,parsers,lsl,chuan,hauler,cataloguing,oralsex,storia,fotosearch,usfs,leappad,interesdting,headroom,fortnightly,yerba,kuta,clearfield,huggins,washoe,srg,stabilisation,sayers,publis,intangibles,tameside,summerville,uvm,whalen,kusadasi,hcp,flak,ual,cubed,yuck,concacaf,textbox,erythrocytes,dinky,divo,injunctive,honed,coincidentally,kolb,kruse,microm,portugues,pil,tht,deathmatch,publica,mde,pollination,ews,synchro,etobicoke,midori,chutney,jrs,naturopathic,dermatologist,thumbnailpost,casein,chillout,stefanie,chewable,direc,quintana,normals,villeneuve,scrum,everyman,lopes,eastland,footballers,xviewg,metropole,swarthmore,multicenter,fett,sagebrush,convenor,pco,proteome,warheads,radiologist,liao,westview,optus,medicinenet,hitches,britten,palettes,vma,depauw,gunman,agassi,panoz,uwb,movi,scanlon,nutri,mitra,guilders,filmpje,indexer,ofdm,ullman,coachella,localised,recom,downgraded,ncep,lalique,weill,jeez,varadero,chicco,athabasca,redd,azusa,unbuffered,phoning,rtty,spacey,fmla,albatron,breakpoints,sperma,aran,ciencias,mortage,legato,agarose,avoca,reservados,russellville,oneonta,badass,cfi,pesca,carvalho,nass,mainpage,mc
cord,kellie,allstars,darwinism,tariq,workarounds,omia,flannery,rediff,lecithin,okmulgee,lates,recertification,phosphorylated,fusing,nerc,avermedia,abuser,sevens,mukherjee,anatomic,watercooler,gatsby,litho,mischa,bangla,menard,rattling,artes,vacaville,teo,enermax,hypo,hadron,gosford,legalize,millbrook,epinephrine,transom,liebherr,mwc,biel,vcu,mils,oreal,picayune,rabanne,gorbachev,norelco,playset,massacration,frontman,garvin,autologous,wiretaps,duggan,jrc,chantelle,liddell,enraged,gir,adrien,blotter,jq,menubar,gagnon,sitters,rdc,jod,meteo,cept,bih,programing,humpback,fournier,alquiler,reprocessing,chaz,bartending,sshd,opodo,patiala,jaques,glc,fantastico,schiffer,preclinical,sfn,conklin,wheelers,deductive,cunard,pygmy,jewett,environnement,biddle,basu,tachometer,bks,nonproliferation,cacharel,elysees,orchestration,adipose,usu,freeservers,potting,uncomplicated,piaa,progs,ues,tobey,sife,wenzel,debi,baez,tana,gedcom,uvc,puccini,seca,ligation,deconstruction,inductance,topicparent,zanaflex,medicus,dmitri,reallocation,kalispell,haight,teleport,skylights,rehabilitative,swab,latimer,boombox,prorated,bbr,pansy,reassignment,hydrodynamic,confirmations,postulated,unlabeled,tosca,brentford,integrin,ranlib,differentiates,skelaxin,velo,multiprocessor,tabla,celluloid,identically,saddlery,whiteside,eurail,endicott,dingo,sessional,pagination,webtopiclist,infopop,accc,iie,burl,truncate,hightower,polygraph,allianz,digress,overseen,scg,thotlib,bluetake,cowes,mailorder,fetuses,lowndes,shr,childbearing,aaj,crayfish,minotaur,heist,mayne,repaint,asq,contr,zool,spastic,suprised,illuminati,piezoelectric,rfps,cutouts,ilc,vinton,enw,meir,tanita,tpr,subsidised,arcsec,wrestlemania,fhs,getter,mimics,watermarking,aftercare,coombs,wolfson,sefton,compu,bonaventure,appz,ecl,gview,temperatura,diastolic,defaulted,cesarean,dialling,rescinded,chitika,tsvn,discoloration,chelan,morel,iles,kashmiri,stacie,collages,enabler,ogo,mowbray,schuler,finlay,gezondheid,ylang,lufkin,tenge,acosta,turbotax,herbals,moderates,piotr,chairmanship,covad,comunidad,moores,hurghada,malformed,mks,seatbelt,dumbbell,chasers,hamer,sherwin,redissemination,stine,mcmullen,skopje,gpx,supplementing,lowrider,liaise,citric,opentype,jpmorgan,nitride,achievers,unbonded,cowen,subdir,rehearing,balmain,crissy,nake,wtp,scn,mendota,makoto,alloc,ultradev,viaggio,cig,scipy,depositary,redhill,caveman,nunez,starfire,whitlock,pelletier,lanark,yada,sandro,jervis,placemats,pathologic,darden,bunnyteens,gordo,otitis,ordinators,bma,leningrad,harkin,eatery,peony,economia,cytosolic,glycerin,tailings,shirtless,darla,rayman,boardhost,frontera,crumpler,hargreaves,mkportal,nucleon,pkc,dov,ndt,hideout,lrs,calcite,fpu,fts,spud,mang,nology,luiz,belden,lense,hendrick,publicati,unverified,untapped,vario,pmsa,recensioni,xq,tev,batty,briscoe,dwr,fingernails,ocarina,camus,mackinac,itis,saks,hahahaha,romenesko,croc,ftes,keyspan,aoe,reposted,cgs,moduli,mra,ery,payoffs,tpi,maywood,buchan,roberson,defrost,ecr,coleraine,arianna,biomarkers,consecutively,bongs,loox,idrc,pretzels,anmelden,vdd,underdeveloped,mktg,yancey,feta,peres,assemblyman,enforcer,suk,customarily,cillin,jett,bility,mingw,ltv,sarees,aaas,bloopers,framemaker,piscataway,cytoskeleton,wuhan,maximising,hoists,fichier,amitriptyline,sgr,scrubber,gratuites,reentry,playtex,communi,buisness,freepics,kbit,marmaris,logarithm,granola,inefficiencies,monocular,kankakee,tandy,ferrite,formato,gaysex,dbus,autorun,nivel,ayatollah,undifferentiated,flowershop,evp,vazquez,reaffirm,dynix,pictur,collette,oooo,dian,doxycycline,weblogging,cluttered,sportsmanship,r
elievers,hwa,vikram,booktopia,lampoon,airtight,firming,mrtg,shoreham,annular,hallmarks,sparking,anale,ikon,lanl,gfdl,commandline,usfws,adic,nns,pmd,rfd,ized,rsd,guardianfilms,gryffindor,ror,blogspot,thao,obsolescence,linguists,blogads,xinjiang,recode,onus,heinlein,oks,kimble,reservists,blaupunkt,statins,descendancy,obsoleted,phim,betacam,mlp,rearrangement,disulfide,myer,bypassed,onefit,interp,neutralizing,tirana,occupiers,kingpin,bnm,relaying,bga,amilo,overlord,daffodil,ukiah,devotionals,figueroa,imd,warenkorb,dfo,habib,archivos,lymphocytic,kala,deering,undetectable,infact,vermeil,silage,ejaculate,smithers,gaeilge,swr,goudy,inkl,bilge,texto,satb,prolactin,bejeweled,bastrop,sunbelt,chewy,paginas,decimation,coen,hypotension,stateful,pypy,busby,gaither,tta,patterning,rdp,cheep,ldr,denbighshire,wittgenstein,preexisting,coffeemaker,braveheart,pbr,ctt,ginsburg,superconductivity,eurostat,kyi,amygdala,corrie,lonestar,dueling,challengers,reshape,photoset,electrolytic,hasegawa,gainers,calidad,tinkerbell,aldara,poway,physiologic,optimality,riyal,hwn,dremel,cerebellar,dth,dancin,summarises,choy,heartwarming,unwin,strider,eastlake,hyp,cannonball,mathcad,skipton,patently,bitmaps,biopharmaceutical,analytically,sll,aramaic,bogged,incremented,homem,valorem,publicist,acb,muzik,tempera,recyclers,pillsbury,seach,intermediation,lacing,aggregating,soundboard,teapots,rif,neb,archivo,smartdisk,boho,titration,tschechien,sef,boney,oxidoreductase,lino,lcm,skimmer,mccullagh,gats,extrinsic,erlbaum,sketchy,gooseneck,bof,tiffin,pacer,battersea,noname,gung,asv,sasaki,outboards,owings,xue,tbi,interlaken,kampala,jcc,tentec,kilpatrick,pixmap,bitty,pge,dtmf,prosser,ojai,stethoscope,monotonic,ebookmall,perot,medien,kahuna,washroom,jacoby,neurotransmitter,intercity,broadview,micros,straus,flack,amortisation,pfu,tonite,vonnegut,distros,teething,subsector,mechanistic,orbis,flawlessly,lidar,frp,whatnot,tripartite,studebaker,cartographic,rwd,preconditions,gardenia,adland,miembro,irland,linwood,biotic,kowalski,marymount,zathura,highgate,fudforum,takeshi,taro,mpd,crowder,socialize,scunthorpe,deepwater,clickbank,ruleset,viscose,perso,novica,manhunt,pavers,elks,aalborg,occupier,lunchbox,euchre,proporta,mitosis,paychecks,bellaire,suitcases,postel,mdg,tutu,paisa,wbs,slidell,psb,vocab,mmhg,clocking,sks,hemorrhagic,plein,hitchens,fone,crores,classifiers,novosibirsk,greenwald,rtt,copacabana,videorecording,kickstart,biggie,neutralization,pvm,ksu,kph,pdl,preprocessing,particulates,skylark,llandudno,squirrelmail,oviedo,pauly,bromsgrove,starsky,prion,simfree,pennywise,grier,apd,diphosphate,lbj,interscan,pipers,tronic,surfside,tsunamis,dordogne,hotlinks,neely,jeri,proteasome,transl,goulburn,vtkusers,energizing,butane,stf,bluebonnet,htf,stmt,inked,novatech,iid,elektronik,maturities,nameserver,tomlin,jigsaws,distorting,kamikaze,quaid,juggernaut,gordonii,latrobe,bboard,consultancies,handley,gramercy,ccb,derrida,mgb,bioavailability,ucas,tdr,nochex,lilith,foreplay,waas,mccaffrey,privatized,uncovers,gargoyle,stockists,ostream,lenmar,mamiya,mildura,insn,bodega,hardworking,dockets,dedham,ered,stomping,kottayam,carle,eest,pondicherry,mpr,fiddling,panamanian,buyitnow,bungie,goya,superclass,categoria,buyback,uhh,gigolo,tmj,vangelis,kingwood,arn,dorling,maximization,wls,absenteeism,quantifiable,pion,sliver,leptin,sxsw,bummer,isometric,retraction,amboy,dunning,grinch,okeechobee,shouldnt,teeniefiles,gcj,whatcom,bbe,unb,sws,hydrocortisone,cerebrospinal,susana,rumba,bouchard,yesteryear,orthotics,spunk,superdrive,jolene,jalapeno,propellant,touchpad,raisers,mdma,
confocal,jochen,caddo,dcl,expatica,bitstream,igo,bartenders,refilling,modell,keighley,rangefinder,nostdinc,oficial,lanparty,monza,sportfishing,rlc,exacerbate,beckwith,anemone,equivalently,duxbury,zhen,cordele,ebel,ninjas,milla,incase,mva,zinn,comercial,segfault,wisden,maingate,costner,powerpuff,gsfc,lycoming,regula,lastminute,winbook,talladega,optiplex,syrups,chiles,estimations,jaxx,cercla,slb,absolutly,guesswork,tradeshows,javascripts,irritant,warcry,optura,combinatorics,graceland,encino,disconnects,castello,monolith,mct,geos,hls,intrusions,glories,prelims,kanawha,yglesias,squibb,memset,edirol,mandala,alexey,homecare,dugan,calmodulin,ameritech,umar,timepieces,nonfarm,anklet,wsp,byrnes,determinism,addams,moeller,normality,wiesbaden,deflect,taoism,ikeda,chakras,samara,unsung,gargoyles,massaging,ajmer,lossy,mitogen,hurwitz,gulliver,bul,aerodrome,darkside,intensification,raya,ruger,rba,gennaio,seaford,ungarn,vincenzo,warszawa,dillinger,bandon,odell,riddim,perforation,cida,annika,uart,tryout,proxima,fst,lladro,parameterized,assfucking,manageability,crystalspace,pandas,choiceshirts,taa,servertime,fmii,nepean,tracklist,indio,tino,bernal,hbr,homogenous,policyholder,distributional,tidewater,ngfl,erlang,starz,follicular,grupos,oq,gonorrhea,blaqboard,listeria,afaik,lawmaker,datatypes,arie,flavorful,apu,fyrom,refunding,subcontracts,moissanite,finchley,mediates,polyacrylamide,bizzare,standish,conus,competences,jtag,compatability,millville,coches,biathlon,mico,moxie,biff,paulette,chania,suu,backspace,aways,fugue,dissonance,medicated,initio,bestality,hypothermia,carman,timberline,defenselink,sunfire,mckean,smithville,mtf,rebooting,storytellers,lamisil,morphing,chua,sevenoaks,haplotypes,fiskars,speer,lathes,refillable,yearbooks,engin,kyushu,tricycle,penne,amphetamines,systemworks,keele,afficher,trillium,nena,bulfinch,transients,hil,concedes,swot,howarth,andante,farmingdale,bitching,overtly,rateitall,tubulin,gmx,bannister,omer,humanoid,infringements,stylebox,tiredness,branden,panning,wasabi,morecambe,hawkesbury,cocksucker,sak,kilobytes,breather,slu,adjudicated,methylene,wholeness,gnue,gynecol,uas,nacogdoches,simcity,hummingbirds,garnier,kath,cppflags,educause,cotswolds,heifers,sephora,joao,tremblay,gynaecology,vertebrata,blackcomb,ffxi,ottomans,rodin,ecac,actu,nde,lockable,dslr,evaporator,antihistamines,uninstaller,airliner,bibdate,unwrapped,dumbass,brc,arrhythmias,netweaver,sateen,rtos,eip,moteur,fotopage,uhm,birr,autosomal,protec,purim,rhododendron,canadienne,profes,pjm,ddl,underlay,granule,setfont,cookin,gillett,rocklin,welland,ageless,nuernberg,bleep,emedia,regensburg,gama,xfree,sills,berwyn,howler,hardtop,carded,lipo,zandt,reformatted,internment,dominick,mahmood,avent,swaying,igloo,ambler,voyeurism,bachman,referential,hydrating,adaware,dewpt,repressor,galego,neilson,scorecards,newlines,arcana,aau,transworld,nmc,discoideum,wairarapa,fogerty,beit,heidegger,backhoe,leftists,quinnipiac,mannequin,malloy,enviroment,mako,anl,noyes,eprom,trashed,ryanair,betsey,rath,lobbies,silvertone,cupcakes,artest,netfilter,voldemort,oldenburg,bazooka,gerbera,cient,psg,mittal,camellia,pronouncements,fonseca,rescind,asps,asheron,mance,viggo,qar,hepatocellular,styrofoam,malfunctions,lindner,linc,salida,dunwoody,dioxins,shaq,epmi,excavator,adolescente,redcar,urac,oncolink,cartoonstock,cwm,bibb,gymnast,inexpensively,isystem,evol,nmda,hazen,davide,forceps,motherfucker,ccw,mainframes,sapulpa,costas,searcy,labelle,adjoint,mclennan,killa,lipscomb,monocytes,requestor,cyn,splint,digitech,mrnas,llamas,multifaceted,gamez,voorhees,boas,
solvay,thorsten,yeo,terk,privatevoyeur,coolmax,rebooted,toskana,unidiff,radionuclides,tilburg,decoys,pariah,offerors,wmi,darnell,meaty,gages,zapata,supt,bartleby,vermeer,pinstripe,hemodialysis,artis,tov,amateursex,dailey,egret,cornhuskers,fontconfig,jordans,guildhall,hasselblad,piney,unbundled,kusastro,onclick,functioned,toca,houseware,kdebase,ysgol,griggs,nicd,mdp,umi,fullmetal,pappas,aransas,tacacs,movem,abundances,oulu,fractionation,cdb,blitzer,ruc,karte,cashflow,retouching,brattleboro,eprops,cya,ubud,fmri,infosys,displacements,jerez,dhc,ielts,fellas,mno,picturemate,unicorns,playroom,dandruff,albers,discworld,leaved,existance,unionists,bloodlines,follett,irn,ramsar,woodburn,efs,auk,lockergnome,oocytes,armadillo,bsr,captiva,rinehart,brom,tlp,gensat,filers,lle,retrievers,pacifier,thurmond,stroudsburg,dominik,vivek,nla,inmarsat,unprofessional,hydrographic,mcadams,wailea,nforce,scones,paediatrics,nzdt,ilog,finkelstein,candylist,appalachia,marist,musgrave,vakantie,varanasi,yushchenko,relativism,jardine,schuylkill,ericson,schweizer,stravinsky,keds,ananda,nsx,jud,tripwire,aves,rediscovered,headstone,depleting,junkyard,perma,copthorne,multitasking,distrib,byob,tunstall,hager,spearheaded,nacho,underlining,heshe,jcr,catalogued,rawlins,springville,differentially,powwows,tsui,inductor,chalabi,encephalopathy,grote,ebs,raipur,custodians,guardia,jlo,khalil,overstated,webtv,insulators,kass,weds,servizi,quicklink,qso,dumbest,prowler,loadings,epos,sizzle,desalination,copolymer,duplo,lawnmower,skf,nontraditional,piet,ghaziabad,dredged,vct,marcasite,kamp,scoliosis,arwen,artie,fifths,austell,fernie,carport,dubbing,weblist,maximo,bax,searls,scuk,uiuc,crustaceans,yorkville,wayback,gcg,ural,calibur,girona,haig,perk,zander,samir,freee,avia,developement,pptp,beac,urbanized,trentino,marzo,dfl,lpa,jiri,mccollum,affymetrix,bevan,ichiro,dtt,cofe,loyalist,verma,daybed,rimes,quimby,barone,thomasnet,koeln,endocrinol,evaporative,gwybodaeth,preshrunk,hezbollah,naga,mmu,februar,finalizing,printhead,blanton,zellweger,manhole,eroding,emap,searchgals,typewriters,tabasco,cpb,coffman,lsm,rhodesia,halpern,purebred,netapp,masochism,millington,bergamot,shutout,willson,chown,prosthetics,proms,zk,karol,underlines,mosh,bakelite,kirkby,intermountain,holtz,prensa,vegf,galesburg,lba,klondike,webstat,reeder,neoplastic,applesauce,fibreglass,kenji,gluon,feisty,hynes,clogging,nonverbal,etoile,orangeburg,ladybird,concat,milliken,byproduct,specializations,chaintech,swa,porterville,kbyte,bizwiz,congruent,boehm,selva,rainey,aphis,rfs,tarantula,egovernment,udf,snuggle,shang,batten,inop,lough,vigrx,trios,bvi,unallocated,nau,condiciones,wss,modi,componentartscstamp,dyk,maldon,xantrex,dlg,edx,karzai,navi,brockport,cort,softgels,engravers,wether,hangin,handicaps,associazione,khu,nfb,dohc,clu,capps,vijayawada,griffon,biologics,bluescript,instantiate,paperweight,dilation,izzy,bedspread,knudsen,jabberwacky,kiowa,overtones,gsr,faithfull,quezon,pragmatism,rct,usi,wiretapping,fabricate,exabyte,pitty,kcl,pendragon,opment,kva,meeker,bootlegs,jimbo,jarrow,mullin,gridsphere,activesync,macwarehouse,vela,wikiusername,hessen,eyelash,gob,antifreeze,beamer,feedblitz,harvick,clicker,immobilized,dalmatian,hemodynamic,reshaping,contessa,elc,stagecoach,googling,maxpreps,jessup,faisal,ruddy,magazzino,jippii,academe,fjord,flybase,alpena,psl,junebug,grissom,shiki,knockoff,kommentar,westpac,gosling,novosti,mendel,adtran,wasserman,transexuais,aslan,hoge,fouling,macfarlane,hideshow,trailhead,edg,bayshore,preprints,grs,duction,anesthetics,nalgene,iaf,khao,berhad,savedrop,mag
nifiers,chitty,goldwater,lesbiens,jumpin,payables,victimized,tabu,inactivated,respirators,ataxia,mssql,storylines,camaraderie,carpark,internetworking,gawk,planing,termini,avaliable,scho,buysafe,hds,iad,pleasantville,fabrications,wtd,loh,jamshedpur,denture,gaudi,bluefield,telesales,vpc,ppr,jetsons,protagonists,fjd,anoka,boliviano,curtiss,wagoner,storyboard,trol,rajiv,xfce,axons,dmso,immunotherapy,namorada,neva,zakynthos,weitz,quercus,nhhs,amara,microcosm,raia,bizarro,mehmet,christos,categorically,autoresponder,aad,adolfo,welwyn,nzlug,vci,catnip,whittington,sorel,boned,vittorio,seta,tomasz,annes,tonka,nath,toth,tomaso,ascap,livedoor,schlampen,altamonte,scotweb,pillowcases,medlineplus,ambiente,masterson,nlc,fibonacci,bridgeton,wmds,tyrrell,junky,ballasts,jbuilder,cnf,nagano,hardman,roadmate,interleaved,peirce,pusher,egm,thetford,rtm,gnostic,coreutils,uninstalling,heft,ambivalent,startpage,difranco,mmi,typist,estudio,seiu,moisturizers,cardiol,lamination,bibi,mof,carpe,scottie,blackrock,pons,fistful,somethings,itl,staffer,rhiannon,linspire,cornucopia,newsfactor,countering,worldpay,catan,almaty,appraise,runny,braunfels,reorg,icg,javax,sema,albumlist,heraklion,stressors,shg,collocation,mccauley,vesicle,stuffers,prego,ichat,lubricated,sinha,pharmacia,aggiungi,shakin,cyr,vce,vigilante,gauging,lipase,constabulary,biochim,epcot,cricketer,defibrillator,rcn,drooling,stoll,staines,tnd,adversarial,tbn,softwa,pbc,ptp,demonstrator,boingo,voyeurs,aoki,banerjee,hondo,hysteresis,workspaces,campion,lugano,mobilisation,pruitt,foals,aciphex,sculpt,iskin,soledad,bagpipes,devaluation,beastyality,segway,mineralization,grc,trafficked,stedman,gurl,mcginnis,dvips,klee,garber,wizardry,fervent,headrest,dermatol,chaperone,huygens,eurythmics,transboundary,reclassified,delusional,tosh,pimpin,husqvarna,faxpress,tinkering,unneeded,babar,pago,hussey,officeconnect,mickelson,leukocytes,wesnoth,hydride,npp,zondervan,pele,opeth,kottke,hometwat,ogm,mauna,kilns,bpi,kst,harbin,assemblers,karst,wada,selfless,gynecologists,enewsletters,willi,bip,nami,guestbooks,sharjah,aguirre,krug,dongs,drv,schoolers,kidnappers,lemmon,ilan,gnutella,deutsches,liquidator,evers,uniross,grassley,stowaway,brainer,organiza,cellog,channeled,tastings,deccan,aiaa,neurosciences,factorial,librarianship,texmacs,vocabularies,blasters,livable,tifa,nant,libjava,ramblers,counterproductive,catskill,environmentalism,ufs,gwalior,ubl,kilts,balenciaga,alamitos,newsburst,septum,animators,signifi,neoclassical,mediaeval,piezo,escudo,pineville,botanica,petter,adenine,fren,lysis,pastas,helicase,dredd,efinancialcareers,diehl,kiley,kwd,ihousing,yoruba,malformations,embarassed,alexia,checkup,commited,nanotube,becta,trados,portofino,lifesaving,danh,sctp,tayside,rani,playmobil,tualatin,razorbacks,ionized,perodua,trg,subst,cpap,molex,vitara,fostex,zmk,placental,parses,saic,newsmakers,dshield,homocysteine,juego,metamorphic,cld,otcbb,moet,rado,watchguard,sugarland,singularities,trophic,ekg,dacia,reversi,insemination,houma,quetzal,shoshone,linder,homing,highbury,eizo,podiatrists,conch,crossref,hda,poppins,chaim,cytotoxicity,xugana,weevil,integrations,clarkston,ritek,morgue,unpatched,kickers,referers,kitt,servizio,biosecurity,leviton,twl,etx,electrification,peninsular,juggle,yeshiva,sociologist,wsc,sartre,finitely,spect,kathie,ards,corny,brazilians,lundy,histocompatibility,woolwich,irp,handango,cosgrove,sulfuric,renderings,msh,trt,ldcs,lect,kollam,edgerton,bulleted,acupressure,thotbool,hiawatha,nhfb,ahps,operon,ugandan,paton,suspends,categorie,stratigraphy,howes,surfed,steins,babu,andra
de,agarwal,ncd,surefire,cori,planetside,snorkelling,waterworks,luk,headlamps,anaesthetic,isomerase,fdisk,dunstable,awb,hendon,accreditations,doral,nta,macadamia,takin,marriot,bfs,disqualify,ttp,sixt,beazley,rashes,najaf,hwg,bukit,antiaging,psychol,dfe,bedingfield,equated,swig,lightscribe,unionist,lytham,clocked,duced,complementing,keycode,pennants,camas,eamon,zaurus,qnx,srx,delux,uli,grrl,bookie,boggling,skewers,richman,photodisc,oto,uav,cnhi,umberto,bautista,zooms,newsdesk,roadblocks,klum,goh,goebel,pou,homophobic,diamondback,foosball,rept,spurgeon,lumberjack,marv,epidermis,mobley,oktoberfest,photoshoot,rhinoplasty,peptic,bauman,tannins,psychotropic,tilley,malaya,hypothalamus,shostakovich,scherer,tsh,manipulator,calabasas,coromandel,pliner,timestamps,pango,edexcel,snc,nim,gwaith,breaststroke,oroville,mitsumi,ichi,mobius,deductibles,nikola,berrien,peacemaker,ilia,bookmarked,letterbox,halal,agl,noor,noll,filenet,freeland,kirsch,roadhouse,charted,microtubule,cubicles,blau,ladysmith,gatti,ection,switchable,mcminnville,hcm,interactives,altus,phospholipase,transformative,samuelson,completly,anhydrous,germplasm,gradzone,gdansk,jenner,parkin,unmoderated,wagers,beliefnet,hotbar,canis,ravioli,enrolments,walling,marblehead,dvt,cameltoes,ribosome,carnivals,srf,speedman,instrume,moffett,augustana,topsoil,latifah,isomers,pettit,lemans,telescoping,gamedesire,koha,balancer,picton,underhill,dinghies,chooser,argentinian,ahrq,apparels,timescales,cef,athenian,mcewan,sexshop,zermatt,mha,geert,bugging,trento,lyndhurst,nex,wdc,symbiotic,wds,dyslexic,nomic,tecnica,mmap,wishbone,mcad,prm,bashir,licenced,larissa,collab,squirter,infecting,penetrations,protea,argento,polyvinyl,ganglion,ruud,bunt,solgar,lipper,chimpanzees,jdo,testcases,tda,hamza,meeks,athol,centimeter,excreted,paros,azzaro,nappa,sirna,sexvideos,nonprescription,lyd,firework,crlf,localize,tablatures,jndi,vigorish,dcd,schulte,gioco,chested,universit,thrivent,jie,hydrothermal,smalley,hoke,ramen,coleoptera,intensifying,copyleft,llb,outfitted,khtml,chatterjee,adoptee,augusto,resnick,intersects,grandmaster,nusa,deadball,cksum,historiography,amistad,bellacor,trcdsembl,campagnolo,downgrades,sexbilder,scrapping,pdoc,haskins,bullhead,rhett,mimosa,wildfires,ellyn,hryvnia,halved,cfml,vatu,ecademy,dolore,shauna,multilink,funchal,ximian,bergamo,quarterfinals,hobbyist,reardon,homozygous,glyn,popset,torsten,puller,mathworks,namm,dena,mdksa,dcom,danskin,bexar,dinning,pfd,misfit,hamden,hardie,redfield,scotus,quotable,cranfield,asides,beacuse,musicstrands,kla,unternehmen,teg,roseland,pgbuildfarm,volo,zirconium,noelle,httpwww,agement,guan,tcf,opencube,shao,mears,rectification,omc,duisburg,pows,hsphere,entertai,keeler,highpoint,stratospheric,newegg,preeminent,nonparametric,mistral,percocet,zeroes,kth,divisor,wanderlust,ugc,cleat,decentralisation,shite,verna,immediacy,trak,swingin,eckert,casco,olivet,resi,bergeron,felonies,gasification,vibrio,animale,leda,artesia,casebook,nhc,gruppo,fotokasten,yaw,searing,detonation,gse,approximating,hollingsworth,obasanjo,pinewood,tangential,ridgway,headhunter,ero,sharkey,clwyd,bretton,bustier,apologizes,manoj,muskogee,pismo,resortquest,diskeeper,lathrop,pala,glebe,xterra,pml,seahorse,geneve,wpointer,softener,breaching,maelstrom,prioritizing,jsa,annunci,modelos,seraphim,raymarine,dodgeball,munity,assfuck,alopecia,singaporean,nowak,keyboarding,beachside,sparco,robeson,navbar,fsr,contribs,lineages,sumitomo,dermatologists,marbled,probleme,irv,blackmore,bothersome,draconian,troup,approver,pcgs,saville,srinivasan,poldek,perfor,articular,gwynn,t
rackball,asis,mansell,unf,werewolves,magazin,sible,vla,autocorrelation,waltrip,mombasa,schroder,alachua,hks,duns,ornl,cabrio,guanine,bridgetown,rhsa,luka,cpf,roadstar,creditcard,frf,michaela,willett,brews,baskin,hamel,zoids,semantically,cagliari,eggert,valkyrie,airlie,salas,gnomemeeting,benji,nent,cashew,unproven,myocardium,kap,gini,prek,cypher,paraiso,nightline,cursive,organises,hydrated,csk,schwanz,martinsburg,liguria,hsieh,forties,pgc,sayre,photosynthetic,pips,tongued,lifetips,walcott,cname,unapproved,emm,nematodes,jaclyn,kell,gremlins,bolero,togethers,dicom,paroxetine,vivien,gpr,bru,ilt,lished,tortola,mav,powertrain,telkom,immunized,nuneaton,fica,trulia,ricochet,kurosawa,aberrant,nld,ukr,wyandotte,odpm,pgk,dumber,ruptured,insoles,starlet,earner,kem,radiologists,polydor,nutraceuticals,zoomed,groupie,brinkmann,thrombin,aco,laminar,immunoglobulins,jamnagar,camber,vxi,colliery,incubators,procimagem,sweeties,landfall,seanad,intramurals,kwok,borderless,methyltransferase,suwannee,lgs,cjd,hyperlinked,birkenhead,torrevieja,purposefully,gutted,serveur,grr,morrell,ouachita,imran,slat,freeways,multithreaded,newlyweds,documentum,ebm,xiang,burnin,reelection,hales,rutter,uunet,vitreous,noord,centrelink,lempicka,iru,countable,dolomite,salvaged,soyuz,frick,lwp,afterglow,ferent,maes,mandi,secunderabad,millwork,sampo,takedown,colostrum,cfnm,judeo,wisc,lata,sexi,homies,tarmac,customisation,conservator,pipettes,goon,artefact,expository,complementarity,cosco,mercosur,tfm,benzodiazepines,mii,netmask,stalling,molnar,hmso,huw,aliso,decors,oldman,nuevos,acis,somthing,zabasearch,steuben,minicom,hausfrau,goldfields,rickey,minichamps,usagi,bisexuales,rothman,shana,srivastava,oemig,beefy,senha,pica,pucci,skits,shenyang,mussolini,kootenay,ethnology,donohue,cyc,childers,mahjongg,davao,tajik,codemasters,mydd,charade,arnhem,bobbin,istudy,rugrats,dancewear,mechanized,ject,mayes,canmore,reassigned,nnnn,crema,bursa,cfu,svm,riccardo,realvideo,lites,krall,centrifugation,welds,braunschweig,coptic,securityfocus,reorganisation,conglomerates,dehumidifiers,dumper,hamill,halston,iau,wfc,spiny,arezzo,mbeki,invisionfree,dropkick,elastomer,wahoo,anagram,fogdog,finnegan,gof,newsworthy,defs,sensitization,hyperactive,sidi,antenatal,elektro,nordsee,yuna,pluggable,hemophilia,kola,revitalizing,seepage,alitalia,orale,wri,ory,bcf,wooten,nonviolence,baume,berkman,ashdown,diciembre,purports,fcuk,shillong,mondial,brushless,technicolor,narragansett,barenaked,pandagon,rehabilitated,outdoorliving,expendable,ponca,tigard,soulmate,kaine,maxis,poppers,allposters,commercio,dods,tsl,volusia,iic,thm,elibrary,datebook,rapists,ultrasparc,seabed,orly,complicating,suzi,texturing,correspondences,groomsmen,avo,latour,manipur,arnett,suzhou,headboards,cil,palomino,kol,pomeranian,diptera,gericom,steiff,cordis,erythrocyte,myelin,fragility,drucken,reso,hov,tsukuba,kustom,invoiced,hannigan,hangul,montauk,modulators,irvington,tsang,brownian,mousepads,saml,archivists,herringbone,bodom,harrahs,daiwa,juanes,nids,moorcock,ccu,eyeliner,totalled,syp,woken,aphids,cutthroat,coincidental,lepidoptera,buda,tarrytown,vaseline,bluewater,strontium,burdick,crustal,hackman,shopnbc,aicpa,psal,albicans,seduces,epps,kroll,unambiguously,staley,cutbacks,hemet,ariana,pch,cgmp,mcas,multimeter,anubis,htr,analyte,peseta,enh,glitz,kewl,bidi,winsock,lvs,moldings,peltier,iod,ior,trackmania,ballets,doylestown,spaceflight,quicklist,proportionality,overruns,yadav,sordid,qpf,mentorship,lyx,tained,oligonucleotides,bbci,spidey,videotaped,regnow,jukeboxes,xpdf,portishead,irt,splunk,kommentare,citywir
e,crud,nev,febs,adu,ird,ribeiro,abrahamsson,epidemiol,coms,vdo,outro,pneumococcal,tilton,brookstone,apic,avenge,alleviating,sportif,inservice,punts,tives,sora,tgs,daugherty,yarrow,wakeup,meatloaf,mumford,datafile,buchen,zzzz,objectclass,polices,dogging,cursus,plasminogen,kinsella,lindgren,asymptotically,duce,wonderwall,crick,pvd,enveloped,mnfrs,caseiro,instabilities,muskoka,jeni,thalia,apac,reforestation,paradoxically,dren,dubbo,inductors,opin,symlinks,gamestracker,secam,gatorade,irm,cava,rupp,wacker,lanta,cres,yue,oligo,chairpersons,incesto,spca,zapper,materialized,accolade,memorized,squidoo,interpretative,roping,rauch,oxymoron,reciever,maryann,pentagram,viv,infusions,slvr,choppy,robotech,spb,servic,saya,univeristy,bahamian,gos,fwy,nocd,stipends,stirlingshire,caerphilly,riboflavin,fiu,kalb,ubiquity,vandal,romper,bitumen,nolo,shimizu,postpost,rummy,paleo,unrhyw,pinscher,constructively,sufjan,christiane,spliced,finca,gpf,iaa,iesg,brecon,kiran,trekearth,repeatability,gunning,byblos,tadpole,mitsui,storytime,berserk,wellman,cardiologist,jammin,leis,hirst,fellatio,ggc,terran,breadcrumbs,lorena,remaster,tpg,cifrada,curvy,envisage,boneca,basements,sharpton,crucially,lfn,imao,antonin,soundgarden,carrara,bron,decoupling,monroeville,environmentalist,msha,eastenders,adultfriendfinder,bein,stef,fpgas,mistreatment,rbl,qlogic,shona,sutcliffe,previousprevious,infective,estrella,gans,shards,vcds,acadian,kahului,phonetics,comittment,blix,biocompare,whimsy,frameset,kot,nyack,lolo,carboxylic,pkgconfig,dipartimento,traceback,svlug,microdermabrasion,waterbody,jeeps,tiverton,wundef,spay,gilmer,ceqa,bodog,followups,internat,biarritz,gurps,bessemer,iceman,pegged,liberator,rediscover,lovecraft,wavefront,bhangra,zuni,epm,meningococcal,ketone,glazer,yashica,geodesic,congruence,tenkaichi,omani,tenuous,reuter,surfactants,cohomology,epicenter,toke,dwf,santas,kutcher,christo,lucio,phenomenological,debriefing,miniskirts,ansmann,mfps,lentil,kannur,backer,albedo,flsa,pauli,mcewen,danner,angora,redstone,lxwxh,informacion,phyto,libpam,blo,cocky,pitchfork,stratocaster,mohegan,brazzaville,broussard,beano,interconnections,willa,toiletry,sats,beko,exchangeable,colm,arabe,stretchy,starburst,dzd,neurologist,leonards,kitties,dottie,rspb,fwrite,homicides,forde,ipf,travelpro,haemophilus,ronny,hubris,bottomline,kosova,neuropsychological,genitalia,waiving,swirls,dampers,comhairle,cheech,eigenvectors,extrapolated,chaining,defected,yurasov,gakkai,justia,campylobacter,northumbria,seidel,kenseth,pmr,kare,dumbo,holocene,jwin,superconductors,yeung,polygram,egon,distillate,unweighted,gramm,safeco,bentonville,ishikawa,vuv,strachan,bayard,escalator,periwinkle,breakin,rsmo,publishi,darmowy,outfile,choreographed,obrazki,accross,yag,gravesend,lovemaking,boucheron,farrow,annulment,kwai,tubbs,bartow,tonbridge,lesbico,panerai,spate,belladonna,lexi,sobering,carcinogenicity,djf,semis,pcv,suppressors,leachate,dingle,mbendi,celina,hydroponic,hoyer,xia,kovacs,recalculate,maltreatment,hitchin,medtronic,meerut,whsmith,fontsize,relaxes,kis,halos,cracow,saco,webcomics,ife,sauder,dioceses,uct,postdoc,biceps,leela,hydrant,hamstring,darrow,tinderbox,sify,naw,ganguly,streetwise,imprinting,dandenong,colecovision,gnuplot,nucleation,werbung,prb,blr,croce,deviance,goldfrapp,tetrahedron,materialize,homeworld,foodborne,baixar,stagg,fondness,ellicott,merchandiser,ler,djia,eastleigh,blacklisted,freetext,wxhxd,multiplicative,metis,urethra,dalrymple,retroactively,hartnett,gcd,kilos,multivitamin,vientiane,koji,scran,bwp,emoticon,mercator,lyricist,macromolecules,fungicides,am
ines,karcher,cssa,freetown,beneficially,tugrik,monotype,ishii,kempinski,pigmented,mipsel,ridership,athenaeum,twikiweb,mpm,faking,clsid,kenobi,endoplasmic,motorised,lomax,geraldton,eck,cssrule,auerbach,metlife,apocalyptica,masa,risotto,follicles,ashtabula,sussman,exmouth,melua,cvss,pana,stimulators,gnf,uvic,asustek,dieta,famvir,conflicted,retirements,sixers,metab,gregoire,burris,creat,rajan,brainwashed,berenstain,crittenden,antoni,gbs,associ,yankovic,gnvq,rogaine,kek,gridlock,integrable,chalkboard,dopod,unranked,karlsson,anaemia,natur,permian,bartley,unaffiliated,slrs,montreux,partici,starbuck,infractions,karon,treviso,backdrops,turkmen,standups,sowell,aktuelle,gleeson,lss,globulin,woah,nte,midob,violator,boxcar,sagan,aviso,pounder,vieira,kronor,tocopherol,keiko,newsrx,lesbe,pharmacokinetic,intercepts,tirelessly,adsorbed,ksh,plunkett,guenther,penta,phospholipid,reiterates,wuc,oversaw,arraylist,qy,outsourcer,eyeshadow,pushbutton,doujinshi,catagories,pilar,paltz,viaduct,pugster,elastomers,evenflo,mmk,wadi,secularism,cellspacing,trekker,llm,pakistanis,glyphs,neuroblastoma,loftus,gigli,thorp,seeley,producten,glandular,aligns,rejuvenate,grt,northants,ifconfig,sherrill,wintasks,xenia,whangarei,hra,expres,nadir,recoup,rnai,fyr,franchised,batchelor,relocatable,warhead,backfill,fascists,kedar,adjacency,iberostar,mancha,gorton,insta,jni,cellpadding,larnaca,carmarthen,endgame,streamlight,golan,thomann,totten,curbside,samhsa,howrah,planer,hermaphrodite,gavel,bassinets,footjoy,fairtrade,gah,prestwick,paoli,alben,laconia,berkowitz,inputting,dimming,indiatimes,arcgis,goof,landmine,boracay,appro,notifier,wirth,valerian,bucher,wts,saad,weisz,enrollee,authenticating,wheatland,zildjian,revisor,faauto,profs,pheonix,seitz,administrivia,foams,leh,orbitals,hammerhead,dotcom,xof,klezmer,fosgate,walworth,niguel,quickfind,isakmp,facia,stalemate,multimediacard,motrin,glx,classifies,ischia,ankh,mohali,incurs,feist,ldb,netzero,rationalization,eef,brokering,viewport,isas,masterbate,geneseo,grammer,garantie,sanofi,malignancies,yaesu,jpegs,spitz,chea,limassol,lobbied,splat,nostradamus,gallium,mobb,mannered,dorada,nalin,sorbet,lunenburg,phc,tdma,bodycare,jobsearch,sharia,topiary,cataloged,camsex,avm,kimber,extendable,ager,pella,optometrist,tinh,bogey,kana,pipette,bln,coveralls,teng,stayz,isolator,wicking,cph,zany,umatilla,austral,applauds,taks,interferometer,barbican,ohana,rebs,cerf,criminally,mkv,adio,psychopathology,lkr,leyton,cartoonists,appellees,indira,redraw,pictbridge,mahesh,beng,ncar,gord,nanometer,faceless,moyers,oregonian,aftershock,gena,leggett,wsdot,classique,menon,spiro,whiteboards,strategists,dnv,loti,kaos,hydrotherapy,marionette,islay,myv,typeof,igt,nitty,ddb,quintile,freightliner,monkees,lindley,dehumidifier,industrials,bouncers,transfered,mages,dmb,roseanne,chk,trigraphs,rer,bettis,cyberlink,browsable,workhorse,iterated,mcfly,kyd,pooping,preferentially,fraternities,diuretic,octubre,castell,emerg,sampras,gephardt,zimbabwean,unexpired,westmorland,biscotti,mavica,everyones,shaikh,nampa,youngblood,plana,refractor,bouldering,flemington,dysphagia,redesigning,milken,xsel,zooplankton,gsd,philatelic,modularity,parkview,keto,marrone,wallmounting,tias,marengo,quiche,epoc,resales,maduro,murrieta,fairplay,ddp,woodinville,registro,transcriber,notarized,neocons,franchisor,diab,vying,morehouse,lauper,bedspreads,pooch,morphism,gripper,tavistock,negated,javabeans,nashik,atomki,musicianship,viaggi,bbn,cady,adios,purview,bosque,xxxl,dyfed,biomaterials,overpass,berners,goaltender,speedometer,ultrium,carteret,fatwa,bottomed,supe
rscript,rwandan,proteinase,coolermaster,maca,haircuts,crewneck,discriminant,bayfield,mishra,morey,multiplexers,pcga,stade,carnivore,codingsequence,knowledgealert,egalitarian,pombe,yamato,jenson,mortgagee,middlefield,iiyama,schell,midler,nags,caplan,anyplace,haridwar,sternberg,ventilating,retreating,shopsafe,mohave,brion,immun,zapf,mingus,prolly,trichy,microform,olsson,jdc,dosimetry,smelter,rayovac,takeda,mbt,ied,dynamism,fileattachment,rabat,devs,mellor,manmade,somaliland,hashtable,sdb,conto,furtado,statics,saleh,puja,kamera,eport,killian,rucksack,janette,powerware,phenylephrine,cupcake,karp,bodum,celular,zamora,qian,dws,psig,polycystic,titts,krzysztof,parsippany,raggedy,eason,epg,bsg,payloads,alon,cebit,wedgewood,daten,pbi,annexe,cyclen,customizations,stunningly,hugger,junio,jtc,xcd,prequel,strathmore,champloo,billerica,talley,estoppel,ameritrade,torr,cytomegalovirus,bpel,domus,madigan,supercool,ysl,contaminate,rxlist,sailormoon,ubid,plovdiv,mcsweeney,govideo,bassinet,taillights,typhimurium,dez,fci,visionaries,salesmen,nicki,skagen,hibernation,ponders,rrsp,middleburg,innkeepers,mcauliffe,gardasee,pcn,asce,aromatics,interplanetary,landcare,towneplace,downloaden,discontinuing,bork,sealers,weybridge,wusthof,interbank,hullabaloo,erratum,contreras,sandwell,novgorod,earbud,jds,coastlines,echolist,guntur,lmp,trunking,foxtrot,rosanna,patchouli,inequities,testes,defaulting,alpert,securitization,nsfw,borer,originators,postid,phx,censoring,hashimoto,oriole,chipotle,slocum,ipeople,rdg,reusing,saeed,wetzel,mensa,shiner,chal,rhesus,streptomyces,datagrams,invalidated,shenanigans,mkii,sandford,lennart,pract,npi,travelguide,championed,biosolids,billable,givers,tmdls,cockroaches,testcase,faraway,cfengine,umbc,underwritten,biofuels,cyberhome,dinh,zegna,tarps,sociologists,ellesmere,ostomy,vso,sena,ingest,gazebos,sirloin,cyclophosphamide,bitdefender,catz,bpp,giancarlo,kategorie,arjan,valery,kmc,insp,recomended,dataport,pfaff,manuale,rog,niven,mahi,ghs,atsdr,rangeland,commonality,xid,midis,cwc,regrettably,navidad,yahoogroups,kaw,ston,ves,pulau,playbook,digipak,jetblue,kavanagh,exhibitionists,armidale,arquette,copland,namib,cne,cheapflights,wyvern,lucene,muffled,vincennes,inlays,lockets,whitey,brin,wharfedale,guyanese,laryngeal,outfielder,nonattainment,softimage,cellgroupdata,literatura,myoplex,yorba,bct,pva,slapstick,cottrell,dialers,subculture,cmx,modded,skids,roselle,klub,marathons,tgt,skeet,toucan,masterclass,nnp,calcio,oxidizing,alo,kennebec,zj,intergalactic,biomolecular,cii,powweb,mcwilliams,phosphorous,photocopiers,obligor,matcher,listbox,voigt,fdl,dawley,scribus,lessors,npn,luminaries,karats,bridger,slm,hadronic,fairport,piecewise,recharging,dmm,unionville,intermedia,goetz,urinal,joystiq,grosso,sobaka,payphone,rockfish,duodenal,uninstalled,leiter,coworker,escuela,cyclades,longterm,taber,screenplays,gpt,shiites,ntop,farcry,jitsu,lactobacillus,uniontown,cloner,otaku,hoyas,kandahar,kerrville,akers,neuropsychology,multimap,allston,femininity,trask,accuweather,deferment,wam,fmp,portlets,glsa,westmont,waders,cellulare,homehome,frogger,hass,rya,seqres,hellfire,havering,montfort,chokes,eharmony,knowsley,bordellchat,cvsweb,houdini,umr,canarias,babyshambles,bridgette,cinque,drezner,hsin,alcan,stas,outlier,naira,neverending,masson,khanna,systeme,hillsong,camshaft,exotica,milburn,bijou,destdir,innervation,gga,oqo,cunha,reefer,techspot,hibernia,alpina,iarc,constraining,nym,dard,estefan,fuser,lepton,pergamon,wiktionary,razer,poznan,netscreen,manda,npv,xmb,kingstown,topix,batsman,wavelets,cogs,bigtitsroundasses,barnhar
t,scofield,ebrd,desorption,bellflower,watertight,stevia,photocopier,haverford,talc,penises,gwendolyn,buynow,nairn,prolab,lundberg,backordered,coh,mononuclear,unocal,brunson,greenlee,emer,txdot,prichard,conferees,renata,ternary,footballer,sisyphus,directfb,foolproof,chastain,lakshmi,dsb,megane,cdo,someones,rebelde,morrigan,mymovies,tiananmen,immunosuppressive,mcveigh,stylin,brower,mpltext,aibo,pdd,depositor,ofcourse,ecdl,redenvelope,acidophilus,deci,defensively,analytica,cnd,hrp,tnr,tryon,forgo,barca,pahrump,foros,pickabook,hellraiser,lithographs,educates,ediets,gopal,signers,digext,netbackup,dimensionality,triax,rnase,aman,angell,bochum,eyepieces,earbuds,americablog,makeovers,unprocessed,pfa,widctlpar,clausen,punbb,centra,monson,infogrames,azt,xalan,hydroxyl,medpix,interacted,gpi,polishes,canoga,numismatic,avoidable,brantley,adenoma,aah,prostaglandins,powercolor,beaconsfield,lakhs,mhd,lesbisch,flammability,truancy,jharkhand,channelweb,givn,flatiron,midlife,guerin,indianola,unavailability,rooter,wanaka,lompoc,widener,cll,kmail,websense,vmi,residencies,cablevision,pye,disrupts,onetime,kenzie,gating,boingboing,sevier,eberhard,chek,edr,kharagpur,fotze,cvp,deflated,infestations,judgmental,meiji,antipsychotic,uwm,infn,slaughterhouse,stix,asg,bagging,brainwashing,dmp,disconnecting,thera,mclellan,rong,telcos,wilmer,sphincter,orgys,newsom,infill,fairhaven,etude,stereotyping,talib,dreamstime,rearranging,geographies,tipp,programmatically,handicapper,plantar,ogaming,xss,academie,quarrying,approachable,sweetener,braised,knut,tibco,fseek,vided,burk,spigot,skilling,hunterdon,nailer,roxette,hepatocytes,coupes,universitet,mauricio,lov,hnd,roseburg,berlusconi,chloroplast,charing,kansai,buzzword,nepad,pistachio,arv,lanvin,riverbank,lilypond,predominately,metalware,saugus,nmac,giza,lancs,culpepper,rohm,pretzel,warping,twc,raitt,iyer,connotations,iiia,wilber,yardstick,neutrophil,supernatant,solu,segmental,multitudes,imperium,radley,supercharger,imagen,thicknesses,brk,spew,vestibular,klausner,riba,witten,orth,calaveras,naep,deceleration,bcn,consignee,aldehyde,pronged,baring,jacked,bigalow,gyd,centerfolds,ortofon,cropland,wnt,nazism,kingswood,operationally,trix,testicle,rioja,bhi,technolo,lindstrom,pinter,minox,wofford,guaifenesin,hup,bifida,stratigraphic,dundalk,snipers,kshirsagar,ridgecrest,placerville,gosport,sjc,ircd,rubrics,kerouac,ebx,harken,foc,cooperated,nwo,cano,kearny,shopinfo,tlb,etp,obie,greaves,versity,amoco,inzest,msdos,gabby,dumbbells,ncaaf,ximage,homotopy,ironwood,adiabatic,pend,licznik,cck,sabian,saxton,patties,hopkinton,biotherm,ethno,videochat,cantwell,accelerometer,filip,whl,productio,milli,pdi,bedava,penobscot,grav,llcs,fmr,pimsleur,micky,setcl,johnathan,alisha,gambier,enterta,crosley,usace,byrds,sgm,darrel,isola,laminator,krazy,diaryland,bhubaneshwar,quadrature,summerland,alessandra,gsn,dentry,catskills,tablecloths,herder,gec,cinematical,outfall,unzipped,plcc,osb,interchangeably,concurs,wef,deformations,farting,nonspecific,mek,ohhh,atopic,harker,culling,limon,murata,zealot,arca,jmc,toot,rino,sisley,iveco,gooey,bielefeld,parrott,veillard,lisinopril,nprm,tookie,shanti,burkett,wemon,turmeric,carnelian,zea,geom,dorman,hmac,abstracting,parietal,glyphosate,underpants,appleseed,mandating,prequalification,macross,kondo,muzi,bidet,grubb,redif,oam,domenici,transdermal,abramson,recreating,snot,ductile,dimensionless,carex,contractually,kippur,fibroids,courtyards,calderon,dogster,flattening,sterilized,pkcs,unformatted,cvr,insulate,afd,tuolumne,cobblestone,showplace,stockpiles,mandir,autore,ashish,meijer,
camberley,babson,fiennes,meteorologist,colonoscopy,lofi,tryp,duromine,alkaloids,quesnel,ake,initrd,centrality,pisses,campaigned,twinning,imag,taster,greenlight,musicbrainz,sourdough,warrantless,mzm,croat,arbors,canwest,homedics,anydvd,jnr,odm,dnn,ashtrays,punters,dropper,sarkar,szabo,wack,ecx,fette,axl,yoy,spyro,kendo,surinam,suze,xenophobia,krypton,heisenberg,dvcam,nary,ninn,csis,reconfigurable,smil,courchevel,kittie,lipman,doz,bsl,chucky,schlampe,webdev,doubleclick,bushman,pornofilm,ood,conexant,hydroxylase,rme,multipass,woodwinds,telefoon,ricotta,motorways,gandhinagar,nsg,edelweiss,frampton,humidor,vacationing,naturalizer,dinesh,techassist,airdrie,schiphol,bruner,tangy,cfe,gurnee,bogdan,farina,gant,cokin,tricity,cutaway,artsy,severability,transferor,cliches,nosferatu,indycar,klimt,onetouch,dooney,oconee,smartbargains,prl,sackville,camberwell,hotlines,hazelton,nlg,reaffirms,anleitung,webalizer,libboost,golds,pfs,imei,corante,recipesource,ranching,seguin,calderdale,anzeige,toothpick,volser,westcoast,forwarders,aab,likable,ashburton,natrol,sonstiges,shoestring,vsx,hosa,brads,winsite,whirling,doghouse,displaytime,bda,ranitidine,elit,grebe,standup,playgirl,flexion,ibex,geomagnetic,lowestoft,blobs,footers,reiss,lewistown,droppings,designator,causative,brt,woolrich,gwasanaethau,keefe,tfp,loveseat,diethylpropion,karyn,handedly,uncontested,fov,doxorubicin,nerja,cardiologists,militarily,fsus,inflating,sputnik,barometric,joburg,assertequals,gladwell,regrowth,lusaka,lampwork,adultos,cybersex,banca,doughnut,martz,cribbage,mela,rondo,tigr,personel,wcpo,activ,uiconstraints,typescript,inetd,scuola,piste,pppd,enos,ondemand,altamont,steubenville,rur,danielson,barfly,vegetarianism,extractors,dictaphone,callsign,martinis,envisions,flexibly,nakd,natwest,wilsons,ccn,reposition,msci,orginal,hobbyists,anat,fleshbot,weta,sindh,pcf,glick,obsoletes,mammogram,sani,webcasting,soggy,apha,ecologist,ararat,narrowband,bph,webstore,maus,reinstalling,gendered,relateddiagram,kingsland,ssid,rackets,litigants,shimon,ducted,ebsq,crisps,modelle,wristwatches,xenadrine,linac,identifications,dressy,authenticator,arash,cristobal,stewie,depositories,pcre,setpoint,rockdale,evita,ballmer,hemphill,taormina,plath,pickers,boardgamegeek,serbo,oci,noviembre,mappoint,surn,minisd,madmums,mosher,digitallife,grahame,forecasters,linoleum,shearling,stockster,firstcall,dorint,wmc,culverts,cuticle,codebase,rdfs,lter,pimples,hdb,shorted,loghi,spunky,razz,komatsu,bietet,madisonville,readies,jovenes,deuterium,totalitarianism,trigonometric,selmer,popcap,verbosity,aashto,pavarotti,syncing,vanden,majeure,beret,fallbrook,audiovideo,muay,longshot,rollaway,yor,nonstandard,tbr,manoa,laundries,whoo,tefal,tothe,crv,amx,falign,goleta,holst,ebola,redbook,rangel,consolidates,disaggregated,chromatographic,supersport,golly,flumotion,seagrass,congratulates,anais,grievant,reinstalled,entreprises,clemons,eurovision,airplus,panchkula,shahid,phospholipids,elsinore,opendocument,ankeny,canzoni,wakeman,moana,wobbly,seagulls,megawatts,denning,temas,illuminator,marylebone,symbolically,erotico,linx,randle,nhu,unsubstantiated,centroid,monogrammed,gambian,tailgating,colville,vpu,russische,sgp,soccernet,zing,downunder,snips,allawi,lockup,cholinergic,lhr,barthelemy,babymint,benning,implantable,ligo,haddad,univariate,katia,motorcross,sangha,shn,myfonts,usuarios,caml,resiliency,barossa,astrobiology,disinfectants,kawai,uktv,dreamtime,berkshires,inhumane,trobe,unlocks,auctex,pogues,panicked,developerworks,bullitt,toed,smartcard,kushner,hardcoresex,crump,gunderson,paramus,cepr,lma,p
olitica,randomization,rinsing,reschedule,tob,hostal,preempt,resold,cyclo,phosphor,frontenac,wipeout,mambots,unscented,ipfw,ergonomically,roosters,homologues,loring,ionosphere,belvidere,trotsky,airworthiness,sistemas,devsource,retroviral,llnl,keyloggers,amgen,marci,willey,yau,groucho,foreshore,gusset,dissapointed,dtds,mibs,metalwork,refering,punting,triphasil,scab,bhavnagar,creedence,musee,wellstone,lleol,gpib,tidbit,allyson,teriyaki,impoundment,interrelationships,gres,coffeecup,maru,joon,josephus,ulong,maputo,chev,krispy,dogtown,abernathy,raz,fermion,weltweit,fluor,bergstrom,inoperable,esrc,asdf,gollum,ceus,macintyre,srd,cyclonic,cft,unsubscribing,shawna,pinyin,ipac,ramone,fethiye,multipath,hakusho,tein,treeview,atd,wonderswan,eugenics,dustjacket,emmanuelle,dlocaledir,molotov,sandpaper,hbc,fannin,interscope,eba,melayu,hardiness,liss,phew,furuno,moynihan,johnsons,heng,dro,carbonated,waives,wraparound,jfs,ejackulation,reboots,headliner,sqr,bustin,powernetworker,vul,superposition,supremes,insite,fanzine,laney,purportedly,antigenic,rurouni,dietetics,assembles,veracruz,hausfrauen,wsf,benzo,vietcong,chairwoman,petrochemicals,pata,cntr,nettime,techies,bentyxxo,xango,radish,gatto,checkmate,gantt,valli,tuv,starlets,plavix,roomba,aficionado,motivator,bijan,riv,storrs,tabula,reigate,emmons,sandstorm,laci,taoist,nameplate,axp,wcb,mothering,billard,chrysanthemum,reconstructions,innodb,sunspot,aisha,fluorine,healdsburg,retype,fishin,likud,cyberread,pme,rothwell,kmf,creationist,wth,setlist,scrollbars,bocelli,zuckerman,vtd,ampicillin,arcy,wasn,cowbell,rater,everson,angebot,cezanne,tamagotchi,earpiece,franca,thymidine,disa,gearlog,tranche,volum,prsp,openvpn,mcentire,londra,kaur,unconstrained,datadirect,souter,redfern,tulum,nyy,pagesize,osteopathy,stavanger,cated,autry,fip,rooftops,findpage,discourages,benitez,boater,shackleton,weirdo,congresswoman,dalek,tass,itrip,myob,helloween,reperfusion,fieldhouse,manukau,libname,eucharistic,mong,homeware,ckt,winmx,mobic,farts,rourke,lackawanna,villiers,comercio,huy,brooksville,falwell,gwb,donwload,wrth,attrs,knockoffs,esm,bionicle,hygienist,nichole,quidditch,dartmoor,rowlett,stapled,gardenweb,butternut,nummer,groban,asw,arora,yatsura,warr,hainan,esg,logoff,cockroach,xanadu,computable,occup,playgroup,tintin,ethnicities,webposition,crafter,roby,disassemble,boltzmann,caos,abidjan,anise,grainy,hospitalizations,notizie,zoek,sepultura,walkabout,pepperoni,optimising,cityreview,boathouse,katt,weissman,siri,herkimer,namecite,refreshingly,aph,ryland,sculptural,neurophysiology,gsk,hermanus,mocldy,ngage,annexure,ipchains,yosef,tlds,gozo,pso,helton,outflows,saas,asthmatic,guillemot,realizations,linguistically,jaco,mckinsey,dezember,hylafax,reconstitution,amateurwebcam,lumberton,interviewee,intereco,portola,hematologic,sgc,rebbe,pinup,transcendence,surah,brendon,farberware,statisticians,swatches,perioperative,maoist,henkel,lilangeni,trapeze,lemmings,extents,spams,omagh,workcentre,sunbird,cellophane,deland,blevins,sacha,cardholders,dddd,accessori,qo,araujo,mylist,pcu,kloczek,enet,seperated,clusty,rolfe,cuttack,provantage,dominio,hyperbaric,nannofossil,logansport,bulldozer,blacksonblondes,subprime,overpayments,sharpie,modutils,whitehaven,whaley,currier,taproot,topsite,delorme,rayner,aio,rossum,urbanism,colloquia,ewr,capillaries,mountainside,menthol,blackouts,starkey,eves,hpux,canby,dragonflies,montrail,findfont,aigner,urusei,soundblaster,beatle,webzine,propranolol,inescapable,swabs,absorbance,lbw,audiofile,simba,mohd,redgoldfish,cornbread,jcaho,appendixes,aod,crestview,keynotes,fotoli
a,subnets,cau,espanola,busnes,froggy,decarboxylase,elfman,throughs,prioritise,oreck,schottland,bagpipe,terns,erythematosus,ftrs,excitatory,mcevoy,fujita,niagra,yq,dribble,hardwired,hosta,grambling,exten,seeger,ringgold,sondheim,interconnecting,inkjets,ebv,underpinnings,lazar,laxatives,mythos,soname,colloid,hiked,defrag,zanesville,oxidant,umbra,poppin,trebuchet,pyrite,partido,drunks,submitters,branes,mahdi,agoura,manchesteronline,blunkett,lapd,kidder,hotkey,tirupur,parkville,crediting,tmo'
mit
google/gps_building_blocks
py/gps_building_blocks/ml/statistical_inference/data_preparation.py
1
28872
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Module containing the InferenceData class."""

import copy
import functools
import operator
from typing import Iterable, Iterator, List, Optional, Text, Tuple, Union
import warnings

import numpy as np
import pandas as pd
from sklearn import model_selection
from sklearn import preprocessing

from gps_building_blocks.ml.preprocessing import vif


class InferenceDataError(Exception):
  pass


class InferenceDataWarning(Warning):
  pass


class MissingValueError(InferenceDataError):
  pass


class MissingValueWarning(InferenceDataWarning):
  pass


class CategoricalCovariateError(InferenceDataError):
  pass


class CategoricalCovariateWarning(InferenceDataWarning):
  pass


class ControlVariableError(InferenceDataError):
  pass


class ControlVariableWarning(InferenceDataWarning):
  pass


class CollinearityError(InferenceDataError):
  pass


class CollinearityWarning(InferenceDataWarning):
  pass


class LowVarianceError(InferenceDataError):
  pass


class LowVarianceWarning(InferenceDataWarning):
  pass


# Force custom Warnings to be emitted all the time, not only once.
warnings.simplefilter('always', InferenceDataWarning)


class InferenceData():
  """Data container to be used in a statistical inference analysis.

  The InferenceData container breaks down the data preparation for a
  statistical inference analysis into five parts. For each of these parts,
  the InferenceData container provides internal `checks` and `methods` to
  address them. These parts are:

  * Missing Values
      Missing information should be addressed, as most models will not work
      with missing values.
  * Identifying categorical variables and one-hot encoding them
      Check if categorical columns exist in the data. If they are meant to be
      covariates in the model, these columns should be one-hot encoded into
      dummies.
  * Controlling for External Factors
      These are elements that add noise to the signal. For example, when
      comparing the performance of two different ads, you want to control for
      their different targeting. It's not always possible to control for
      external factors, as some of them may not be measurable or
      representable in the data.
  * Identifying columns that carry little or no information (low variance)
      Features that carry little to no information should be flagged and
      potentially removed before the modelling phase.
  * Addressing Collinearity
      Highly correlated features may confound the result of a statistical
      inference analysis. These should be identified and potentially removed
      before the modelling phase.

  If the above are not addressed during the data preparation pipeline, an
  `InferenceDataError` will be raised, or an `InferenceDataWarning` if you
  choose to ignore these messages.

  These are the currently available `methods` in the InferenceData container
  to address the above parts:

  Missing Values
    * impute_missing_values
  Controlling for External Factors
    * control_with_fixed_effect
  Columns with Low Variance
    * address_low_variance
  Checking and addressing collinearity with VIF
    * address_collinearity_with_vif

  # TODO(): Add list of current available methods for each part.

  Typical usage example:

    # Your experiment data
    some_data = pd.DataFrame(
        data=[[0.0, 1.0, 'a', 10.0],
              [0.0, 1.0, 'b', 10.0],
              [1.0, 1.0, 'c', 5.00],
              [1.0, 0.0, 'd', 0.00]],
        columns=['control', 'variable_1', 'variable_2', 'outcome'])

    data = inference.InferenceData(
        initial_data=some_data,
        target_column='outcome')

    data.encode_categorical_covariates(
        columns=['variable_2'])

    data.control_with_fixed_effect(
        ['control'], strategy='quick', min_frequency=2)

    data.address_low_variance()

    data.address_collinearity_with_vif()

    data.data_check(raise_on_error=True)

  Attributes:
    initial_data: The initial DataFrame with control variables and features
      provided when the object is initialized.
    target_column: The name of the column in the `initial_data` to be used as
      target in your analysis. This can be binary or boolean.
    data: Latest version of the data after any transformation is applied. If
      no transformation was applied, it will be exactly the same as
      `initial_data`.
  """

  def __init__(
      self,
      initial_data: pd.DataFrame,
      target_column: Optional[str] = None) -> None:
    """Initializes the Inference Data.

    Args:
      initial_data: The initial DataFrame with control variables and features
        provided when the object is initialized.
      target_column: The name of the column in the `initial_data` to be used
        as target in your analysis. This can be binary or boolean.

    Raises:
      KeyError: if the target_column is missing from the initial_data
        provided.
    """
    self.initial_data = initial_data
    self.data = initial_data.copy()
    self.target_column = target_column
    self._has_control_factors = False
    self._checked_low_variance = False
    self._checked_collinearity = False

    if target_column and target_column not in initial_data:
      raise KeyError(f'Target "{target_column}" not in data.')

    self._check_missing_values(raise_on_error=False)

  def data_check(self, raise_on_error: bool = True) -> None:
    """Verifies data integrity.

    Will perform the data checks in the following order:

    1) Check for missing values.
    2) Check that external factors are included and accounted for.
    3) Check for low variance and constants.
    4) Check that collinearity has been verified and addressed.

    Args:
      raise_on_error: Whether to raise an exception if a problem is found
        with one of the above checks in the latest transformation of the
        data. If set to False, the integrity checks may emit
        InferenceDataWarning warnings.

    Raises:
      MissingValueError: If the latest transformation of the data has columns
        with missing values.
      ControlVariableError: If the latest transformation of the data hasn't
        gone through a method to control for external factors.
    """
    self._check_missing_values(raise_on_error)
    self._check_control(raise_on_error)
    self._check_low_variance(raise_on_error)
    self._check_collinearity(raise_on_error)

  def _check_missing_values(self, raise_on_error: bool = True) -> None:
    """Verifies that the data has no missing values."""
    missing_percentage = self.data.isnull().mean() * 100
    missing_percentage = missing_percentage[missing_percentage != 0]
    if not missing_percentage.empty:
      missing = '; '.join(f'{name}: {percentage:.2f}%'
                          for name, percentage
                          in missing_percentage.to_dict().items())
      message = f'The data has the following missing values ({missing})'

      if raise_on_error:
        raise MissingValueError(message)

      warnings.warn(MissingValueWarning(message))

  def impute_missing_values(self, strategy: str = 'mean') -> pd.DataFrame:
    """Imputes any missing value with its column mean or median.

    Replaces the missing values with their `mean` or `median`. If more
    complex operations are needed to impute missing values, these need to be
    executed on the initial data before creating the InferenceData object.

    Args:
      strategy: If strategy is 'mean', will replace the missing values with
        their means. For any other value, the 'median' will be used.

    Returns:
      Latest version of the data after missing value imputation is applied.
    """
    if strategy == 'mean':
      impute_values = self.data.mean()
    else:
      impute_values = self.data.median()

    self.data = self.data.fillna(impute_values)

    return self.data
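
  # Editor's note: a minimal sketch of the imputation above (illustrative
  # addition, not part of the original module), using a hypothetical toy
  # frame with one missing value; shown as a comment so it does not execute
  # at class-definition time:
  #
  #   frame = pd.DataFrame({'x': [1.0, None, 3.0, 8.0]})
  #   frame.fillna(frame.mean())    # 'mean' branch   -> missing x becomes 4.0
  #   frame.fillna(frame.median())  # 'median' branch -> missing x becomes 3.0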

  def _check_categorical_covariates(self, raise_on_error: bool = True) -> None:
    """Checks if the data has any categorical covariates."""
    covariates = self.data.drop(columns=self._control_columns)
    categorical_columns = covariates.select_dtypes(
        include='object').columns.to_list()

    if categorical_columns:
      categorical_columns = ' , '.join(categorical_columns)
      message = (f'These are the categorical covariate columns in the data: '
                 f'[{categorical_columns}]. Use `encode_categorical_covariates`'
                 ' to one-hot encode these columns before moving further.')

      if raise_on_error:
        raise CategoricalCovariateError(message)
      else:
        warnings.warn(CategoricalCovariateWarning(message))

  def encode_categorical_covariates(
      self, columns: List[Text]) -> pd.DataFrame:
    """One-hot encodes model covariates that are categorical.

    The control columns can be categorical, because they are only used for
    demeaning and are removed before the model function is applied to the
    data. Covariate and target columns must all be numeric for the model
    function to work properly.

    Args:
      columns: List of covariate column names that will be transformed using
        one-hot encoding.

    Returns:
      Latest version of the data after one-hot encoding is applied.
    """
    self.data = pd.get_dummies(
        self.data, columns=columns, dtype=int)

    return self.data

  def discretize_numeric_covariate(
      self,
      covariate_name: str,
      equal_sized_bins: bool = False,
      bins: int = 4,
      numeric: bool = False):
    """Transforms a continuous variable into a set of bins.

    This is useful for segmenting a continuous variable into a categorical
    variable, for example when converting ages to groups of age ranges.

    Args:
      covariate_name: Name of the column to transform.
      equal_sized_bins: Whether to create bins with an equal number of
        observations (when set to `True`) or to segment into equal intervals
        over the range of values (when set to `False`).
      bins: Number of bins to create.
      numeric: Whether the result of the transformation should be an integer
        or a one-hot encoding representation of the categorical variables
        generated. Returning a numeric can be convenient, as it preserves the
        "natural" ordering of the variable. For example, for age ranges, with
        "16-25" encoded as `1` and "26-35" encoded as `2`, the ordering is
        preserved, which would otherwise be lost in a one-hot encoding.

    Returns:
      Latest version of the data with the selected covariate transformed.
    """
    cut_kwargs = {'labels': False if numeric else None, 'duplicates': 'drop'}

    if equal_sized_bins:
      buckets = pd.qcut(self.data[covariate_name], q=bins, **cut_kwargs)
    else:
      buckets = pd.cut(self.data[covariate_name], bins=bins, **cut_kwargs)

    self.data[covariate_name] = buckets

    if not numeric:
      self.data = pd.get_dummies(
          self.data, columns=[covariate_name], prefix=covariate_name)

    return self.data
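
  # Editor's note: an illustrative sketch of the two binning modes above
  # (not part of the original module), using a hypothetical ages Series:
  #
  #   ages = pd.Series([18, 22, 25, 31, 40, 64])
  #   pd.qcut(ages, q=2, labels=False)    # equal-sized bins -> 0, 0, 0, 1, 1, 1
  #   pd.cut(ages, bins=2, labels=False)  # equal-width bins -> 0, 0, 0, 0, 0, 1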

  def _check_control(self, raise_on_error: bool = True) -> None:
    """Verifies if the data is controlling for external variables."""
    if not self._has_control_factors:
      message = ('The data is not controlling for external factors. Consider '
                 'using `control_with_fixed_effect`, indicating the columns '
                 'to use as control for external factors.')

      if raise_on_error:
        raise ControlVariableError(message)
      else:
        warnings.warn(ControlVariableWarning(message))

  def control_with_fixed_effect(
      self,
      control_columns: Iterable[str],
      strategy: str = 'quick',
      min_frequency: int = 2
      ) -> pd.DataFrame:
    """Controls external categorical variables with the Fixed Effects method.

    Fixed effects mitigate confounding factors and help recover the
    underlying signal. Fixed Effects is widely used to estimate causal
    effects using observational data. It is designed to control for
    differences across individuals and/or time which could confound the
    estimation of the effect of the variable of interest on an outcome
    variable.

    Originally, Fixed Effects models were implemented using the Least Squares
    Dummy Variable model (LSDV), which essentially uses a dummy variable for
    each fixed effect. This option is available by setting
    `strategy = 'dummy'`. When the number of fixed effects is large, it is
    easy to run into memory issues, and some models may struggle to handle a
    very high-dimensional space. Instead, we can transform the data by
    de-meaning each fixed effect: subtracting the fixed effect group mean and
    adding back the overall mean. Mundlak (1978)[1] has shown that this
    efficient fixed effects implementation is equivalent to the LSDV
    approach. You can use this efficient transformation by setting the
    parameter `strategy = 'quick'`.

    To avoid overfitting and to recover the underlying signal, rare or
    infrequent fixed effect groups should be removed from the study. You can
    choose the minimum frequency a fixed effect group should have using the
    `min_frequency` argument. The default value is `2`, meaning groups with
    only one occurrence will be removed. Make sure your control variables are
    categorical, as any infrequent combination will be removed.

    [1]
    https://econpapers.repec.org/article/ecmemetrp/v_3a46_3ay_3a1978_3ai_3a1_3ap_3a69-85.htm

    Args:
      control_columns: List of columns you want to use as controls for your
        experiment.
      strategy: Option between the 'quick' or 'dummy' strategy to apply the
        fixed effect transformation to your data.
      min_frequency: Minimum frequency for a fixed effect group to be
        retained in the data. If `min_frequency=2`, every fixed effect group
        with only one observation will be removed from the data.

    Returns:
      Latest version of the data after the fixed effect has been applied.
      When strategy is set to `quick`, the control columns will be appended
      to the `data` index.

    Raises:
      NotImplementedError: Currently only the 'quick' strategy is available.
        Setting `strategy` to any other value will raise this exception.
    """
    if strategy != 'quick':
      raise NotImplementedError(
          "Only 'quick' fixed effect is currently implemented.")

    self._control_columns = control_columns
    self._check_categorical_covariates()

    self._fixed_effect_group_id = functools.reduce(
        operator.add, self.data[control_columns].astype(str).values.T)

    frequency_mask = pd.Series(self._fixed_effect_group_id)
    frequency_mask = frequency_mask.groupby(
        self._fixed_effect_group_id).transform('size').values
    frequency_mask = frequency_mask >= min_frequency
    self.data = self.data.loc[frequency_mask]
    self._fixed_effect_group_id = self._fixed_effect_group_id[frequency_mask]

    demean_columns = [
        column for column in self.data if column not in control_columns]
    self._demean_variable_mean = self.data[demean_columns].mean()
    self._demean_group_mean = self.data[demean_columns].groupby(
        self._fixed_effect_group_id).transform('mean')
    self.data[demean_columns] -= self._demean_group_mean
    self.data[demean_columns] += self._demean_variable_mean

    self.data = self.data.set_index(self._control_columns, append=True)
    self._has_control_factors = True
    self._control_strategy = strategy

    return self.data
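
  # Editor's note: a minimal sketch of the 'quick' demeaning performed above
  # (illustrative addition, not part of the original module). Each value has
  # its fixed effect group mean subtracted and the overall mean added back:
  #
  #   df = pd.DataFrame({'group': ['a', 'a', 'b', 'b'],
  #                      'y': [1.0, 3.0, 10.0, 14.0]})
  #   group_mean = df.groupby('group')['y'].transform('mean')  # 2, 2, 12, 12
  #   df['y'] = df['y'] - group_mean + df['y'].mean()          # 6, 8, 5, 9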

  def _check_low_variance(self, raise_on_error: bool = True) -> None:
    """Verifies if the data contains columns with low variance."""
    if not self._checked_low_variance:
      message = ('The data may contain columns with low variance. Consider '
                 'using `address_low_variance`, identifying the columns with '
                 'low variance and whether to drop those.')

      if raise_on_error:
        raise LowVarianceError(message)
      else:
        warnings.warn(LowVarianceWarning(message))

  def address_low_variance(self,
                           threshold: float = 0,
                           drop: bool = True,
                           minmax_scaling: bool = False) -> pd.DataFrame:
    """Identifies low-variance columns, with the option to drop them.

    Features with a variance below the threshold are identified and dropped
    if requested. The data is expected to be normalised, to ensure variances
    can be compared across features. The data can be normalised on the fly
    with MinMax scaling by setting `minmax_scaling=True`.

    Args:
      threshold: Threshold to use in VarianceThreshold, where anything less
        than this threshold is dropped or used for a warning. If 0, drops
        constants. The maximum variance possible is .25 if MinMax scaling is
        applied.
      drop: Boolean to either drop columns with low variance or print a
        message. By default, all columns with low variance are dropped.
      minmax_scaling: If False (default), no scaling is applied to the data
        and it is expected that the user has done the appropriate
        normalisation before. If True, MinMax scaling is applied to ensure
        variances can be compared across features.

    Returns:
      Latest version of the data after the low variance check has been
      applied.
    """
    # TODO(): Address boolean and categorical columns
    covariates = self.data
    if self.target_column:
      covariates = covariates.drop(columns=self.target_column)
    if minmax_scaling:
      covariates = pd.DataFrame(
          preprocessing.minmax_scale(covariates), columns=covariates.columns)
    if not 0 <= threshold <= .25:
      message = (
          'The threshold should be between 0 and .25, with .25 being the '
          'maximum variance possible, leading to all columns being dropped.')
      warnings.warn(LowVarianceWarning(message))
    variances = covariates.var(ddof=0)
    unique_variances = variances.unique()
    if all(
        np.isclose(variance, 0) or np.isclose(variance, 1)
        for variance in unique_variances):
      message = ('All features have a variance of 1 or 0. Please ensure you '
                 'do not z-score your data before running this step.')
      warnings.warn(LowVarianceWarning(message))
    column_var_bool = variances > threshold
    columns_to_delete = column_var_bool[~column_var_bool].index.to_list()

    if columns_to_delete:
      if drop:
        self.data = self.data.drop(columns=columns_to_delete)
      else:
        columns_to_delete = ' , '.join(columns_to_delete)
        message = (f'Consider removing the following columns: '
                   f'{columns_to_delete}')
        warnings.warn(LowVarianceWarning(message))

    self._checked_low_variance = True

    return self.data

  def _check_collinearity(self, raise_on_error: bool = True) -> None:
    """Verifies if the data has been checked for collinearity."""
    if not self._checked_collinearity:
      message = ('The data may contain collinearity between covariates. '
                 'Consider using `address_collinearity_with_vif` to identify '
                 'columns that are collinear and whether to drop them.')

      if raise_on_error:
        raise CollinearityError(message)
      else:
        warnings.warn(CollinearityWarning(message))

  def _get_list_of_correlated_features(
      self,
      vif_data: pd.DataFrame,
      corr_matrix: pd.DataFrame,
      min_absolute_corr: float) -> List[List[str]]:
    """Generates a list of features correlated with the features in vif_data.

    Args:
      vif_data: Table of features and VIFs.
      corr_matrix: (Pearson) correlation matrix for all features in the
        original dataset (can contain more features than just those in
        vif_data).
      min_absolute_corr: Minimum value of absolute correlation; only display
        features with absolute correlations above this value.

    Returns:
      List of lists of features, along with their correlations.
    """
    correlated_features = []
    for feature in vif_data.features:
      min_correlation_mask = abs(corr_matrix[feature]) > min_absolute_corr
      correlated_features_info = corr_matrix.loc[min_correlation_mask]
      correlated_features_info = correlated_features_info.drop(feature, axis=0)
      correlated_features_info = correlated_features_info.sort_values(
          feature, ascending=False)
      correlated_features.append([
          f'{feature_name}: {round(corr_coeff, 2)}'
          for feature_name, corr_coeff
          in zip(correlated_features_info[feature].index,
                 correlated_features_info[feature].values)
      ])
    return correlated_features

  def address_collinearity_with_vif(
      self,
      vif_threshold: int = 10,
      sequential: bool = True,
      interactive: bool = False,
      drop: bool = True,
      min_absolute_corr: float = 0.4) -> pd.DataFrame:
    """Uses VIF to identify collinear columns, with the option to drop them.

    You can customize how collinearity will be resolved with the `sequential`
    and `interactive` parameters. By default, the VIF score is re-calculated
    every time the column with the highest VIF score is dropped, until the
    threshold is met. If you wish to remove all the columns with a VIF score
    higher than the threshold at once, you can set `sequential=False`.

    If you want to have a say in which column is going to be removed, rather
    than automatically picking the column with the highest VIF score, you can
    set `interactive=True`. This will prompt for your input every time
    columns are found with a VIF score higher than your threshold, whether
    `sequential` is set to True or False.

    In interactive mode, to assist in choosing which features to drop, each
    listed feature will also display the other features that correlate with
    it, for all other features with a minimum absolute (positive or negative)
    correlation of at least min_absolute_corr (default = 0.4).

    Args:
      vif_threshold: Threshold to identify which columns have high
        collinearity; anything higher than this threshold is dropped or used
        for a warning.
      sequential: Whether you want to sequentially re-calculate VIF each time
        after a set of column(s) has been removed, or only once.
      interactive: Whether you want to manually specify which column(s) you
        want to remove.
      drop: Boolean to either drop columns with high VIF or print a message;
        default is set to True.
      min_absolute_corr: Float, default = 0.4. Minimum absolute correlation
        required to display a feature as "correlated" to another feature in
        interactive mode. Only used when interactive=True. Should be between
        0 and 1, though this is not currently enforced.

    Returns:
      Data after the collinearity check with VIF has been applied. When
      drop=True, columns with high collinearity will not be present in the
      returned data.
    """
    covariates = self.data.drop(columns=self.target_column)
    columns_to_drop = []
    corr_matrix = covariates.corr()

    while True:
      tmp_data = covariates.drop(columns=columns_to_drop)
      vif_data = vif.calculate_vif(tmp_data, sort=True).reset_index(drop=True)

      if vif_data['VIF'][0] < vif_threshold:
        break

      if interactive:
        correlated_features = self._get_list_of_correlated_features(
            vif_data,
            corr_matrix.drop(columns_to_drop, axis=1),
            min_absolute_corr)
        vif_data = vif_data.sort_index()
        vif_data['correlated_features'] = correlated_features
        selected_columns = _vif_interactive_input_and_validation(vif_data)
      elif sequential:
        selected_columns = [vif_data['features'][0]]
      else:
        vif_filter = vif_data['VIF'] >= vif_threshold
        selected_columns = vif_data['features'][vif_filter].tolist()

      columns_to_drop.extend(selected_columns)

      if not sequential or not selected_columns:
        break

    if drop:
      self.data = self.data.drop(columns=columns_to_drop)
    else:
      message = (
          f'Consider removing the following columns due to collinearity: '
          f'{columns_to_drop}')
      warnings.warn(CollinearityWarning(message))

    self._checked_collinearity = True

    return self.data
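
  # Editor's note: `vif.calculate_vif` above comes from
  # gps_building_blocks.ml.preprocessing. As an illustrative sketch (editor's
  # addition, not the library's implementation), the same per-feature VIF can
  # be obtained as the diagonal of the inverse of the feature correlation
  # matrix, since VIF_j = 1 / (1 - R_j^2):
  #
  #   corr = covariates.corr().values
  #   vifs = pd.Series(np.diag(np.linalg.inv(corr)),
  #                    index=covariates.columns)  # one VIF per feature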

  def get_data_and_target(self) -> Tuple[pd.DataFrame, pd.Series]:
    """Returns the modelling data and the target."""
    target = self.data[self.target_column]
    data = self.data.drop(self.target_column, axis=1)
    return data, target

  def _copy_and_index_inference_data(
      self, indices: np.ndarray) -> 'InferenceData':
    """Deep-copies an InferenceData object, subset to the indices provided.

    It does a deepcopy of the current object and indexes both self.data and
    self.initial_data.

    Args:
      indices: List of indices to keep in the data.

    Returns:
      InferenceData with the data indicated by indices.
    """
    subset_copy = copy.deepcopy(self)
    subset_copy.initial_data = self.initial_data.take(indices)
    subset_copy.data = self.data.take(indices)
    return subset_copy

  def split(
      self,
      cross_validation: Union[int,
                              model_selection.BaseCrossValidator,
                              model_selection.ShuffleSplit,
                              model_selection.StratifiedShuffleSplit],
      groups: Optional[np.ndarray] = None,
  ) -> Iterator[Tuple['InferenceData', 'InferenceData']]:
    """Splits the data using the indicated cross validator.

    Args:
      cross_validation: Cross validation to be applied. If an int is passed
        and groups is None, a sklearn KFold is used with cross_validation as
        the number of splits. If an int is passed and groups is not None,
        sklearn GroupKFold will be used. When a cross validator is passed, it
        is used directly.
      groups: If cross validating for non-overlapping groups, this array
        indicates to which group each row belongs.

    Yields:
      A tuple with train and test InferenceDatas.
    """
    if isinstance(cross_validation, int):
      if groups is not None:
        cross_validation = model_selection.GroupKFold(
            n_splits=cross_validation)
      else:
        cross_validation = model_selection.KFold(n_splits=cross_validation)

    for train_index, test_index in cross_validation.split(self.data,
                                                          groups=groups):
      train_inference_data = self._copy_and_index_inference_data(train_index)
      test_inference_data = self._copy_and_index_inference_data(test_index)
      yield train_inference_data, test_inference_data
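
# Editor's note: an illustrative usage sketch for `split` (not part of the
# original module), assuming a hypothetical `some_df` with an 'outcome'
# column:
#
#   data = InferenceData(some_df, target_column='outcome')
#   for train, test in data.split(cross_validation=3):  # KFold, 3 splits
#     ...  # train and test are InferenceData subsets
#
# Passing an integer together with `groups` switches to GroupKFold, and any
# pre-built sklearn cross validator is used directly, e.g.
# `data.split(model_selection.TimeSeriesSplit(n_splits=3))`.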
""" if isinstance(cross_validation, int): if groups is not None: cross_validation = model_selection.GroupKFold(n_splits=cross_validation) else: cross_validation = model_selection.KFold(n_splits=cross_validation) for train_index, test_index in cross_validation.split(self.data, groups=groups): train_inference_data = self._copy_and_index_inference_data(train_index) test_inference_data = self._copy_and_index_inference_data(test_index) yield train_inference_data, test_inference_data def _input_mock(promp_message: str) -> str: """Allows 'input' to be mocked with nose test.""" # https://stackoverflow.com/questions/25878616/attributeerror-none-does-not-have-the-attribute-print return input(promp_message) def _print_mock(message: str) -> None: """Allows 'print' to be mocked with nose test.""" # https://stackoverflow.com/questions/25878616/attributeerror-none-does-not-have-the-attribute-print with pd.option_context('display.max_colwidth', None): print(message) def _vif_interactive_input_and_validation(vif_data: pd.DataFrame, max_features_to_display: int = 10 ) -> List[str]: """Prompts and validates column selection for interactive sessions. Args: vif_data: DataFrame of VIF data to display. max_features_to_display: Maximum number of features to display, in descending order of VIF score. Returns: selected_columns: A list (can be empty) of column names to remove. """ while True: _print_mock(vif_data.set_index('features').head(max_features_to_display)) selected_columns = _input_mock( 'Select one or more variables to remove separated by comma. ' 'To end the interactive session press Enter.\n') if not selected_columns: return [] selected_columns = selected_columns.split(',') valid_selection = True valid_columns = vif_data['features'].tolist() for column in selected_columns: if column not in valid_columns: _print_mock(f'Invalid column "{column}". ' f'Valid columns: {",".join(valid_columns)}') valid_selection = False if valid_selection: return selected_columns
apache-2.0