Dataset schema (column, type, and observed value ranges):

    body_hash               string, length 64-64
    body                    string, length 23-109k
    docstring               string, length 1-57k
    path                    string, length 4-198
    name                    string, length 1-115
    repository_name         string, length 7-111
    repository_stars        float64, 0-191k
    lang                    string, 1 distinct value (python)
    body_without_docstring  string, length 14-108k
    unified                 string, length 45-133k

Each record below lists these ten fields in this order, one per line.
c45795d3695acf8ef7a35b52220c1d23694a0db0e47cbc16c3fb95aa5b3da38f
@abc.abstractmethod def check_if_required(self): 'Should return True if check is required\n and False if is not required\n '
Should return True if check is required and False if is not required
fuel_upgrade_system/fuel_upgrade/fuel_upgrade/pre_upgrade_hooks/base.py
check_if_required
andrei4ka/fuel-web-redhat
1
python
@abc.abstractmethod def check_if_required(self): 'Should return True if check is required\n and False if is not required\n '
@abc.abstractmethod def check_if_required(self): 'Should return True if check is required\n and False if is not required\n '<|docstring|>Should return True if check is required and False if is not required<|endoftext|>
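The fuel_upgrade records in this file all come from the same abstract pre-upgrade hook base class. As a minimal sketch of the abc pattern they rely on (the class names PreUpgradeHookBase and PrintHook and their behaviour are invented for illustration; the real base class may wire up its metaclass differently):

    import abc

    class PreUpgradeHookBase(abc.ABC):
        @abc.abstractmethod
        def check_if_required(self):
            """Should return True if the check is required."""

    class PrintHook(PreUpgradeHookBase):
        def check_if_required(self):
            # Concrete subclasses supply the actual check; this one is a stub.
            return True

    print(PrintHook().check_if_required())   # True
    # Instantiating PreUpgradeHookBase() directly raises TypeError because the
    # abstract method is unimplemented.

The enable_for_engines record further down uses abc.abstractproperty, which newer Python versions deprecate in favour of stacking @property over @abc.abstractmethod.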
0e622e1a552f75fa3a493b03b924486414b5ab13892ebe5ff3826c06bf3e4a83
@abc.abstractmethod def run(self): 'Run pre upgrade hook\n '
Run pre upgrade hook
fuel_upgrade_system/fuel_upgrade/fuel_upgrade/pre_upgrade_hooks/base.py
run
andrei4ka/fuel-web-redhat
1
python
@abc.abstractmethod def run(self): '\n '
@abc.abstractmethod def run(self): '\n '<|docstring|>Run pre upgrade hook<|endoftext|>
f584d4e5d4d5a637d9fecade221e414d1a3b7c21e15e864da344e8e3921f0343
@abc.abstractproperty def enable_for_engines(self): 'Should return list of upgrade engines\n which the hook is required for\n '
Should return list of upgrade engines which the hook is required for
fuel_upgrade_system/fuel_upgrade/fuel_upgrade/pre_upgrade_hooks/base.py
enable_for_engines
andrei4ka/fuel-web-redhat
1
python
@abc.abstractproperty def enable_for_engines(self): 'Should return list of upgrade engines\n which the hook is required for\n '
@abc.abstractproperty def enable_for_engines(self): 'Should return list of upgrade engines\n which the hook is required for\n '<|docstring|>Should return list of upgrade engines which the hook is required for<|endoftext|>
25cd5eea445ce0e71d5414ca017f018be08c878e03082cc2a13fc6c405551c3e
@property def is_required(self): "Checks if it's required to run the hook\n\n :returns: True if required, False if is not required\n " return (self.is_enabled_for_engines and self.check_if_required())
Checks if it's required to run the hook :returns: True if required, False if is not required
fuel_upgrade_system/fuel_upgrade/fuel_upgrade/pre_upgrade_hooks/base.py
is_required
andrei4ka/fuel-web-redhat
1
python
@property def is_required(self): "Checks if it's required to run the hook\n\n :returns: True if required, False if is not required\n " return (self.is_enabled_for_engines and self.check_if_required())
@property def is_required(self): "Checks if it's required to run the hook\n\n :returns: True if required, False if is not required\n " return (self.is_enabled_for_engines and self.check_if_required())<|docstring|>Checks if it's required to run the hook :returns: True if required, False if is not required<|endoftext|>
ddf07a503ac8cd8ee32518561557cbdc169c7bba09ff05ef8860b3851bbea013
@property def is_enabled_for_engines(self): 'Checks if engine in the list\n\n :returns: True if engine in the list\n False if engine not in the list\n ' for engine in self.enable_for_engines: for upgrade in self.upgraders: if isinstance(upgrade, engine): return True return False
Checks if engine in the list :returns: True if engine in the list False if engine not in the list
fuel_upgrade_system/fuel_upgrade/fuel_upgrade/pre_upgrade_hooks/base.py
is_enabled_for_engines
andrei4ka/fuel-web-redhat
1
python
@property def is_enabled_for_engines(self): 'Checks if engine in the list\n\n :returns: True if engine in the list\n False if engine not in the list\n ' for engine in self.enable_for_engines: for upgrade in self.upgraders: if isinstance(upgrade, engine): return True return False
@property def is_enabled_for_engines(self): 'Checks if engine in the list\n\n :returns: True if engine in the list\n False if engine not in the list\n ' for engine in self.enable_for_engines: for upgrade in self.upgraders: if isinstance(upgrade, engine): return True return False<|docstring|>Checks if engine in the list :returns: True if engine in the list False if engine not in the list<|endoftext|>
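The nested loops in is_enabled_for_engines above reduce to a single any() over an isinstance check; a self-contained illustration with made-up engine classes:

    class EngineA: pass
    class EngineB: pass

    enable_for_engines = [EngineA]          # engines the hook cares about
    upgraders = [EngineB(), EngineA()]      # engines actually configured

    # Same truth table as the record's double for-loop:
    print(any(isinstance(u, e) for e in enable_for_engines for u in upgraders))   # True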
e3bedc404a1ada2a8211e1a274bfc059510f752a9dcc9dd1a5dfe73377cceea7
def xml_cleaner(words: Iterable) -> Generator[(str, None, None)]: 'Enlève les tags XML résiduels pour une liste de mots' for word in words: chars = list() in_tag = False for char in word: if (char == '<'): in_tag = True elif (char == '>'): in_tag = False elif (not in_tag): chars.append(char) (yield ''.join(chars))
Enlève les tags XML résiduels pour une liste de mots
app/engine/utils.py
xml_cleaner
Plawn/petit_word_engine
0
python
def xml_cleaner(words: Iterable) -> Generator[(str, None, None)]: for word in words: chars = list() in_tag = False for char in word: if (char == '<'): in_tag = True elif (char == '>'): in_tag = False elif (not in_tag): chars.append(char) (yield ''.join(chars))
def xml_cleaner(words: Iterable) -> Generator[(str, None, None)]: for word in words: chars = list() in_tag = False for char in word: if (char == '<'): in_tag = True elif (char == '>'): in_tag = False elif (not in_tag): chars.append(char) (yield ''.join(chars))<|docstring|>Enlève les tags XML résiduels pour une liste de mots<|endoftext|>
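The same scanner restated in multi-line form with a small usage example (the docx-style input strings are invented; the behaviour mirrors the record's character-by-character tag skipping):

    from typing import Generator, Iterable

    def xml_cleaner(words: Iterable) -> Generator[str, None, None]:
        """Strip residual XML tags from a list of words (translation of the French docstring)."""
        for word in words:
            chars = []
            in_tag = False
            for char in word:
                if char == '<':
                    in_tag = True        # start skipping
                elif char == '>':
                    in_tag = False       # stop skipping
                elif not in_tag:
                    chars.append(char)
            yield ''.join(chars)

    print(list(xml_cleaner(['<w:t>hello</w:t>', 'plain'])))   # ['hello', 'plain']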
f70168088acdb817684183d4e34e13004b166b5d412f1c8b34c1a8c877294a30
def get_url(route): 'Generate a proper URL, forcing HTTPS if not running locally' host = urllib.parse.urlparse(request.url).hostname url = url_for(route, _external=True, _scheme=('http' if (host in ('127.0.0.1', 'localhost')) else 'https')) return url
Generate a proper URL, forcing HTTPS if not running locally
app.py
get_url
sunnyrahul25/mdwebhook
95
python
def get_url(route): host = urllib.parse.urlparse(request.url).hostname url = url_for(route, _external=True, _scheme=('http' if (host in ('127.0.0.1', 'localhost')) else 'https')) return url
def get_url(route): host = urllib.parse.urlparse(request.url).hostname url = url_for(route, _external=True, _scheme=('http' if (host in ('127.0.0.1', 'localhost')) else 'https')) return url<|docstring|>Generate a proper URL, forcing HTTPS if not running locally<|endoftext|>
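The scheme choice inside get_url is independent of Flask and easy to exercise on its own; a stdlib-only sketch (the example URLs are invented):

    from urllib.parse import urlparse

    def pick_scheme(current_url: str) -> str:
        # Force HTTPS unless the request is clearly local, mirroring get_url above.
        host = urlparse(current_url).hostname
        return 'http' if host in ('127.0.0.1', 'localhost') else 'https'

    print(pick_scheme('http://127.0.0.1:5000/oauth_callback'))   # http
    print(pick_scheme('http://example.com/oauth_callback'))      # https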
25e056cc1afeb465c186e570901607de93cb4025e35f12ab328e60112d6ab662
@app.route('/oauth_callback') def oauth_callback(): 'Callback function for when the user returns from OAuth.' auth_result = get_flow().finish(request.args) account = auth_result.account_id access_token = auth_result.access_token redis_client.hset('tokens', account, access_token) process_user(account) return redirect(url_for('done'))
Callback function for when the user returns from OAuth.
app.py
oauth_callback
sunnyrahul25/mdwebhook
95
python
@app.route('/oauth_callback') def oauth_callback(): auth_result = get_flow().finish(request.args) account = auth_result.account_id access_token = auth_result.access_token redis_client.hset('tokens', account, access_token) process_user(account) return redirect(url_for('done'))
@app.route('/oauth_callback') def oauth_callback(): auth_result = get_flow().finish(request.args) account = auth_result.account_id access_token = auth_result.access_token redis_client.hset('tokens', account, access_token) process_user(account) return redirect(url_for('done'))<|docstring|>Callback function for when the user returns from OAuth.<|endoftext|>
2d47d01f1cdd19ff7641598c23db8e68cbe5d5f8f7607b28cf5d8e7bd8d65091
def process_user(account): 'Call /files/list_folder for the given user ID and process any changes.' token = redis_client.hget('tokens', account) cursor = redis_client.hget('cursors', account) dbx = Dropbox(token) has_more = True while has_more: if (cursor is None): result = dbx.files_list_folder(path='') else: result = dbx.files_list_folder_continue(cursor) for entry in result.entries: if (isinstance(entry, DeletedMetadata) or isinstance(entry, FolderMetadata) or (not entry.path_lower.endswith('.md'))): continue (_, resp) = dbx.files_download(entry.path_lower) html = markdown(resp.content.decode('utf-8')) dbx.files_upload(bytes(html, encoding='utf-8'), (entry.path_lower[:(- 3)] + '.html'), mode=WriteMode('overwrite')) cursor = result.cursor redis_client.hset('cursors', account, cursor) has_more = result.has_more
Call /files/list_folder for the given user ID and process any changes.
app.py
process_user
sunnyrahul25/mdwebhook
95
python
def process_user(account): token = redis_client.hget('tokens', account) cursor = redis_client.hget('cursors', account) dbx = Dropbox(token) has_more = True while has_more: if (cursor is None): result = dbx.files_list_folder(path='') else: result = dbx.files_list_folder_continue(cursor) for entry in result.entries: if (isinstance(entry, DeletedMetadata) or isinstance(entry, FolderMetadata) or (not entry.path_lower.endswith('.md'))): continue (_, resp) = dbx.files_download(entry.path_lower) html = markdown(resp.content.decode('utf-8')) dbx.files_upload(bytes(html, encoding='utf-8'), (entry.path_lower[:(- 3)] + '.html'), mode=WriteMode('overwrite')) cursor = result.cursor redis_client.hset('cursors', account, cursor) has_more = result.has_more
def process_user(account): token = redis_client.hget('tokens', account) cursor = redis_client.hget('cursors', account) dbx = Dropbox(token) has_more = True while has_more: if (cursor is None): result = dbx.files_list_folder(path='') else: result = dbx.files_list_folder_continue(cursor) for entry in result.entries: if (isinstance(entry, DeletedMetadata) or isinstance(entry, FolderMetadata) or (not entry.path_lower.endswith('.md'))): continue (_, resp) = dbx.files_download(entry.path_lower) html = markdown(resp.content.decode('utf-8')) dbx.files_upload(bytes(html, encoding='utf-8'), (entry.path_lower[:(- 3)] + '.html'), mode=WriteMode('overwrite')) cursor = result.cursor redis_client.hset('cursors', account, cursor) has_more = result.has_more<|docstring|>Call /files/list_folder for the given user ID and process any changes.<|endoftext|>
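process_user pages through changes with a persisted cursor: fetch a page, handle its entries, store result.cursor, and repeat while result.has_more. A pure-Python stand-in for that loop shape (FakeListing, the page size, and the file names are invented; the real code calls dbx.files_list_folder / files_list_folder_continue and keeps cursors in Redis):

    from dataclasses import dataclass
    from typing import List

    @dataclass
    class FakeListing:
        entries: List[str]
        cursor: int
        has_more: bool

    def fake_list_folder(items: List[str], cursor: int = 0, page: int = 2) -> FakeListing:
        chunk = items[cursor:cursor + page]
        new_cursor = cursor + len(chunk)
        return FakeListing(chunk, new_cursor, new_cursor < len(items))

    items = ['a.md', 'b.txt', 'c.md', 'd.md']
    cursor, has_more, seen = 0, True, []
    while has_more:
        result = fake_list_folder(items, cursor)
        seen.extend(e for e in result.entries if e.endswith('.md'))   # same .md filter as the record
        cursor, has_more = result.cursor, result.has_more
    print(seen)   # ['a.md', 'c.md', 'd.md']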
6321457fa8d91e143a11b59cea8a90836c6d7777a183d0296539e3ec79056758
@app.route('/webhook', methods=['GET']) def challenge(): 'Respond to the webhook challenge (GET request) by echoing back the challenge parameter.' resp = Response(request.args.get('challenge')) resp.headers['Content-Type'] = 'text/plain' resp.headers['X-Content-Type-Options'] = 'nosniff' return resp
Respond to the webhook challenge (GET request) by echoing back the challenge parameter.
app.py
challenge
sunnyrahul25/mdwebhook
95
python
@app.route('/webhook', methods=['GET']) def challenge(): resp = Response(request.args.get('challenge')) resp.headers['Content-Type'] = 'text/plain' resp.headers['X-Content-Type-Options'] = 'nosniff' return resp
@app.route('/webhook', methods=['GET']) def challenge(): resp = Response(request.args.get('challenge')) resp.headers['Content-Type'] = 'text/plain' resp.headers['X-Content-Type-Options'] = 'nosniff' return resp<|docstring|>Respond to the webhook challenge (GET request) by echoing back the challenge parameter.<|endoftext|>
14462a40fe572e425b447ad41a99941a0b8042cdd137e1f62beba8433c7cab94
@app.route('/webhook', methods=['POST']) def webhook(): 'Receive a list of changed user IDs from Dropbox and process each.' signature = request.headers.get('X-Dropbox-Signature') key = bytes(APP_SECRET, encoding='ascii') computed_signature = hmac.new(key, request.data, sha256).hexdigest() if (not hmac.compare_digest(signature, computed_signature)): abort(403) for account in json.loads(request.data)['list_folder']['accounts']: threading.Thread(target=process_user, args=(account,)).start() return ''
Receive a list of changed user IDs from Dropbox and process each.
app.py
webhook
sunnyrahul25/mdwebhook
95
python
@app.route('/webhook', methods=['POST']) def webhook(): signature = request.headers.get('X-Dropbox-Signature') key = bytes(APP_SECRET, encoding='ascii') computed_signature = hmac.new(key, request.data, sha256).hexdigest() if (not hmac.compare_digest(signature, computed_signature)): abort(403) for account in json.loads(request.data)['list_folder']['accounts']: threading.Thread(target=process_user, args=(account,)).start() return ''
@app.route('/webhook', methods=['POST']) def webhook(): signature = request.headers.get('X-Dropbox-Signature') key = bytes(APP_SECRET, encoding='ascii') computed_signature = hmac.new(key, request.data, sha256).hexdigest() if (not hmac.compare_digest(signature, computed_signature)): abort(403) for account in json.loads(request.data)['list_folder']['accounts']: threading.Thread(target=process_user, args=(account,)).start() return ''<|docstring|>Receive a list of changed user IDs from Dropbox and process each.<|endoftext|>
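The webhook record authenticates notifications by recomputing an HMAC-SHA256 of the raw request body with the app secret and comparing it to the X-Dropbox-Signature header in constant time. The check itself is plain standard library and can be sketched without Flask (the secret and body below are placeholders):

    import hmac
    from hashlib import sha256

    APP_SECRET = 'placeholder-app-secret'    # stand-in; the real value comes from the Dropbox app console

    def signature_is_valid(raw_body: bytes, header_signature: str) -> bool:
        # Recompute the digest over the exact raw bytes and compare in constant time.
        expected = hmac.new(APP_SECRET.encode('ascii'), raw_body, sha256).hexdigest()
        return hmac.compare_digest(header_signature, expected)

    body = b'{"list_folder": {"accounts": ["dbid:placeholder"]}}'
    good = hmac.new(APP_SECRET.encode('ascii'), body, sha256).hexdigest()
    print(signature_is_valid(body, good))        # True
    print(signature_is_valid(body, '0' * 64))    # False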
cf4c8c461c1dbef9d93a7a91d1b3e0069a4f60eea07f34b1c2fc5ef2ecb7400f
def forward(self, observations): '\n Parameters\n ----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n \n Returns\n --------\n values: torch.Tensor [num_paths x horizon]\n \n ' features = self.feature_mat(observations) values = (features * self.weights.data).sum((- 1)) values += self.biases.data.T return values
Parameters ---------- observations: torch.Tensor [num_paths x horizon x d_obs] Returns -------- values: torch.Tensor [num_paths x horizon]
mjmpc/value_functions/quadratic_time_varying_val_func.py
forward
mohakbhardwaj/mjmpc
2
python
def forward(self, observations): '\n Parameters\n ----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n \n Returns\n --------\n values: torch.Tensor [num_paths x horizon]\n \n ' features = self.feature_mat(observations) values = (features * self.weights.data).sum((- 1)) values += self.biases.data.T return values
def forward(self, observations): '\n Parameters\n ----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n \n Returns\n --------\n values: torch.Tensor [num_paths x horizon]\n \n ' features = self.feature_mat(observations) values = (features * self.weights.data).sum((- 1)) values += self.biases.data.T return values<|docstring|>Parameters ---------- observations: torch.Tensor [num_paths x horizon x d_obs] Returns -------- values: torch.Tensor [num_paths x horizon]<|endoftext|>
1e380b276a9c2570a7e65c762ae1871a5a3ecfd0125788bf31cebcc328bb1fa3
def fit(self, observations, returns, delta_reg=0.0, return_errors=False): '\n Parameters\n -----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n returns: torch.Tensor [num_paths x horizon]\n\n ' num_paths = observations.shape[0] features = self.feature_mat(observations) if return_errors: predictions = self(observations) errors = (returns - predictions) error_before = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) feat = features.permute(1, 0, 2) ret = returns.permute(1, 0).unsqueeze((- 1)) feat = torch.cat((feat, torch.ones(self.horizon, num_paths, 1)), axis=(- 1)) I = torch.eye((self.d_input + 1)).repeat(self.horizon, 1, 1) feat_t = feat.transpose(1, 2) (X, _) = torch.solve(feat_t.bmm(ret), (feat_t.bmm(feat) + (delta_reg * I))) X = X.squeeze((- 1)) self.weights.data.copy_(X[(:, :(- 1))]) self.biases.data.copy_(X[(:, (- 1))]) if return_errors: predictions = self(observations) errors = (returns - predictions) error_after = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) return (error_before, error_after)
Parameters ----------- observations: torch.Tensor [num_paths x horizon x d_obs] returns: torch.Tensor [num_paths x horizon]
mjmpc/value_functions/quadratic_time_varying_val_func.py
fit
mohakbhardwaj/mjmpc
2
python
def fit(self, observations, returns, delta_reg=0.0, return_errors=False): '\n Parameters\n -----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n returns: torch.Tensor [num_paths x horizon]\n\n ' num_paths = observations.shape[0] features = self.feature_mat(observations) if return_errors: predictions = self(observations) errors = (returns - predictions) error_before = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) feat = features.permute(1, 0, 2) ret = returns.permute(1, 0).unsqueeze((- 1)) feat = torch.cat((feat, torch.ones(self.horizon, num_paths, 1)), axis=(- 1)) I = torch.eye((self.d_input + 1)).repeat(self.horizon, 1, 1) feat_t = feat.transpose(1, 2) (X, _) = torch.solve(feat_t.bmm(ret), (feat_t.bmm(feat) + (delta_reg * I))) X = X.squeeze((- 1)) self.weights.data.copy_(X[(:, :(- 1))]) self.biases.data.copy_(X[(:, (- 1))]) if return_errors: predictions = self(observations) errors = (returns - predictions) error_after = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) return (error_before, error_after)
def fit(self, observations, returns, delta_reg=0.0, return_errors=False): '\n Parameters\n -----------\n observations: torch.Tensor [num_paths x horizon x d_obs]\n returns: torch.Tensor [num_paths x horizon]\n\n ' num_paths = observations.shape[0] features = self.feature_mat(observations) if return_errors: predictions = self(observations) errors = (returns - predictions) error_before = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) feat = features.permute(1, 0, 2) ret = returns.permute(1, 0).unsqueeze((- 1)) feat = torch.cat((feat, torch.ones(self.horizon, num_paths, 1)), axis=(- 1)) I = torch.eye((self.d_input + 1)).repeat(self.horizon, 1, 1) feat_t = feat.transpose(1, 2) (X, _) = torch.solve(feat_t.bmm(ret), (feat_t.bmm(feat) + (delta_reg * I))) X = X.squeeze((- 1)) self.weights.data.copy_(X[(:, :(- 1))]) self.biases.data.copy_(X[(:, (- 1))]) if return_errors: predictions = self(observations) errors = (returns - predictions) error_after = (torch.sum((errors ** 2)) / torch.sum((returns ** 2))) return (error_before, error_after)<|docstring|>Parameters ----------- observations: torch.Tensor [num_paths x horizon x d_obs] returns: torch.Tensor [num_paths x horizon]<|endoftext|>
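The fit record solves a batched ridge regression per time step with torch.solve, which recent PyTorch releases deprecate; torch.linalg.solve(A, B) is the usual replacement (argument order swapped, no LU factor returned). A small shape-for-shape sketch with made-up sizes:

    import torch

    horizon, num_paths, d = 3, 5, 4
    feat = torch.randn(horizon, num_paths, d)      # features per time step, [horizon x num_paths x d]
    ret = torch.randn(horizon, num_paths, 1)       # returns,               [horizon x num_paths x 1]
    delta_reg = 0.1

    I = torch.eye(d).repeat(horizon, 1, 1)
    feat_t = feat.transpose(1, 2)
    # Solve (F^T F + delta * I) X = F^T y independently for every time step.
    X = torch.linalg.solve(feat_t.bmm(feat) + delta_reg * I, feat_t.bmm(ret))
    print(X.shape)   # torch.Size([3, 4, 1])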
10f6a369140d76c0369f5f7cbe02aa240b956110cd493f66d9f4c291ec38fa61
def cs2coords(start, qstart, length, strand, cs, offset=1, splice_donor=['gt', 'at'], splice_acceptor=['ag', 'ac']): '\n # From minimap2 manual this is the cs flag definitions\n Op\tRegex\tDescription\n =\t[ACGTN]+\tIdentical sequence (long form)\n :\t[0-9]+\tIdentical sequence length\n *\t[acgtn][acgtn]\tSubstitution: ref to query\n +\t[acgtn]+\tInsertion to the reference\n -\t[acgtn]+\tDeletion from the reference\n ~\t[acgtn]{2}[0-9]+[acgtn]{2}\tIntron length and splice signal\n ' cs = cs.replace('cs:Z:', '') ProperSplice = True exons = [int(start)] position = int(start) query = [int(qstart)] querypos = 0 num_exons = 1 gaps = 0 indels = [] if (strand == '+'): sp_donor = splice_donor sp_acceptor = splice_acceptor sort_orientation = False elif (strand == '-'): sp_donor = [RevComp(x).lower() for x in splice_acceptor] sp_acceptor = [RevComp(x).lower() for x in splice_donor] sort_orientation = True for (s, value) in cs2tuples(cs): if (s == ':'): position += int(value) querypos += int(value) indels.append(0) elif (s == '-'): gaps += 1 position += len(value) querypos += len(value) indels.append((- len(value))) elif (s == '+'): gaps += 1 position += len(value) querypos += len(value) indels.append(len(value)) elif (s == '~'): if (value.startswith(tuple(sp_donor)) and value.endswith(tuple(sp_acceptor))): ProperSplice = True else: ProperSplice = False num_exons += 1 exons.append((position + indels[(- 1)])) query.append(querypos) query.append((querypos + 1)) intronLen = int(value[2:(- 2)]) position += intronLen exons.append(position) indels.append(0) exons.append(position) query.append(int(length)) exontmp = list(zip(exons[0::2], exons[1::2])) queryList = list(zip(query[0::2], query[1::2])) exonList = [] for x in sorted(exontmp, key=(lambda tup: tup[0]), reverse=sort_orientation): exonList.append(((x[0] + offset), x[1])) return (exonList, queryList, ProperSplice)
# From minimap2 manual this is the cs flag definitions Op Regex Description = [ACGTN]+ Identical sequence (long form) : [0-9]+ Identical sequence length * [acgtn][acgtn] Substitution: ref to query + [acgtn]+ Insertion to the reference - [acgtn]+ Deletion from the reference ~ [acgtn]{2}[0-9]+[acgtn]{2} Intron length and splice signal
gfftk/paf.py
cs2coords
nextgenusfs/gfftk
0
python
def cs2coords(start, qstart, length, strand, cs, offset=1, splice_donor=['gt', 'at'], splice_acceptor=['ag', 'ac']): '\n # From minimap2 manual this is the cs flag definitions\n Op\tRegex\tDescription\n =\t[ACGTN]+\tIdentical sequence (long form)\n :\t[0-9]+\tIdentical sequence length\n *\t[acgtn][acgtn]\tSubstitution: ref to query\n +\t[acgtn]+\tInsertion to the reference\n -\t[acgtn]+\tDeletion from the reference\n ~\t[acgtn]{2}[0-9]+[acgtn]{2}\tIntron length and splice signal\n ' cs = cs.replace('cs:Z:', ) ProperSplice = True exons = [int(start)] position = int(start) query = [int(qstart)] querypos = 0 num_exons = 1 gaps = 0 indels = [] if (strand == '+'): sp_donor = splice_donor sp_acceptor = splice_acceptor sort_orientation = False elif (strand == '-'): sp_donor = [RevComp(x).lower() for x in splice_acceptor] sp_acceptor = [RevComp(x).lower() for x in splice_donor] sort_orientation = True for (s, value) in cs2tuples(cs): if (s == ':'): position += int(value) querypos += int(value) indels.append(0) elif (s == '-'): gaps += 1 position += len(value) querypos += len(value) indels.append((- len(value))) elif (s == '+'): gaps += 1 position += len(value) querypos += len(value) indels.append(len(value)) elif (s == '~'): if (value.startswith(tuple(sp_donor)) and value.endswith(tuple(sp_acceptor))): ProperSplice = True else: ProperSplice = False num_exons += 1 exons.append((position + indels[(- 1)])) query.append(querypos) query.append((querypos + 1)) intronLen = int(value[2:(- 2)]) position += intronLen exons.append(position) indels.append(0) exons.append(position) query.append(int(length)) exontmp = list(zip(exons[0::2], exons[1::2])) queryList = list(zip(query[0::2], query[1::2])) exonList = [] for x in sorted(exontmp, key=(lambda tup: tup[0]), reverse=sort_orientation): exonList.append(((x[0] + offset), x[1])) return (exonList, queryList, ProperSplice)
def cs2coords(start, qstart, length, strand, cs, offset=1, splice_donor=['gt', 'at'], splice_acceptor=['ag', 'ac']): '\n # From minimap2 manual this is the cs flag definitions\n Op\tRegex\tDescription\n =\t[ACGTN]+\tIdentical sequence (long form)\n :\t[0-9]+\tIdentical sequence length\n *\t[acgtn][acgtn]\tSubstitution: ref to query\n +\t[acgtn]+\tInsertion to the reference\n -\t[acgtn]+\tDeletion from the reference\n ~\t[acgtn]{2}[0-9]+[acgtn]{2}\tIntron length and splice signal\n ' cs = cs.replace('cs:Z:', ) ProperSplice = True exons = [int(start)] position = int(start) query = [int(qstart)] querypos = 0 num_exons = 1 gaps = 0 indels = [] if (strand == '+'): sp_donor = splice_donor sp_acceptor = splice_acceptor sort_orientation = False elif (strand == '-'): sp_donor = [RevComp(x).lower() for x in splice_acceptor] sp_acceptor = [RevComp(x).lower() for x in splice_donor] sort_orientation = True for (s, value) in cs2tuples(cs): if (s == ':'): position += int(value) querypos += int(value) indels.append(0) elif (s == '-'): gaps += 1 position += len(value) querypos += len(value) indels.append((- len(value))) elif (s == '+'): gaps += 1 position += len(value) querypos += len(value) indels.append(len(value)) elif (s == '~'): if (value.startswith(tuple(sp_donor)) and value.endswith(tuple(sp_acceptor))): ProperSplice = True else: ProperSplice = False num_exons += 1 exons.append((position + indels[(- 1)])) query.append(querypos) query.append((querypos + 1)) intronLen = int(value[2:(- 2)]) position += intronLen exons.append(position) indels.append(0) exons.append(position) query.append(int(length)) exontmp = list(zip(exons[0::2], exons[1::2])) queryList = list(zip(query[0::2], query[1::2])) exonList = [] for x in sorted(exontmp, key=(lambda tup: tup[0]), reverse=sort_orientation): exonList.append(((x[0] + offset), x[1])) return (exonList, queryList, ProperSplice)<|docstring|># From minimap2 manual this is the cs flag definitions Op Regex Description = [ACGTN]+ Identical sequence (long form) : [0-9]+ Identical sequence length * [acgtn][acgtn] Substitution: ref to query + [acgtn]+ Insertion to the reference - [acgtn]+ Deletion from the reference ~ [acgtn]{2}[0-9]+[acgtn]{2} Intron length and splice signal<|endoftext|>
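cs2coords consumes (op, value) pairs from a cs2tuples helper that is not shown in this dump. One plausible way to tokenize a minimap2 cs string, labelled as a guess rather than the project's actual implementation:

    import re

    def cs2tuples_sketch(cs: str):
        # Split a cs tag into operator/value pairs: ':' identity runs, '*' substitutions,
        # '+'/'-' indels and '~' introns; this regex is a simplification.
        return re.findall(r'(:|\*|\+|\-|~|=)([A-Za-z0-9]+)', cs)

    print(cs2tuples_sketch(':10*ag+t-cc~gt12ag:5'))
    # [(':', '10'), ('*', 'ag'), ('+', 't'), ('-', 'cc'), ('~', 'gt12ag'), (':', '5')]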
e53cf5e1f0680dde62d210cb73262da6dcb1c40d024a82dba44ef8556cca1f79
def get_callbacks(experiment_id): '\n Helper function to build the list of desired keras callbacks.\n\n :param experiment_id: experiment id.\n :return: a list of Keras Callbacks.\n ' if (not exists(log_dir)): os.makedirs(log_dir) return [ModelLoader(experiment_id=experiment_id), PredictionCallback(experiment_id=experiment_id), Checkpointer(experiment_id=experiment_id), ReduceLROnPlateau(monitor='loss', factor=0.5, patience=5, min_lr=1e-06), CSVLogger(join(log_dir, '{}.txt'.format(experiment_id)))]
Helper function to build the list of desired keras callbacks. :param experiment_id: experiment id. :return: a list of Keras Callbacks.
experiments/train/callbacks.py
get_callbacks
Tobias-Fischer/dreyeve
83
python
def get_callbacks(experiment_id): '\n Helper function to build the list of desired keras callbacks.\n\n :param experiment_id: experiment id.\n :return: a list of Keras Callbacks.\n ' if (not exists(log_dir)): os.makedirs(log_dir) return [ModelLoader(experiment_id=experiment_id), PredictionCallback(experiment_id=experiment_id), Checkpointer(experiment_id=experiment_id), ReduceLROnPlateau(monitor='loss', factor=0.5, patience=5, min_lr=1e-06), CSVLogger(join(log_dir, '{}.txt'.format(experiment_id)))]
def get_callbacks(experiment_id): '\n Helper function to build the list of desired keras callbacks.\n\n :param experiment_id: experiment id.\n :return: a list of Keras Callbacks.\n ' if (not exists(log_dir)): os.makedirs(log_dir) return [ModelLoader(experiment_id=experiment_id), PredictionCallback(experiment_id=experiment_id), Checkpointer(experiment_id=experiment_id), ReduceLROnPlateau(monitor='loss', factor=0.5, patience=5, min_lr=1e-06), CSVLogger(join(log_dir, '{}.txt'.format(experiment_id)))]<|docstring|>Helper function to build the list of desired keras callbacks. :param experiment_id: experiment id. :return: a list of Keras Callbacks.<|endoftext|>
6700f6bf504889ac46887c2f13ad764462c3b6d0ba274d641fcee7433e6d3dca
def register_connections(): '\n register nornir connection plugins\n ' from nornir.core.connections import Connections from netnir.core.connection.netmiko import Netmiko from netnir.core.connection.netconf import Netconf Connections.deregister_all() Connections.register(name='netconf', plugin=Netconf) Connections.register(name='netmiko', plugin=Netmiko)
register nornir connection plugins
netnir/core/connection/__init__.py
register_connections
jtdub/netnir
0
python
def register_connections(): '\n \n ' from nornir.core.connections import Connections from netnir.core.connection.netmiko import Netmiko from netnir.core.connection.netconf import Netconf Connections.deregister_all() Connections.register(name='netconf', plugin=Netconf) Connections.register(name='netmiko', plugin=Netmiko)
def register_connections(): '\n \n ' from nornir.core.connections import Connections from netnir.core.connection.netmiko import Netmiko from netnir.core.connection.netconf import Netconf Connections.deregister_all() Connections.register(name='netconf', plugin=Netconf) Connections.register(name='netmiko', plugin=Netmiko)<|docstring|>register nornir connection plugins<|endoftext|>
fe83847d369b3b01f9077c8c50f419901b21fc006d165b66a5e38b10f5755f8b
def _check_existing(existing_object, new_object, case, idKey): '\n Compare the new object to the existing one, warn\n about mismatches.\n ' for key in new_object.keys(): if (key not in existing_object): warning = "Found missing key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) elif (new_object[key] != existing_object[key]): warning = "Found mismatch for key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) warnings.warn('To be created: \n{}'.format(json.dumps(new_object[key]))) warnings.warn('Existing: \n{}'.format(json.dumps(existing_object[key])))
Compare the new object to the existing one, warn about mismatches.
scripts/init-realm/init-realm.py
_check_existing
sebastianbertoli/renku
151
python
def _check_existing(existing_object, new_object, case, idKey): '\n Compare the new object to the existing one, warn\n about mismatches.\n ' for key in new_object.keys(): if (key not in existing_object): warning = "Found missing key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) elif (new_object[key] != existing_object[key]): warning = "Found mismatch for key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) warnings.warn('To be created: \n{}'.format(json.dumps(new_object[key]))) warnings.warn('Existing: \n{}'.format(json.dumps(existing_object[key])))
def _check_existing(existing_object, new_object, case, idKey): '\n Compare the new object to the existing one, warn\n about mismatches.\n ' for key in new_object.keys(): if (key not in existing_object): warning = "Found missing key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) elif (new_object[key] != existing_object[key]): warning = "Found mismatch for key '{}' at {} '{}'!".format(key, case, new_object[idKey]) warnings.warn(warning) warnings.warn('To be created: \n{}'.format(json.dumps(new_object[key]))) warnings.warn('Existing: \n{}'.format(json.dumps(existing_object[key])))<|docstring|>Compare the new object to the existing one, warn about mismatches.<|endoftext|>
00a4491bc0c259aa013d21c0a418d1f5fc800cd9032119272ea3ddc4d4f2b95e
def _check_and_create_client(keycloak_admin, new_client): '\n Check if a client exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} client exists...'.format(new_client['clientId'])) realm_clients = keycloak_admin.get_clients() clientIds = [c['clientId'] for c in realm_clients] if (new_client['clientId'] in clientIds): sys.stdout.write('found\n') realm_client = realm_clients[clientIds.index(new_client['clientId'])] secret = keycloak_admin.get_client_secrets(realm_client['id']) realm_client['secret'] = secret['value'] if ('protocolMappers' in realm_client): for mapper in realm_client['protocolMappers']: del mapper['id'] mapper['config'] = json.loads(json.dumps(mapper['config']).replace('"true"', 'true').replace('"false"', 'false')) _check_existing(realm_client, new_client, 'client', 'clientId') mappers_client_ids = ['gateway'] if (realm_client['clientId'] in mappers_client_ids): mappers_missing = (('protocolMappers' not in realm_client) and ('protocolMappers' in new_client)) audience_mapper_missing = ('audience for gateway' not in [mapper['name'] for mapper in realm_client.get('protocolMappers', [])]) if (mappers_missing or audience_mapper_missing): sys.stdout.write('found, but without the necessary protocol mapper. Adding it now...') realm_client['protocolMappers'] = (new_client['protocolMappers'] + realm_client.get('protocolMappers', [])) keycloak_admin.delete_client(realm_client['id']) keycloak_admin.create_client(realm_client) sys.stdout.write('done\n') else: sys.stdout.write('not found\n') sys.stdout.write('Creating {} client...'.format(new_client['clientId'])) keycloak_admin.create_client(new_client) sys.stdout.write('done\n')
Check if a client exists. Create it if not. Alert if it exists but with different details than what is provided.
scripts/init-realm/init-realm.py
_check_and_create_client
sebastianbertoli/renku
151
python
def _check_and_create_client(keycloak_admin, new_client): '\n Check if a client exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} client exists...'.format(new_client['clientId'])) realm_clients = keycloak_admin.get_clients() clientIds = [c['clientId'] for c in realm_clients] if (new_client['clientId'] in clientIds): sys.stdout.write('found\n') realm_client = realm_clients[clientIds.index(new_client['clientId'])] secret = keycloak_admin.get_client_secrets(realm_client['id']) realm_client['secret'] = secret['value'] if ('protocolMappers' in realm_client): for mapper in realm_client['protocolMappers']: del mapper['id'] mapper['config'] = json.loads(json.dumps(mapper['config']).replace('"true"', 'true').replace('"false"', 'false')) _check_existing(realm_client, new_client, 'client', 'clientId') mappers_client_ids = ['gateway'] if (realm_client['clientId'] in mappers_client_ids): mappers_missing = (('protocolMappers' not in realm_client) and ('protocolMappers' in new_client)) audience_mapper_missing = ('audience for gateway' not in [mapper['name'] for mapper in realm_client.get('protocolMappers', [])]) if (mappers_missing or audience_mapper_missing): sys.stdout.write('found, but without the necessary protocol mapper. Adding it now...') realm_client['protocolMappers'] = (new_client['protocolMappers'] + realm_client.get('protocolMappers', [])) keycloak_admin.delete_client(realm_client['id']) keycloak_admin.create_client(realm_client) sys.stdout.write('done\n') else: sys.stdout.write('not found\n') sys.stdout.write('Creating {} client...'.format(new_client['clientId'])) keycloak_admin.create_client(new_client) sys.stdout.write('done\n')
def _check_and_create_client(keycloak_admin, new_client): '\n Check if a client exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} client exists...'.format(new_client['clientId'])) realm_clients = keycloak_admin.get_clients() clientIds = [c['clientId'] for c in realm_clients] if (new_client['clientId'] in clientIds): sys.stdout.write('found\n') realm_client = realm_clients[clientIds.index(new_client['clientId'])] secret = keycloak_admin.get_client_secrets(realm_client['id']) realm_client['secret'] = secret['value'] if ('protocolMappers' in realm_client): for mapper in realm_client['protocolMappers']: del mapper['id'] mapper['config'] = json.loads(json.dumps(mapper['config']).replace('"true"', 'true').replace('"false"', 'false')) _check_existing(realm_client, new_client, 'client', 'clientId') mappers_client_ids = ['gateway'] if (realm_client['clientId'] in mappers_client_ids): mappers_missing = (('protocolMappers' not in realm_client) and ('protocolMappers' in new_client)) audience_mapper_missing = ('audience for gateway' not in [mapper['name'] for mapper in realm_client.get('protocolMappers', [])]) if (mappers_missing or audience_mapper_missing): sys.stdout.write('found, but without the necessary protocol mapper. Adding it now...') realm_client['protocolMappers'] = (new_client['protocolMappers'] + realm_client.get('protocolMappers', [])) keycloak_admin.delete_client(realm_client['id']) keycloak_admin.create_client(realm_client) sys.stdout.write('done\n') else: sys.stdout.write('not found\n') sys.stdout.write('Creating {} client...'.format(new_client['clientId'])) keycloak_admin.create_client(new_client) sys.stdout.write('done\n')<|docstring|>Check if a client exists. Create it if not. Alert if it exists but with different details than what is provided.<|endoftext|>
719768fe6f6d2fcc1a996c7cfcefc360de14b45c95779d6f8b33a5e41bab1d37
def _check_and_create_user(keycloak_admin, new_user): '\n Check if a user exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} user exists...'.format(new_user['username'])) realm_users = keycloak_admin.get_users(query={}) usernames = [u['username'] for u in realm_users] if (new_user['username'] in usernames): del new_user['password'] sys.stdout.write('found\n') realm_user = realm_users[usernames.index(new_user['username'])] _check_existing(realm_user, new_user, 'user', 'username') else: new_user_password = new_user['password'] del new_user['password'] sys.stdout.write('not found\n') sys.stdout.write('Creating user {} ...'.format(new_user['username'])) keycloak_admin.create_user(payload=new_user) new_user_id = keycloak_admin.get_user_id(new_user['username']) keycloak_admin.set_user_password(new_user_id, new_user_password, temporary=False) sys.stdout.write('done\n')
Check if a user exists. Create it if not. Alert if it exists but with different details than what is provided.
scripts/init-realm/init-realm.py
_check_and_create_user
sebastianbertoli/renku
151
python
def _check_and_create_user(keycloak_admin, new_user): '\n Check if a user exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} user exists...'.format(new_user['username'])) realm_users = keycloak_admin.get_users(query={}) usernames = [u['username'] for u in realm_users] if (new_user['username'] in usernames): del new_user['password'] sys.stdout.write('found\n') realm_user = realm_users[usernames.index(new_user['username'])] _check_existing(realm_user, new_user, 'user', 'username') else: new_user_password = new_user['password'] del new_user['password'] sys.stdout.write('not found\n') sys.stdout.write('Creating user {} ...'.format(new_user['username'])) keycloak_admin.create_user(payload=new_user) new_user_id = keycloak_admin.get_user_id(new_user['username']) keycloak_admin.set_user_password(new_user_id, new_user_password, temporary=False) sys.stdout.write('done\n')
def _check_and_create_user(keycloak_admin, new_user): '\n Check if a user exists. Create it if not. Alert if\n it exists but with different details than what is provided.\n ' sys.stdout.write('Checking if {} user exists...'.format(new_user['username'])) realm_users = keycloak_admin.get_users(query={}) usernames = [u['username'] for u in realm_users] if (new_user['username'] in usernames): del new_user['password'] sys.stdout.write('found\n') realm_user = realm_users[usernames.index(new_user['username'])] _check_existing(realm_user, new_user, 'user', 'username') else: new_user_password = new_user['password'] del new_user['password'] sys.stdout.write('not found\n') sys.stdout.write('Creating user {} ...'.format(new_user['username'])) keycloak_admin.create_user(payload=new_user) new_user_id = keycloak_admin.get_user_id(new_user['username']) keycloak_admin.set_user_password(new_user_id, new_user_password, temporary=False) sys.stdout.write('done\n')<|docstring|>Check if a user exists. Create it if not. Alert if it exists but with different details than what is provided.<|endoftext|>
541c8cb5482c7ba173063c6cddad79d3185acc3d3e0237ad92e8e0bb5fac5942
def test_peer_status(self): '\n Test gluster peer status\n ' mock_run = MagicMock(return_value=xml_peer_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {'uuid1': {'hostnames': ['node02', 'node02.domain.dom', '10.0.0.2']}}) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {})
Test gluster peer status
tests/unit/modules/test_glusterfs.py
test_peer_status
sys4/salt
19
python
def test_peer_status(self): '\n \n ' mock_run = MagicMock(return_value=xml_peer_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {'uuid1': {'hostnames': ['node02', 'node02.domain.dom', '10.0.0.2']}}) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {})
def test_peer_status(self): '\n \n ' mock_run = MagicMock(return_value=xml_peer_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {'uuid1': {'hostnames': ['node02', 'node02.domain.dom', '10.0.0.2']}}) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertDictEqual(glusterfs.peer_status(), {})<|docstring|>Test gluster peer status<|endoftext|>
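This and the following glusterfs test records all lean on the same unittest.mock idiom: patch.dict temporarily swaps the 'cmd.run' entry in the module's __salt__ mapping for a MagicMock whose return value is a canned XML string. A self-contained illustration on a plain dict (funcs and the strings are invented):

    from unittest.mock import MagicMock, patch

    funcs = {'cmd.run': lambda cmd: 'real output'}

    fake_run = MagicMock(return_value='<xml>mocked</xml>')
    with patch.dict(funcs, {'cmd.run': fake_run}):
        # Inside the block the code under test would see the mock.
        print(funcs['cmd.run']('gluster peer status'))    # <xml>mocked</xml>
    print(funcs['cmd.run']('gluster peer status'))        # real output

    fake_run.assert_called_once_with('gluster peer status')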
526d21db106b9eedc8ced72cd5d93d1f4f31f8fa4ee78e61fe561ba26aa01eb6
def test_peer(self): '\n Test if gluster peer call is successful.\n ' mock_run = MagicMock() with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): mock_run.return_value = xml_peer_probe_already_member self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_localhost self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_fail_cant_connect self.assertFalse(glusterfs.peer('salt'))
Test if gluster peer call is successful.
tests/unit/modules/test_glusterfs.py
test_peer
sys4/salt
19
python
def test_peer(self): '\n \n ' mock_run = MagicMock() with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): mock_run.return_value = xml_peer_probe_already_member self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_localhost self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_fail_cant_connect self.assertFalse(glusterfs.peer('salt'))
def test_peer(self): '\n \n ' mock_run = MagicMock() with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): mock_run.return_value = xml_peer_probe_already_member self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_localhost self.assertTrue(glusterfs.peer('salt')) mock_run.return_value = xml_peer_probe_fail_cant_connect self.assertFalse(glusterfs.peer('salt'))<|docstring|>Test if gluster peer call is successful.<|endoftext|>
28d6fb73a8fa844e15b346341d0b1182d9dcf0ee1871b1c87f718fd8efefca04
def test_create_volume(self): '\n Test if it creates a glusterfs volume.\n ' mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1:brick') self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1/brick') self.assertFalse(mock_run.called) mock_start_volume = MagicMock(return_value=True) with patch.object(glusterfs, 'start_volume', mock_start_volume): self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick')) self.assertFalse(mock_start_volume.called) self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) self.assertTrue(mock_start_volume.called) mock_start_volume.return_value = False self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', True, True, True, 'tcp', True))
Test if it creates a glusterfs volume.
tests/unit/modules/test_glusterfs.py
test_create_volume
sys4/salt
19
python
def test_create_volume(self): '\n \n ' mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1:brick') self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1/brick') self.assertFalse(mock_run.called) mock_start_volume = MagicMock(return_value=True) with patch.object(glusterfs, 'start_volume', mock_start_volume): self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick')) self.assertFalse(mock_start_volume.called) self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) self.assertTrue(mock_start_volume.called) mock_start_volume.return_value = False self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', True, True, True, 'tcp', True))
def test_create_volume(self): '\n \n ' mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1:brick') self.assertRaises(SaltInvocationError, glusterfs.create_volume, 'newvolume', 'host1/brick') self.assertFalse(mock_run.called) mock_start_volume = MagicMock(return_value=True) with patch.object(glusterfs, 'start_volume', mock_start_volume): self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick')) self.assertFalse(mock_start_volume.called) self.assertTrue(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) self.assertTrue(mock_start_volume.called) mock_start_volume.return_value = False self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', start=True)) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.create_volume('newvolume', 'host1:/brick', True, True, True, 'tcp', True))<|docstring|>Test if it creates a glusterfs volume.<|endoftext|>
b48dd0e4f16895b183f2d4f1674db1c9dca9eb8e41cab6bc4c0a6620321101d0
def test_list_volumes(self): '\n Test if it list configured volumes\n ' mock = MagicMock(return_value=xml_volume_absent) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), []) mock = MagicMock(return_value=xml_volume_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), ['Newvolume1', 'Newvolume2'])
Test if it list configured volumes
tests/unit/modules/test_glusterfs.py
test_list_volumes
sys4/salt
19
python
def test_list_volumes(self): '\n \n ' mock = MagicMock(return_value=xml_volume_absent) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), []) mock = MagicMock(return_value=xml_volume_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), ['Newvolume1', 'Newvolume2'])
def test_list_volumes(self): '\n \n ' mock = MagicMock(return_value=xml_volume_absent) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), []) mock = MagicMock(return_value=xml_volume_present) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertListEqual(glusterfs.list_volumes(), ['Newvolume1', 'Newvolume2'])<|docstring|>Test if it list configured volumes<|endoftext|>
834c126bc210da4dcf22c9dbe17ceb3d45de5a5a0b266247ef20814d7bf02b52
def test_status(self): '\n Test if it check the status of a gluster volume.\n ' mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertIsNone(glusterfs.status('myvol1')) res = {'bricks': {'node01:/tmp/foo': {'host': 'node01', 'hostname': 'node01', 'online': True, 'path': '/tmp/foo', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '2470', 'port': '49155', 'ports': {'rdma': 'N/A', 'tcp': '49155'}, 'status': '1'}}, 'healers': {}, 'nfs': {'node01': {'host': 'NFS Server', 'hostname': 'NFS Server', 'online': False, 'path': 'localhost', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '-1', 'port': 'N/A', 'ports': {'rdma': 'N/A', 'tcp': 'N/A'}, 'status': '0'}}} mock = MagicMock(return_value=xml_volume_status) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.status('myvol1'), res)
Test if it check the status of a gluster volume.
tests/unit/modules/test_glusterfs.py
test_status
sys4/salt
19
python
def test_status(self): '\n \n ' mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertIsNone(glusterfs.status('myvol1')) res = {'bricks': {'node01:/tmp/foo': {'host': 'node01', 'hostname': 'node01', 'online': True, 'path': '/tmp/foo', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '2470', 'port': '49155', 'ports': {'rdma': 'N/A', 'tcp': '49155'}, 'status': '1'}}, 'healers': {}, 'nfs': {'node01': {'host': 'NFS Server', 'hostname': 'NFS Server', 'online': False, 'path': 'localhost', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '-1', 'port': 'N/A', 'ports': {'rdma': 'N/A', 'tcp': 'N/A'}, 'status': '0'}}} mock = MagicMock(return_value=xml_volume_status) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.status('myvol1'), res)
def test_status(self): '\n \n ' mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertIsNone(glusterfs.status('myvol1')) res = {'bricks': {'node01:/tmp/foo': {'host': 'node01', 'hostname': 'node01', 'online': True, 'path': '/tmp/foo', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '2470', 'port': '49155', 'ports': {'rdma': 'N/A', 'tcp': '49155'}, 'status': '1'}}, 'healers': {}, 'nfs': {'node01': {'host': 'NFS Server', 'hostname': 'NFS Server', 'online': False, 'path': 'localhost', 'peerid': '830700d7-0684-497c-a12c-c02e365fb90b', 'pid': '-1', 'port': 'N/A', 'ports': {'rdma': 'N/A', 'tcp': 'N/A'}, 'status': '0'}}} mock = MagicMock(return_value=xml_volume_status) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.status('myvol1'), res)<|docstring|>Test if it check the status of a gluster volume.<|endoftext|>
187eff309c8fb84c6e247f1144775e2aca178cde31fa5011c460acde1a583d5e
def test_volume_info(self): '\n Test if it returns the volume info.\n ' res = {'myvol1': {'brickCount': '1', 'bricks': {'brick1': {'hostUuid': '830700d7-0684-497c-a12c-c02e365fb90b', 'path': 'node01:/tmp/foo', 'uuid': '830700d7-0684-497c-a12c-c02e365fb90b'}}, 'disperseCount': '0', 'distCount': '1', 'id': 'f03c2180-cf55-4f77-ae0b-3650f57c82a1', 'name': 'myvol1', 'optCount': '1', 'options': {'performance.readdir-ahead': 'on'}, 'redundancyCount': '0', 'replicaCount': '1', 'status': '1', 'statusStr': 'Started', 'stripeCount': '1', 'transport': '0', 'type': '0', 'typeStr': 'Distribute'}} mock = MagicMock(return_value=xml_volume_info_running) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.info('myvol1'), res)
Test if it returns the volume info.
tests/unit/modules/test_glusterfs.py
test_volume_info
sys4/salt
19
python
def test_volume_info(self): '\n \n ' res = {'myvol1': {'brickCount': '1', 'bricks': {'brick1': {'hostUuid': '830700d7-0684-497c-a12c-c02e365fb90b', 'path': 'node01:/tmp/foo', 'uuid': '830700d7-0684-497c-a12c-c02e365fb90b'}}, 'disperseCount': '0', 'distCount': '1', 'id': 'f03c2180-cf55-4f77-ae0b-3650f57c82a1', 'name': 'myvol1', 'optCount': '1', 'options': {'performance.readdir-ahead': 'on'}, 'redundancyCount': '0', 'replicaCount': '1', 'status': '1', 'statusStr': 'Started', 'stripeCount': '1', 'transport': '0', 'type': '0', 'typeStr': 'Distribute'}} mock = MagicMock(return_value=xml_volume_info_running) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.info('myvol1'), res)
def test_volume_info(self): '\n \n ' res = {'myvol1': {'brickCount': '1', 'bricks': {'brick1': {'hostUuid': '830700d7-0684-497c-a12c-c02e365fb90b', 'path': 'node01:/tmp/foo', 'uuid': '830700d7-0684-497c-a12c-c02e365fb90b'}}, 'disperseCount': '0', 'distCount': '1', 'id': 'f03c2180-cf55-4f77-ae0b-3650f57c82a1', 'name': 'myvol1', 'optCount': '1', 'options': {'performance.readdir-ahead': 'on'}, 'redundancyCount': '0', 'replicaCount': '1', 'status': '1', 'statusStr': 'Started', 'stripeCount': '1', 'transport': '0', 'type': '0', 'typeStr': 'Distribute'}} mock = MagicMock(return_value=xml_volume_info_running) with patch.dict(glusterfs.__salt__, {'cmd.run': mock}): self.assertDictEqual(glusterfs.info('myvol1'), res)<|docstring|>Test if it returns the volume info.<|endoftext|>
83fa1c9eae1187598b97e886a3f896fcf739509305d82140b4c7353cec831e98
def test_start_volume(self): '\n Test if it start a gluster volume.\n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), False) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), True) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), False)
Test if it start a gluster volume.
tests/unit/modules/test_glusterfs.py
test_start_volume
sys4/salt
19
python
def test_start_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), False) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), True) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), False)
def test_start_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), False) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), True) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.start_volume('Newvolume1'), True) self.assertEqual(glusterfs.start_volume('Newvolume1', force=True), False)<|docstring|>Test if it start a gluster volume.<|endoftext|>
41cec7ca8a5d950bb0293dfe3736e7518218055c33c2b38d26a3e83a6e6d085f
def test_stop_volume(self): '\n Test if it stop a gluster volume.\n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), False)
Test if it stop a gluster volume.
tests/unit/modules/test_glusterfs.py
test_stop_volume
sys4/salt
19
python
def test_stop_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), False)
def test_stop_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '0'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), True) self.assertEqual(glusterfs.stop_volume('nonExisting'), False) mock_run = MagicMock(return_value=xml_command_fail) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.stop_volume('Newvolume1'), False)<|docstring|>Test if it stop a gluster volume.<|endoftext|>
251c5068a4d0a9efc5369fc8055b957379b2f3d5f3ae918cf862ac8d33bc11e8
def test_delete_volume(self): '\n Test if it deletes a gluster volume.\n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): self.assertFalse(glusterfs.delete_volume('Newvolume3')) mock_stop_volume = MagicMock(return_value=True) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): with patch.object(glusterfs, 'stop_volume', mock_stop_volume): self.assertFalse(glusterfs.delete_volume('Newvolume1', False)) self.assertFalse(mock_run.called) self.assertFalse(mock_stop_volume.called) self.assertTrue(glusterfs.delete_volume('Newvolume1')) self.assertTrue(mock_run.called) self.assertTrue(mock_stop_volume.called) mock_info = MagicMock(return_value={'Newvolume1': {'status': '2'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertTrue(glusterfs.delete_volume('Newvolume1')) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.delete_volume('Newvolume1'))
Test if it deletes a gluster volume.
tests/unit/modules/test_glusterfs.py
test_delete_volume
sys4/salt
19
python
def test_delete_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): self.assertFalse(glusterfs.delete_volume('Newvolume3')) mock_stop_volume = MagicMock(return_value=True) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): with patch.object(glusterfs, 'stop_volume', mock_stop_volume): self.assertFalse(glusterfs.delete_volume('Newvolume1', False)) self.assertFalse(mock_run.called) self.assertFalse(mock_stop_volume.called) self.assertTrue(glusterfs.delete_volume('Newvolume1')) self.assertTrue(mock_run.called) self.assertTrue(mock_stop_volume.called) mock_info = MagicMock(return_value={'Newvolume1': {'status': '2'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertTrue(glusterfs.delete_volume('Newvolume1')) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.delete_volume('Newvolume1'))
def test_delete_volume(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1'}}) with patch.object(glusterfs, 'info', mock_info): self.assertFalse(glusterfs.delete_volume('Newvolume3')) mock_stop_volume = MagicMock(return_value=True) mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): with patch.object(glusterfs, 'stop_volume', mock_stop_volume): self.assertFalse(glusterfs.delete_volume('Newvolume1', False)) self.assertFalse(mock_run.called) self.assertFalse(mock_stop_volume.called) self.assertTrue(glusterfs.delete_volume('Newvolume1')) self.assertTrue(mock_run.called) self.assertTrue(mock_stop_volume.called) mock_info = MagicMock(return_value={'Newvolume1': {'status': '2'}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertTrue(glusterfs.delete_volume('Newvolume1')) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.delete_volume('Newvolume1'))<|docstring|>Test if it deletes a gluster volume.<|endoftext|>
152ff83583e3fc07f148a027f32194ab1ee5ecb536b6f6832308aedd7b293126
def test_add_volume_bricks(self): '\n Test if it add brick(s) to an existing volume\n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1', 'bricks': {'brick1': {'path': 'host:/path1'}, 'brick2': {'path': 'host:/path2'}}}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertFalse(glusterfs.add_volume_bricks('nonExisting', ['bricks'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/path2'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', 'host:/path2')) self.assertFalse(mock_run.called) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/new1'])) self.assertTrue(mock_run.called) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.add_volume_bricks('Newvolume1', ['new:/path']))
Test if it add brick(s) to an existing volume
tests/unit/modules/test_glusterfs.py
test_add_volume_bricks
sys4/salt
19
python
def test_add_volume_bricks(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1', 'bricks': {'brick1': {'path': 'host:/path1'}, 'brick2': {'path': 'host:/path2'}}}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertFalse(glusterfs.add_volume_bricks('nonExisting', ['bricks'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/path2'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', 'host:/path2')) self.assertFalse(mock_run.called) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/new1'])) self.assertTrue(mock_run.called) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.add_volume_bricks('Newvolume1', ['new:/path']))
def test_add_volume_bricks(self): '\n \n ' mock_info = MagicMock(return_value={'Newvolume1': {'status': '1', 'bricks': {'brick1': {'path': 'host:/path1'}, 'brick2': {'path': 'host:/path2'}}}}) with patch.object(glusterfs, 'info', mock_info): mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertFalse(glusterfs.add_volume_bricks('nonExisting', ['bricks'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/path2'])) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', 'host:/path2')) self.assertFalse(mock_run.called) self.assertTrue(glusterfs.add_volume_bricks('Newvolume1', ['host:/new1'])) self.assertTrue(mock_run.called) mock_run.return_value = xml_command_fail self.assertFalse(glusterfs.add_volume_bricks('Newvolume1', ['new:/path']))<|docstring|>Test if it add brick(s) to an existing volume<|endoftext|>
c81efe887f49f39b3cfe4e4c504ee75773af6225db53213529d8d3a65a9c6d56
def test_get_op_version(self): '\n Test retrieving the glusterfs op-version\n ' mock_run = MagicMock(return_value=xml_op_version_37) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707') mock_run = MagicMock(return_value=xml_op_version_312) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707')
Test retrieving the glusterfs op-version
tests/unit/modules/test_glusterfs.py
test_get_op_version
sys4/salt
19
python
def test_get_op_version(self): '\n \n ' mock_run = MagicMock(return_value=xml_op_version_37) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707') mock_run = MagicMock(return_value=xml_op_version_312) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707')
def test_get_op_version(self): '\n \n ' mock_run = MagicMock(return_value=xml_op_version_37) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707') mock_run = MagicMock(return_value=xml_op_version_312) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_run}): self.assertEqual(glusterfs.get_op_version('test'), '30707')<|docstring|>Test retrieving the glusterfs op-version<|endoftext|>
5dc03bce288f25ec9f2e4210608123359f50c3637d196f25dc7d9874dd271a0d
def test_get_max_op_version(self): '\n Test retrieving the glusterfs max-op-version.\n ' mock_xml = MagicMock(return_value=xml_max_op_version) mock_version = MagicMock(return_value='glusterfs 3.9.1') with patch.dict(glusterfs.__salt__, {'cmd.run': mock_version}): self.assertFalse(glusterfs.get_max_op_version()[0]) with patch.object(glusterfs, '_get_version', return_value=(3, 12, 0)): with patch.dict(glusterfs.__salt__, {'cmd.run': mock_xml}): self.assertEqual(glusterfs.get_max_op_version(), '31200')
Test retrieving the glusterfs max-op-version.
tests/unit/modules/test_glusterfs.py
test_get_max_op_version
sys4/salt
19
python
def test_get_max_op_version(self): '\n \n ' mock_xml = MagicMock(return_value=xml_max_op_version) mock_version = MagicMock(return_value='glusterfs 3.9.1') with patch.dict(glusterfs.__salt__, {'cmd.run': mock_version}): self.assertFalse(glusterfs.get_max_op_version()[0]) with patch.object(glusterfs, '_get_version', return_value=(3, 12, 0)): with patch.dict(glusterfs.__salt__, {'cmd.run': mock_xml}): self.assertEqual(glusterfs.get_max_op_version(), '31200')
def test_get_max_op_version(self): '\n \n ' mock_xml = MagicMock(return_value=xml_max_op_version) mock_version = MagicMock(return_value='glusterfs 3.9.1') with patch.dict(glusterfs.__salt__, {'cmd.run': mock_version}): self.assertFalse(glusterfs.get_max_op_version()[0]) with patch.object(glusterfs, '_get_version', return_value=(3, 12, 0)): with patch.dict(glusterfs.__salt__, {'cmd.run': mock_xml}): self.assertEqual(glusterfs.get_max_op_version(), '31200')<|docstring|>Test retrieving the glusterfs max-op-version.<|endoftext|>
84fc7fb4be6f8b02569b6a719af6bee87abc8a44bfcf679de89970f47b1409b8
def test_set_op_version(self): '\n Test setting the glusterfs op-version\n ' mock_failure = MagicMock(return_value=xml_set_op_version_failure) mock_success = MagicMock(return_value=xml_set_op_version_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_failure}): self.assertFalse(glusterfs.set_op_version(30707)[0]) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_success}): self.assertEqual(glusterfs.set_op_version(31200), 'Set volume successful')
Test setting the glusterfs op-version
tests/unit/modules/test_glusterfs.py
test_set_op_version
sys4/salt
19
python
def test_set_op_version(self): '\n \n ' mock_failure = MagicMock(return_value=xml_set_op_version_failure) mock_success = MagicMock(return_value=xml_set_op_version_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_failure}): self.assertFalse(glusterfs.set_op_version(30707)[0]) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_success}): self.assertEqual(glusterfs.set_op_version(31200), 'Set volume successful')
def test_set_op_version(self): '\n \n ' mock_failure = MagicMock(return_value=xml_set_op_version_failure) mock_success = MagicMock(return_value=xml_set_op_version_success) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_failure}): self.assertFalse(glusterfs.set_op_version(30707)[0]) with patch.dict(glusterfs.__salt__, {'cmd.run': mock_success}): self.assertEqual(glusterfs.set_op_version(31200), 'Set volume successful')<|docstring|>Test setting the glusterfs op-version<|endoftext|>
c40d075088673aedc80efba64b6e5d7a785971ebfe84ad8f8a079229aee3f44c
def test_100_create_vector_mixed_list(self): 'Test vector creation from a mixed list' input_array = [1.0, '2', '3.0', 'four', 5.65] out_array = [1.0, 2.0, 3.0, 5.65] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test vector creation from a mixed list
sci_analysis/test/test_vector.py
test_100_create_vector_mixed_list
cmmorrow/sci-analysis
17
python
def test_100_create_vector_mixed_list(self): input_array = [1.0, '2', '3.0', 'four', 5.65] out_array = [1.0, 2.0, 3.0, 5.65] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_100_create_vector_mixed_list(self): input_array = [1.0, '2', '3.0', 'four', 5.65] out_array = [1.0, 2.0, 3.0, 5.65] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test vector creation from a mixed list<|endoftext|>
4a5df91b5fc02eed40034580e50a2db32bc0d8f649ebf4c12862a1b50ebd2f40
def test_101_create_vector_missing_val(self): 'Test vector creation from a missing value list' input_array = ['1.0', '', 3, '4.1', ''] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test vector creation from a missing value list
sci_analysis/test/test_vector.py
test_101_create_vector_missing_val
cmmorrow/sci-analysis
17
python
def test_101_create_vector_missing_val(self): input_array = ['1.0', , 3, '4.1', ] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_101_create_vector_missing_val(self): input_array = ['1.0', , 3, '4.1', ] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test vector creation from a missing value list<|endoftext|>
461b7e52850d267b4529c52e2421700d30717015ad5723cde9f29e889db4544a
def test_102_create_vector_empty_list(self): 'Test vector creation from an empty list' self.assertTrue(Vector().data.empty)
Test vector creation from an empty list
sci_analysis/test/test_vector.py
test_102_create_vector_empty_list
cmmorrow/sci-analysis
17
python
def test_102_create_vector_empty_list(self): self.assertTrue(Vector().data.empty)
def test_102_create_vector_empty_list(self): self.assertTrue(Vector().data.empty)<|docstring|>Test vector creation from an empty list<|endoftext|>
1906d5f9b50dccadca13e60fc923d101eab31c34be8c8bee17b967681cf6e3a4
def test_103_create_vector_2dim_array(self): 'Test vector creation from a 2dim array' input_array = np.array([[1, 2, 3], [1, 2, 3]]) out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test vector creation from a 2dim array
sci_analysis/test/test_vector.py
test_103_create_vector_2dim_array
cmmorrow/sci-analysis
17
python
def test_103_create_vector_2dim_array(self): input_array = np.array([[1, 2, 3], [1, 2, 3]]) out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_103_create_vector_2dim_array(self): input_array = np.array([[1, 2, 3], [1, 2, 3]]) out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test vector creation from a 2dim array<|endoftext|>
354ac3bc43548826b907913209e67ea38f8b7bbfc017fac2e29cd35a1cae8d4d
def test_104_create_vector_dict(self): 'Test vector creation from a dict' input_array = {'one': 1, 'two': 2.0, 'three': '3', 'four': 'four'} self.assertTrue(Vector(input_array).is_empty())
Test vector creation from a dict
sci_analysis/test/test_vector.py
test_104_create_vector_dict
cmmorrow/sci-analysis
17
python
def test_104_create_vector_dict(self): input_array = {'one': 1, 'two': 2.0, 'three': '3', 'four': 'four'} self.assertTrue(Vector(input_array).is_empty())
def test_104_create_vector_dict(self): input_array = {'one': 1, 'two': 2.0, 'three': '3', 'four': 'four'} self.assertTrue(Vector(input_array).is_empty())<|docstring|>Test vector creation from a dict<|endoftext|>
74eecf3c03ff789468e6b134b4de743c4e1bb1a99e1f33a6d3e7791722ce9a92
def test_105_create_vector_tuple(self): 'Test vector creation from a tuple' input_array = (1, 2, 3, 4, 5) out_array = [1.0, 2.0, 3.0, 4.0, 5.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test vector creation from a tuple
sci_analysis/test/test_vector.py
test_105_create_vector_tuple
cmmorrow/sci-analysis
17
python
def test_105_create_vector_tuple(self): input_array = (1, 2, 3, 4, 5) out_array = [1.0, 2.0, 3.0, 4.0, 5.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_105_create_vector_tuple(self): input_array = (1, 2, 3, 4, 5) out_array = [1.0, 2.0, 3.0, 4.0, 5.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test vector creation from a tuple<|endoftext|>
9eacc641bc426fc6a6461d6d6cbd903953fc1d805f68fa9ab4386c8fdbf8c3d0
def test_106_create_vector_array(self): 'Test vector creation from an array' np.random.seed(987654321) input_array = st.norm.rvs(size=100) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 100) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64')) self.assertTrue(all(pd.isna(test_obj._values['lbl']))) self.assertEqual((['None'] * 100), test_obj.labels.tolist())
Test vector creation from an array
sci_analysis/test/test_vector.py
test_106_create_vector_array
cmmorrow/sci-analysis
17
python
def test_106_create_vector_array(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 100) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64')) self.assertTrue(all(pd.isna(test_obj._values['lbl']))) self.assertEqual((['None'] * 100), test_obj.labels.tolist())
def test_106_create_vector_array(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 100) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64')) self.assertTrue(all(pd.isna(test_obj._values['lbl']))) self.assertEqual((['None'] * 100), test_obj.labels.tolist())<|docstring|>Test vector creation from an array<|endoftext|>
94c4a00282264af56e0ab4ad35bcf33bbf7fd20535f501889506fd268691d283
def test_107_create_vector_array_large(self): 'Test vector creation from a large array' np.random.seed(987654321) input_array = st.norm.rvs(size=1000000) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 1000000) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64'))
Test vector creation from a large array
sci_analysis/test/test_vector.py
test_107_create_vector_array_large
cmmorrow/sci-analysis
17
python
def test_107_create_vector_array_large(self): np.random.seed(987654321) input_array = st.norm.rvs(size=1000000) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 1000000) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64'))
def test_107_create_vector_array_large(self): np.random.seed(987654321) input_array = st.norm.rvs(size=1000000) test_obj = Vector(input_array) self.assertEqual(len(test_obj), 1000000) self.assertIsInstance(test_obj, Vector) self.assertIsInstance(test_obj.data, pd.Series) self.assertEqual(test_obj.data_type, np.dtype('float64'))<|docstring|>Test vector creation from a large array<|endoftext|>
f4e5fd18a48a032ab011ace573925e85c64aaeb3f8d404557df0c4f9312d86dc
def test_108_create_vector_from_vector(self): 'Test vector creation from a vector' np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) second_array = Vector(input_array) self.assertEqual(second_array.data_type, np.dtype('float64'))
Test vector creation from a vector
sci_analysis/test/test_vector.py
test_108_create_vector_from_vector
cmmorrow/sci-analysis
17
python
def test_108_create_vector_from_vector(self): np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) second_array = Vector(input_array) self.assertEqual(second_array.data_type, np.dtype('float64'))
def test_108_create_vector_from_vector(self): np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) second_array = Vector(input_array) self.assertEqual(second_array.data_type, np.dtype('float64'))<|docstring|>Test vector creation from a vector<|endoftext|>
eeac2b54a66b4440167e5fde63f1df1ebd93e797a998ad29303aeb56a2a85583
def test_109_create_vector_2dim_list(self): 'Test vector creation from a 2dim list' input_array = [[1, 2, 3], [1, 2, 3]] out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test vector creation from a 2dim list
sci_analysis/test/test_vector.py
test_109_create_vector_2dim_list
cmmorrow/sci-analysis
17
python
def test_109_create_vector_2dim_list(self): input_array = [[1, 2, 3], [1, 2, 3]] out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_109_create_vector_2dim_list(self): input_array = [[1, 2, 3], [1, 2, 3]] out_array = [1.0, 2.0, 3.0, 1.0, 2.0, 3.0] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test vector creation from a 2dim list<|endoftext|>
6bb374e2697a872ae4fb60bf95a51aac5c35ef40b93c07c0ce3b4e4f57a99317
def test_120_create_vector_none(self): 'Test vector creation from None' self.assertTrue(Vector(None).is_empty())
Test vector creation from None
sci_analysis/test/test_vector.py
test_120_create_vector_none
cmmorrow/sci-analysis
17
python
def test_120_create_vector_none(self): self.assertTrue(Vector(None).is_empty())
def test_120_create_vector_none(self): self.assertTrue(Vector(None).is_empty())<|docstring|>Test vector creation from None<|endoftext|>
51e73d790bb1204276fa7f39de1eb415afee90cca9ad3622fbbcf652a2cbb4ef
def test_121_vector_is_empty_empty_list(self): 'Test the vector is_empty method' input_array = [] self.assertTrue(Vector(input_array).is_empty())
Test the vector is_empty method
sci_analysis/test/test_vector.py
test_121_vector_is_empty_empty_list
cmmorrow/sci-analysis
17
python
def test_121_vector_is_empty_empty_list(self): input_array = [] self.assertTrue(Vector(input_array).is_empty())
def test_121_vector_is_empty_empty_list(self): input_array = [] self.assertTrue(Vector(input_array).is_empty())<|docstring|>Test the vector is_empty method<|endoftext|>
91301e35029f10d4b911d805a5683cd50d39566dfae2d3dba044d986a3879036
def test_122_vector_is_empty_empty_array(self): 'Test the vector is_empty method' input_array = np.array([]) self.assertTrue(Vector(input_array).is_empty())
Test the vector is_empty method
sci_analysis/test/test_vector.py
test_122_vector_is_empty_empty_array
cmmorrow/sci-analysis
17
python
def test_122_vector_is_empty_empty_array(self): input_array = np.array([]) self.assertTrue(Vector(input_array).is_empty())
def test_122_vector_is_empty_empty_array(self): input_array = np.array([]) self.assertTrue(Vector(input_array).is_empty())<|docstring|>Test the vector is_empty method<|endoftext|>
5fb35bf9df0cb45a375ded7cc0a7d3f2b838580637fbd4ff62441f4ee6049d13
def test_124_drop_nan(self): 'Test the drop_nan method' input_array = ['1.0', '', 3, '4.1', ''] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())
Test the drop_nan method
sci_analysis/test/test_vector.py
test_124_drop_nan
cmmorrow/sci-analysis
17
python
def test_124_drop_nan(self): input_array = ['1.0', , 3, '4.1', ] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())
def test_124_drop_nan(self): input_array = ['1.0', , 3, '4.1', ] out_array = [1.0, 3.0, 4.1] self.assertListEqual(out_array, Vector(input_array).data.tolist())<|docstring|>Test the drop_nan method<|endoftext|>
4ba64c60651979c36b436b86978932fcd94e35eba0d46fd81901ddccba3746c4
def test_125_drop_nan_empty(self): 'Test the drop_nan method on an empty array' input_array = ['one', 'two', 'three', 'four'] self.assertTrue(Vector(input_array).is_empty())
Test the drop_nan method on an empty array
sci_analysis/test/test_vector.py
test_125_drop_nan_empty
cmmorrow/sci-analysis
17
python
def test_125_drop_nan_empty(self): input_array = ['one', 'two', 'three', 'four'] self.assertTrue(Vector(input_array).is_empty())
def test_125_drop_nan_empty(self): input_array = ['one', 'two', 'three', 'four'] self.assertTrue(Vector(input_array).is_empty())<|docstring|>Test the drop_nan method on an empty array<|endoftext|>
f6da1a52ef02a78021ad18f9ca69eef5cf437f83507f45a29ac6fc98c5dc4a3e
def test_126_drop_nan_intersect(self): 'Test the drop_nan_intersect method' input_array_1 = [1.0, np.nan, 3.0, np.nan, 5.0] input_array_2 = [11.0, np.nan, 13.0, 14.0, 15.0] out1 = [1.0, 3.0, 5.0] out2 = [11.0, 13.0, 15.0] vector = Vector(input_array_1, input_array_2) self.assertListEqual(out1, vector.data.tolist()) self.assertListEqual(out2, vector.other.tolist())
Test the drop_nan_intersect method
sci_analysis/test/test_vector.py
test_126_drop_nan_intersect
cmmorrow/sci-analysis
17
python
def test_126_drop_nan_intersect(self): input_array_1 = [1.0, np.nan, 3.0, np.nan, 5.0] input_array_2 = [11.0, np.nan, 13.0, 14.0, 15.0] out1 = [1.0, 3.0, 5.0] out2 = [11.0, 13.0, 15.0] vector = Vector(input_array_1, input_array_2) self.assertListEqual(out1, vector.data.tolist()) self.assertListEqual(out2, vector.other.tolist())
def test_126_drop_nan_intersect(self): input_array_1 = [1.0, np.nan, 3.0, np.nan, 5.0] input_array_2 = [11.0, np.nan, 13.0, 14.0, 15.0] out1 = [1.0, 3.0, 5.0] out2 = [11.0, 13.0, 15.0] vector = Vector(input_array_1, input_array_2) self.assertListEqual(out1, vector.data.tolist()) self.assertListEqual(out2, vector.other.tolist())<|docstring|>Test the drop_nan_intersect method<|endoftext|>
d7e9146601a3dd0c64caced4996f5e3a8c26d5737d153f0576e2269dd18f1e6c
def test_127_drop_nan_intersect_empty(self): 'Test the drop_nan_intersect method with one empty array' input_array_2 = ['one', 'two', 'three', 'four', 'five'] input_array_1 = [11.0, np.nan, 13.0, 14.0, 15.0] self.assertTrue(Vector(input_array_1, input_array_2).other.empty)
Test the drop_nan_intersect method with one empty array
sci_analysis/test/test_vector.py
test_127_drop_nan_intersect_empty
cmmorrow/sci-analysis
17
python
def test_127_drop_nan_intersect_empty(self): input_array_2 = ['one', 'two', 'three', 'four', 'five'] input_array_1 = [11.0, np.nan, 13.0, 14.0, 15.0] self.assertTrue(Vector(input_array_1, input_array_2).other.empty)
def test_127_drop_nan_intersect_empty(self): input_array_2 = ['one', 'two', 'three', 'four', 'five'] input_array_1 = [11.0, np.nan, 13.0, 14.0, 15.0] self.assertTrue(Vector(input_array_1, input_array_2).other.empty)<|docstring|>Test the drop_nan_intersect method with one empty array<|endoftext|>
8546f83b14224d3e33f9439e6a4e4255d54871b48b87b9e65079a06f0d9d0820
def test_129_vector_data_prep(self): 'Test the vector data_prep method' np.random.seed(987654321) input_array = st.norm.rvs(size=100) input_array[4] = np.nan input_array[16] = np.nan input_array[32] = np.nan input_array[64] = np.nan self.assertEqual(len(Vector(input_array)), 96)
Test the vector data_prep method
sci_analysis/test/test_vector.py
test_129_vector_data_prep
cmmorrow/sci-analysis
17
python
def test_129_vector_data_prep(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) input_array[4] = np.nan input_array[16] = np.nan input_array[32] = np.nan input_array[64] = np.nan self.assertEqual(len(Vector(input_array)), 96)
def test_129_vector_data_prep(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) input_array[4] = np.nan input_array[16] = np.nan input_array[32] = np.nan input_array[64] = np.nan self.assertEqual(len(Vector(input_array)), 96)<|docstring|>Test the vector data_prep method<|endoftext|>
e56bdef50722a8e1e1c9fddf6d2dfb936668ec5061e6b9a5dfcbe7d9a0284c28
def test_131_vector_data_prep_two_arrays(self): 'Test the vector data_prep method when there are two vectors' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan vector = Vector(input_array_1, input_array_2) (x, y) = (vector.data, vector.other) self.assertEqual((len(x), len(y)), (93, 93))
Test the vector data_prep method when there are two vectors
sci_analysis/test/test_vector.py
test_131_vector_data_prep_two_arrays
cmmorrow/sci-analysis
17
python
def test_131_vector_data_prep_two_arrays(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan vector = Vector(input_array_1, input_array_2) (x, y) = (vector.data, vector.other) self.assertEqual((len(x), len(y)), (93, 93))
def test_131_vector_data_prep_two_arrays(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan vector = Vector(input_array_1, input_array_2) (x, y) = (vector.data, vector.other) self.assertEqual((len(x), len(y)), (93, 93))<|docstring|>Test the vector data_prep method when there are two vectors<|endoftext|>
5dade039189dfb790f8d134ac0cfc7a582d2f6708eebf7602629779959891068
def test_132_vector_data_prep_two_unequal_arrays(self): 'Test the vector data_prep method when there are two vectors with different lengths' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=92) input_array_2 = st.norm.rvs(size=100) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2)))
Test the vector data_prep method when there are two vectors with different lengths
sci_analysis/test/test_vector.py
test_132_vector_data_prep_two_unequal_arrays
cmmorrow/sci-analysis
17
python
def test_132_vector_data_prep_two_unequal_arrays(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=92) input_array_2 = st.norm.rvs(size=100) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2)))
def test_132_vector_data_prep_two_unequal_arrays(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=92) input_array_2 = st.norm.rvs(size=100) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2)))<|docstring|>Test the vector data_prep method when there are two vectors with different lengths<|endoftext|>
c0867dc836a5db03c34432d5a8103fe9a535c5fd331bd6aea356610dc1629736
def test_133_vector_data_prep_two_empty_arrays(self): 'Test the vector data_prep method when there are two empty vectors' input_array_1 = ['one', 'two', 'three', 'four', 'five'] input_array_2 = ['three', 'four', 'five', 'six', 'seven'] self.assertTrue(Vector(input_array_1, input_array_2).is_empty())
Test the vector data_prep method when there are two empty vectors
sci_analysis/test/test_vector.py
test_133_vector_data_prep_two_empty_arrays
cmmorrow/sci-analysis
17
python
def test_133_vector_data_prep_two_empty_arrays(self): input_array_1 = ['one', 'two', 'three', 'four', 'five'] input_array_2 = ['three', 'four', 'five', 'six', 'seven'] self.assertTrue(Vector(input_array_1, input_array_2).is_empty())
def test_133_vector_data_prep_two_empty_arrays(self): input_array_1 = ['one', 'two', 'three', 'four', 'five'] input_array_2 = ['three', 'four', 'five', 'six', 'seven'] self.assertTrue(Vector(input_array_1, input_array_2).is_empty())<|docstring|>Test the vector data_prep method when there are two empty vectors<|endoftext|>
7781af0d7038d421f84fc80b1b57e5b37ed33c46e196994263db71d7a40719b3
def test_134_vector_data_prep_int(self): 'Test the vector data_prep method on an int value' self.assertTrue(Vector(4).data.equals(pd.Series([4.0], name='ind')))
Test the vector data_prep method on an int value
sci_analysis/test/test_vector.py
test_134_vector_data_prep_int
cmmorrow/sci-analysis
17
python
def test_134_vector_data_prep_int(self): self.assertTrue(Vector(4).data.equals(pd.Series([4.0], name='ind')))
def test_134_vector_data_prep_int(self): self.assertTrue(Vector(4).data.equals(pd.Series([4.0], name='ind')))<|docstring|>Test the vector data_prep method on an int value<|endoftext|>
69e01e891ad5618e9063bcbac8c70a59ec426b566bebbc1869e77aa1c08fb84e
def test_135_vector_data_prep_float(self): 'Test the vector data_prep method on an int value' self.assertTrue(Vector(4.0).data.equals(pd.Series([4.0], name='ind')))
Test the vector data_prep method on an int value
sci_analysis/test/test_vector.py
test_135_vector_data_prep_float
cmmorrow/sci-analysis
17
python
def test_135_vector_data_prep_float(self): self.assertTrue(Vector(4.0).data.equals(pd.Series([4.0], name='ind')))
def test_135_vector_data_prep_float(self): self.assertTrue(Vector(4.0).data.equals(pd.Series([4.0], name='ind')))<|docstring|>Test the vector data_prep method on an int value<|endoftext|>
a49b2674f11892f55a17379eb2632c83b32c28c30a69c2a595fde8672d00a1ba
def test_136_vector_data_prep_string(self): 'Test the vector data_prep method on an int value' self.assertTrue(Vector('four').is_empty())
Test the vector data_prep method on an int value
sci_analysis/test/test_vector.py
test_136_vector_data_prep_string
cmmorrow/sci-analysis
17
python
def test_136_vector_data_prep_string(self): self.assertTrue(Vector('four').is_empty())
def test_136_vector_data_prep_string(self): self.assertTrue(Vector('four').is_empty())<|docstring|>Test the vector data_prep method on an int value<|endoftext|>
bf49fa7182b7d3051215a54a1964b3a738ce3caf98285c3dad73898f1c45e2dc
def test_137_basic_groupby(self): 'Test the group property produces the correct dictionary' ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c')))
Test the group property produces the correct dictionary
sci_analysis/test/test_vector.py
test_137_basic_groupby
cmmorrow/sci-analysis
17
python
def test_137_basic_groupby(self): ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c')))
def test_137_basic_groupby(self): ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c')))<|docstring|>Test the group property produces the correct dictionary<|endoftext|>
8370bac711a7a0f6e22b18fb2807cb8403d4a066c6871f85de05ad75811d44a2
def test_138_nan_groupby(self): 'Test the group property where certain values in data are NaN.' ind = [1, np.nan, 3, np.nan, 2, 3, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([3.0], index=[5], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 2.0], index=[2, 7], name='c')))
Test the group property where certain values in data are NaN.
sci_analysis/test/test_vector.py
test_138_nan_groupby
cmmorrow/sci-analysis
17
python
def test_138_nan_groupby(self): ind = [1, np.nan, 3, np.nan, 2, 3, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([3.0], index=[5], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 2.0], index=[2, 7], name='c')))
def test_138_nan_groupby(self): ind = [1, np.nan, 3, np.nan, 2, 3, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([3.0], index=[5], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 2.0], index=[2, 7], name='c')))<|docstring|>Test the group property where certain values in data are NaN.<|endoftext|>
0885a6da2f236f32762e866cd49ba9845ed19a02b01583bf970abe51b583d02c
def test_139_nan_drop_groupby(self): 'Test the group property where certain values in data are NaN which causes a group to be dropped.' ind = [1, np.nan, 3, 1, 2, np.nan, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertNotIn('b', groups.keys())
Test the group property where certain values in data are NaN which causes a group to be dropped.
sci_analysis/test/test_vector.py
test_139_nan_drop_groupby
cmmorrow/sci-analysis
17
python
def test_139_nan_drop_groupby(self): ind = [1, np.nan, 3, 1, 2, np.nan, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertNotIn('b', groups.keys())
def test_139_nan_drop_groupby(self): ind = [1, np.nan, 3, 1, 2, np.nan, np.nan, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp).groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertNotIn('b', groups.keys())<|docstring|>Test the group property where certain values in data are NaN which causes a group to be dropped.<|endoftext|>
1d4cede191b9ae3fe5be283e3f2de29bb472c2070ccc801587d8a7c7139c0312
def test_140_vector_groups_dtype_passed_group_names(self): 'Test to make sure the dtype of the groups column is categorical.' ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp) self.assertEqual(groups.values['grp'].dtype, 'category')
Test to make sure the dtype of the groups column is categorical.
sci_analysis/test/test_vector.py
test_140_vector_groups_dtype_passed_group_names
cmmorrow/sci-analysis
17
python
def test_140_vector_groups_dtype_passed_group_names(self): ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp) self.assertEqual(groups.values['grp'].dtype, 'category')
def test_140_vector_groups_dtype_passed_group_names(self): ind = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] groups = Vector(ind, groups=grp) self.assertEqual(groups.values['grp'].dtype, 'category')<|docstring|>Test to make sure the dtype of the groups column is categorical.<|endoftext|>
e07acd3204061d6fd4806d86301e3ff8530ad1951b2829a63b2be6092e12b983
def test_141_vector_groups_dtype_passed_no_group(self): 'Test to make sure the dtype of the groups column is categorical.' ind = st.norm.rvs(size=1000) groups = Vector(ind) self.assertEqual(groups.values['grp'].dtype, 'category')
Test to make sure the dtype of the groups column is categorical.
sci_analysis/test/test_vector.py
test_141_vector_groups_dtype_passed_no_group
cmmorrow/sci-analysis
17
python
def test_141_vector_groups_dtype_passed_no_group(self): ind = st.norm.rvs(size=1000) groups = Vector(ind) self.assertEqual(groups.values['grp'].dtype, 'category')
def test_141_vector_groups_dtype_passed_no_group(self): ind = st.norm.rvs(size=1000) groups = Vector(ind) self.assertEqual(groups.values['grp'].dtype, 'category')<|docstring|>Test to make sure the dtype of the groups column is categorical.<|endoftext|>
02205db955710dc1f792a5dd4b5939632169c13e607cb05e823ec01f4a85aae2
def test_142_vector_append_existing_groups_with_new_groups(self): 'Test appending a new vector to an existing one.' ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['d', 'd', 'd'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertTrue(groups['d'].equals(pd.Series([1.0, 2.0, 3.0], index=[9, 10, 11], name='d'))) self.assertIn('d', groups.keys())
Test appending a new vector to an existing one.
sci_analysis/test/test_vector.py
test_142_vector_append_existing_groups_with_new_groups
cmmorrow/sci-analysis
17
python
def test_142_vector_append_existing_groups_with_new_groups(self): ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['d', 'd', 'd'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertTrue(groups['d'].equals(pd.Series([1.0, 2.0, 3.0], index=[9, 10, 11], name='d'))) self.assertIn('d', groups.keys())
def test_142_vector_append_existing_groups_with_new_groups(self): ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['d', 'd', 'd'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0], index=[0, 4, 8], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0], index=[1, 5, 6], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0], index=[2, 3, 7], name='c'))) self.assertTrue(groups['d'].equals(pd.Series([1.0, 2.0, 3.0], index=[9, 10, 11], name='d'))) self.assertIn('d', groups.keys())<|docstring|>Test appending a new vector to an existing one.<|endoftext|>
128bd344eaf6277d64a6a5ccdf9c9d1a5a856f1134f8694397a0e0d76e454b88
def test_143_vector_append_existing_groups_with_existing_groups(self): 'Test appending a new vector to an existing one.' ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['a', 'b', 'c'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0, 1.0], index=[0, 4, 8, 9], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0, 2.0], index=[1, 5, 6, 10], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0, 3.0], index=[2, 3, 7, 11], name='c')))
Test appending a new vector to an existing one.
sci_analysis/test/test_vector.py
test_143_vector_append_existing_groups_with_existing_groups
cmmorrow/sci-analysis
17
python
def test_143_vector_append_existing_groups_with_existing_groups(self): ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['a', 'b', 'c'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0, 1.0], index=[0, 4, 8, 9], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0, 2.0], index=[1, 5, 6, 10], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0, 3.0], index=[2, 3, 7, 11], name='c')))
def test_143_vector_append_existing_groups_with_existing_groups(self): ind1 = [1, 2, 3, 1, 2, 3, 1, 2, 3] grp1 = ['a', 'b', 'c', 'c', 'a', 'b', 'b', 'c', 'a'] ind2 = [1, 2, 3] grp2 = ['a', 'b', 'c'] input1 = Vector(ind1, groups=grp1) input2 = Vector(ind2, groups=grp2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups['a'].equals(pd.Series([1.0, 2.0, 3.0, 1.0], index=[0, 4, 8, 9], name='a'))) self.assertTrue(groups['b'].equals(pd.Series([2.0, 3.0, 1.0, 2.0], index=[1, 5, 6, 10], name='b'))) self.assertTrue(groups['c'].equals(pd.Series([3.0, 1.0, 2.0, 3.0], index=[2, 3, 7, 11], name='c')))<|docstring|>Test appending a new vector to an existing one.<|endoftext|>
df7b78d4ad0f5b5aac67bf776840424da20c1521c937a1b1b0fd3d5637eec078
def test_144_vector_append_generated_groups_1(self): 'Test appending a new vector to an existing one.' ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] input1 = Vector(ind1) input2 = Vector(ind2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertListEqual([1, 2], list(groups.keys()))
Test appending a new vector to an existing one.
sci_analysis/test/test_vector.py
test_144_vector_append_generated_groups_1
cmmorrow/sci-analysis
17
python
def test_144_vector_append_generated_groups_1(self): ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] input1 = Vector(ind1) input2 = Vector(ind2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertListEqual([1, 2], list(groups.keys()))
def test_144_vector_append_generated_groups_1(self): ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] input1 = Vector(ind1) input2 = Vector(ind2) new_input = input1.append(input2) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertListEqual([1, 2], list(groups.keys()))<|docstring|>Test appending a new vector to an existing one.<|endoftext|>
b8b3c6b1672ea20f0301ee82fdff8d0ad4af72357414916dfde0f803de17a888
def test_145_vector_append_generated_groups_2(self): 'Test appending a new vector to an existing one.' ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] ind3 = [10, 11, 12, 13, 14] input1 = Vector(ind1) input2 = Vector(ind2) input3 = Vector(ind3) new_input = input1.append(input2).append(input3) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertTrue(groups[3].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[10, 11, 12, 13, 14], name=3))) self.assertListEqual([1, 2, 3], list(groups.keys()))
Test appending a new vector to an existing one.
sci_analysis/test/test_vector.py
test_145_vector_append_generated_groups_2
cmmorrow/sci-analysis
17
python
def test_145_vector_append_generated_groups_2(self): ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] ind3 = [10, 11, 12, 13, 14] input1 = Vector(ind1) input2 = Vector(ind2) input3 = Vector(ind3) new_input = input1.append(input2).append(input3) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertTrue(groups[3].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[10, 11, 12, 13, 14], name=3))) self.assertListEqual([1, 2, 3], list(groups.keys()))
def test_145_vector_append_generated_groups_2(self): ind1 = [0, 1, 2, 3, 4] ind2 = [5, 6, 7, 8, 9] ind3 = [10, 11, 12, 13, 14] input1 = Vector(ind1) input2 = Vector(ind2) input3 = Vector(ind3) new_input = input1.append(input2).append(input3) groups = new_input.groups self.assertTrue(groups[1].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0], index=[0, 1, 2, 3, 4], name=1))) self.assertTrue(groups[2].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0], index=[5, 6, 7, 8, 9], name=2))) self.assertTrue(groups[3].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[10, 11, 12, 13, 14], name=3))) self.assertListEqual([1, 2, 3], list(groups.keys()))<|docstring|>Test appending a new vector to an existing one.<|endoftext|>
c0759977ede9796af362a6dc6ae4f4889ef8d7b6c5d7118878ed679a4dd14494
def test_146_vector_append_not_a_vector(self): 'Test the error raised by appending a non-vector object.' input1 = [1, 2, 3, 4, 5] input2 = [6, 7, 8, 9, 10] self.assertRaises(ValueError, (lambda : Vector(input1).append(input2)))
Test the error raised by appending a non-vector object.
sci_analysis/test/test_vector.py
test_146_vector_append_not_a_vector
cmmorrow/sci-analysis
17
python
def test_146_vector_append_not_a_vector(self): input1 = [1, 2, 3, 4, 5] input2 = [6, 7, 8, 9, 10] self.assertRaises(ValueError, (lambda : Vector(input1).append(input2)))
def test_146_vector_append_not_a_vector(self): input1 = [1, 2, 3, 4, 5] input2 = [6, 7, 8, 9, 10] self.assertRaises(ValueError, (lambda : Vector(input1).append(input2)))<|docstring|>Test the error raised by appending a non-vector object.<|endoftext|>
af3f86289f80dfe56cb1eafff15095cfbb2f7eda40f3a75ce08089d09e27b0c6
def test_147_empty_vector_append_none(self): 'Test to make sure appending an empty Vector returns the original Vector.' input_array = [] self.assertTrue(Vector(input_array).append(Vector(None)).data.empty)
Test to make sure appending an empty Vector returns the original Vector.
sci_analysis/test/test_vector.py
test_147_empty_vector_append_none
cmmorrow/sci-analysis
17
python
def test_147_empty_vector_append_none(self): input_array = [] self.assertTrue(Vector(input_array).append(Vector(None)).data.empty)
def test_147_empty_vector_append_none(self): input_array = [] self.assertTrue(Vector(input_array).append(Vector(None)).data.empty)<|docstring|>Test to make sure appending an empty Vector returns the original Vector.<|endoftext|>
ec437817f08a2694e33d1979eb9658cc764dd3987ca4cb18d194b612ab605ced
def test_148_vector_append_none(self): 'Test to make sure appending an empty Vector returns the original Vector.' input_array = [1, 2, 3, 4, 5] self.assertTrue(Vector(input_array).append(Vector(None)).data.equals(pd.Series(input_array).astype('float')))
Test to make sure appending an empty Vector returns the original Vector.
sci_analysis/test/test_vector.py
test_148_vector_append_none
cmmorrow/sci-analysis
17
python
def test_148_vector_append_none(self): input_array = [1, 2, 3, 4, 5] self.assertTrue(Vector(input_array).append(Vector(None)).data.equals(pd.Series(input_array).astype('float')))
def test_148_vector_append_none(self): input_array = [1, 2, 3, 4, 5] self.assertTrue(Vector(input_array).append(Vector(None)).data.equals(pd.Series(input_array).astype('float')))<|docstring|>Test to make sure appending an empty Vector returns the original Vector.<|endoftext|>
da10c0a1e839d8ccb363bc8b6cc147d5bf8db6d6af716dea653ee1fbd1f5fac1
def test_149_vector_paired_groups(self): "Test that paired groups doesn't return empty groups.." ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] input1 = Vector(ind_x_1, other=ind_y_1) input2 = Vector(ind_x_2, other=ind_y_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.paired_groups self.assertTrue(groups[1][0].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0]))) self.assertTrue(groups[1][1].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0]))) self.assertTrue(groups[2][0].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[5, 6, 7, 8, 9]))) self.assertTrue(groups[2][1].equals(pd.Series([15.0, 16.0, 17.0, 18.0, 19.0], index=[5, 6, 7, 8, 9]))) self.assertListEqual([1, 2], list(groups.keys()))
Test that paired groups doesn't return empty groups..
sci_analysis/test/test_vector.py
test_149_vector_paired_groups
cmmorrow/sci-analysis
17
python
def test_149_vector_paired_groups(self): ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] input1 = Vector(ind_x_1, other=ind_y_1) input2 = Vector(ind_x_2, other=ind_y_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.paired_groups self.assertTrue(groups[1][0].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0]))) self.assertTrue(groups[1][1].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0]))) self.assertTrue(groups[2][0].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[5, 6, 7, 8, 9]))) self.assertTrue(groups[2][1].equals(pd.Series([15.0, 16.0, 17.0, 18.0, 19.0], index=[5, 6, 7, 8, 9]))) self.assertListEqual([1, 2], list(groups.keys()))
def test_149_vector_paired_groups(self): ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] input1 = Vector(ind_x_1, other=ind_y_1) input2 = Vector(ind_x_2, other=ind_y_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.paired_groups self.assertTrue(groups[1][0].equals(pd.Series([0.0, 1.0, 2.0, 3.0, 4.0]))) self.assertTrue(groups[1][1].equals(pd.Series([5.0, 6.0, 7.0, 8.0, 9.0]))) self.assertTrue(groups[2][0].equals(pd.Series([10.0, 11.0, 12.0, 13.0, 14.0], index=[5, 6, 7, 8, 9]))) self.assertTrue(groups[2][1].equals(pd.Series([15.0, 16.0, 17.0, 18.0, 19.0], index=[5, 6, 7, 8, 9]))) self.assertListEqual([1, 2], list(groups.keys()))<|docstring|>Test that paired groups doesn't return empty groups..<|endoftext|>
68c658224ff337bd4ec0920abbfeb65d8bc9cf522812b231cca7c5a4fc430034
def test_150_vector_flatten_singled(self): 'Test the Vector flatten method on a single vector.' np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) self.assertEqual(len(input_array.flatten()), 1) self.assertTrue(input_array.data.equals(input_array.flatten()[0]))
Test the Vector flatten method on a single vector.
sci_analysis/test/test_vector.py
test_150_vector_flatten_singled
cmmorrow/sci-analysis
17
python
def test_150_vector_flatten_singled(self): np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) self.assertEqual(len(input_array.flatten()), 1) self.assertTrue(input_array.data.equals(input_array.flatten()[0]))
def test_150_vector_flatten_singled(self): np.random.seed(987654321) input_array = Vector(st.norm.rvs(size=100)) self.assertEqual(len(input_array.flatten()), 1) self.assertTrue(input_array.data.equals(input_array.flatten()[0]))<|docstring|>Test the Vector flatten method on a single vector.<|endoftext|>
cec52c7b0efb0cc3f4f0c61a6027d831732966fe3c63876d309b0c49491c4fbc
def test_151_vector_flatten_several_groups(self): 'Test the Vector flatten method on a a single vector with multiple groups.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)) self.assertEqual(len(input_array.flatten()), 3) self.assertEqual(type(input_array.flatten()), tuple) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.groups[3].equals(input_array.flatten()[2]))
Test the Vector flatten method on a single vector with multiple groups.
sci_analysis/test/test_vector.py
test_151_vector_flatten_several_groups
cmmorrow/sci-analysis
17
python
def test_151_vector_flatten_several_groups(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)) self.assertEqual(len(input_array.flatten()), 3) self.assertEqual(type(input_array.flatten()), tuple) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.groups[3].equals(input_array.flatten()[2]))
def test_151_vector_flatten_several_groups(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)) self.assertEqual(len(input_array.flatten()), 3) self.assertEqual(type(input_array.flatten()), tuple) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.groups[3].equals(input_array.flatten()[2]))<|docstring|>Test the Vector flatten method on a single vector with multiple groups.<|endoftext|>
cf4d52e204ca0b0fb78ca25fa9dea5abf4fb12c2dc4c827b06773d54e204dc5a
def test_152_vector_flatten_several_paired_groups(self): 'Test the Vector flatten method on a paired vector with multiple groups.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=100) input_array = Vector(input_array_1, other=input_array_2).append(Vector(input_array_3, other=input_array_4)) self.assertEqual(len(input_array.flatten()), 4) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.paired_groups[1][1].equals(input_array.flatten()[2])) self.assertTrue(input_array.paired_groups[2][1].equals(input_array.flatten()[3]))
Test the Vector flatten method on a paired vector with multiple groups.
sci_analysis/test/test_vector.py
test_152_vector_flatten_several_paired_groups
cmmorrow/sci-analysis
17
python
def test_152_vector_flatten_several_paired_groups(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=100) input_array = Vector(input_array_1, other=input_array_2).append(Vector(input_array_3, other=input_array_4)) self.assertEqual(len(input_array.flatten()), 4) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.paired_groups[1][1].equals(input_array.flatten()[2])) self.assertTrue(input_array.paired_groups[2][1].equals(input_array.flatten()[3]))
def test_152_vector_flatten_several_paired_groups(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=100) input_array = Vector(input_array_1, other=input_array_2).append(Vector(input_array_3, other=input_array_4)) self.assertEqual(len(input_array.flatten()), 4) self.assertTrue(input_array.groups[1].equals(input_array.flatten()[0])) self.assertTrue(input_array.groups[2].equals(input_array.flatten()[1])) self.assertTrue(input_array.paired_groups[1][1].equals(input_array.flatten()[2])) self.assertTrue(input_array.paired_groups[2][1].equals(input_array.flatten()[3]))<|docstring|>Test the Vector flatten method on a paired vector with multiple groups.<|endoftext|>
ebbb1c873cf8c66448a7c6cffc35480359c7cc5456e24878df583806da03ead5
def test_153_vector_data_frame(self): 'Test that a ValueError is raised when the input array is a pandas DataFrame.' input_array = pd.DataFrame([1, 2, 3], [4, 5, 6]) self.assertRaises(ValueError, (lambda : Vector(input_array)))
Test that a ValueError is raised when the input array is a pandas DataFrame.
sci_analysis/test/test_vector.py
test_153_vector_data_frame
cmmorrow/sci-analysis
17
python
def test_153_vector_data_frame(self): input_array = pd.DataFrame([1, 2, 3], [4, 5, 6]) self.assertRaises(ValueError, (lambda : Vector(input_array)))
def test_153_vector_data_frame(self): input_array = pd.DataFrame([1, 2, 3], [4, 5, 6]) self.assertRaises(ValueError, (lambda : Vector(input_array)))<|docstring|>Test that a ValueError is raised when the input array is a pandas DataFrame.<|endoftext|>
c39c479cba85c0f39fc4c63a096acda6e60a578e89c3763b9ad84375faf8fcc3
def test_154_vector_with_labels(self): 'Test that labels are created correctly and available with the labels property.' np.random.seed(987654321) input_array = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) test_obj = Vector(input_array, labels=labels) self.assertListEqual(pd.Series(labels).tolist(), test_obj.labels.tolist()) self.assertIsInstance(test_obj.labels, pd.Series)
Test that labels are created correctly and available with the labels property.
sci_analysis/test/test_vector.py
test_154_vector_with_labels
cmmorrow/sci-analysis
17
python
def test_154_vector_with_labels(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) test_obj = Vector(input_array, labels=labels) self.assertListEqual(pd.Series(labels).tolist(), test_obj.labels.tolist()) self.assertIsInstance(test_obj.labels, pd.Series)
def test_154_vector_with_labels(self): np.random.seed(987654321) input_array = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) test_obj = Vector(input_array, labels=labels) self.assertListEqual(pd.Series(labels).tolist(), test_obj.labels.tolist()) self.assertIsInstance(test_obj.labels, pd.Series)<|docstring|>Test that labels are created correctly and available with the labels property.<|endoftext|>
53ecad2e8267a060f447235283871d97936ae596601d40d3911a54fd9e64db63
def test_155_vector_drop_nan_with_labels(self): 'Test to make sure labels are properly dropped when drop_nan is called.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_1[17] = np.nan input_array_1[22] = np.nan input_array_1[43] = np.nan input_array_1[89] = np.nan test_obj = Vector(input_array_1, labels=labels) self.assertEqual(len(test_obj.labels), 92) self.assertRaises(KeyError, (lambda : test_obj.labels[32]))
Test to make sure labels are properly dropped when drop_nan is called.
sci_analysis/test/test_vector.py
test_155_vector_drop_nan_with_labels
cmmorrow/sci-analysis
17
python
def test_155_vector_drop_nan_with_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_1[17] = np.nan input_array_1[22] = np.nan input_array_1[43] = np.nan input_array_1[89] = np.nan test_obj = Vector(input_array_1, labels=labels) self.assertEqual(len(test_obj.labels), 92) self.assertRaises(KeyError, (lambda : test_obj.labels[32]))
def test_155_vector_drop_nan_with_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_1[17] = np.nan input_array_1[22] = np.nan input_array_1[43] = np.nan input_array_1[89] = np.nan test_obj = Vector(input_array_1, labels=labels) self.assertEqual(len(test_obj.labels), 92) self.assertRaises(KeyError, (lambda : test_obj.labels[32]))<|docstring|>Test to make sure labels are properly dropped when drop_nan is called.<|endoftext|>
851db680f9e31389fb48b5c826e6007ccbc867ec268d33e6e0df3faed6354bc3
def test_156_vector_drop_nan_intersect_with_labels(self): 'Test to make sure labels are properly dropped when drop_nan_intersect is called.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertRaises(KeyError, (lambda : test_obj.labels[32])) self.assertRaises(KeyError, (lambda : test_obj.labels[8]))
Test to make sure labels are properly dropped when drop_nan_intersect is called.
sci_analysis/test/test_vector.py
test_156_vector_drop_nan_intersect_with_labels
cmmorrow/sci-analysis
17
python
def test_156_vector_drop_nan_intersect_with_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertRaises(KeyError, (lambda : test_obj.labels[32])) self.assertRaises(KeyError, (lambda : test_obj.labels[8]))
def test_156_vector_drop_nan_intersect_with_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100) input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertRaises(KeyError, (lambda : test_obj.labels[32])) self.assertRaises(KeyError, (lambda : test_obj.labels[8]))<|docstring|>Test to make sure labels are properly dropped when drop_nan_intersect is called.<|endoftext|>
47c0d4184b367ec4ed63bbfaf081aa791b39a23de1da50347040549aad760599
def test_157_vector_labels_single_value(self): 'Test that if a single value is passed in to labels, the value is applied to all rows.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = 42 test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertListEqual(([42] * 100), test_obj.labels.tolist())
Test that if a single value is passed in to labels, the value is applied to all rows.
sci_analysis/test/test_vector.py
test_157_vector_labels_single_value
cmmorrow/sci-analysis
17
python
def test_157_vector_labels_single_value(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = 42 test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertListEqual(([42] * 100), test_obj.labels.tolist())
def test_157_vector_labels_single_value(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = 42 test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertListEqual(([42] * 100), test_obj.labels.tolist())<|docstring|>Test that if a single value is passed in to labels, the value is applied to all rows.<|endoftext|>
ae314069647fe0b0485ff56f59a01b2a17d18928edffa5a9c47b2b332104839a
def test_158_vector_label_as_None(self): "Test that missing label values are converted to the string 'None'." np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype('str') input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan labels[24] = None labels[48] = None labels[72] = None labels[96] = None test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertEqual('None', test_obj.labels[24])
Test that missing label values are converted to the string 'None'.
sci_analysis/test/test_vector.py
test_158_vector_label_as_None
cmmorrow/sci-analysis
17
python
def test_158_vector_label_as_None(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype('str') input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan labels[24] = None labels[48] = None labels[72] = None labels[96] = None test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertEqual('None', test_obj.labels[24])
def test_158_vector_label_as_None(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype('str') input_array_1[8] = np.nan input_array_1[16] = np.nan input_array_1[32] = np.nan input_array_1[64] = np.nan input_array_2[1] = np.nan input_array_2[2] = np.nan input_array_2[4] = np.nan input_array_2[8] = np.nan labels[24] = None labels[48] = None labels[72] = None labels[96] = None test_obj = Vector(input_array_1, input_array_2, labels=labels) self.assertEqual(len(test_obj.labels), 93) self.assertEqual('None', test_obj.labels[24])<|docstring|>Test that missing label values are converted to the string 'None'.<|endoftext|>
88abcc9978df954eb3d56e5baf9c958ba07539e4dbbd82ce9a973b2b96ff341e
def test_159_vector_unequal_labels_length(self): 'Test to make sure that an error is raised if the length of labels is unequal to the length of the input data.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=50) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2, labels=labels)))
Test to make sure that an error is raised if the length of labels is unequal to the length of the input data.
sci_analysis/test/test_vector.py
test_159_vector_unequal_labels_length
cmmorrow/sci-analysis
17
python
def test_159_vector_unequal_labels_length(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=50) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2, labels=labels)))
def test_159_vector_unequal_labels_length(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=50) self.assertRaises(UnequalVectorLengthError, (lambda : Vector(input_array_1, input_array_2, labels=labels)))<|docstring|>Test to make sure that an error is raised if the length of labels is unequal to the length of the input data.<|endoftext|>
9b3a862b7eb0ebf2dab2429ef9b6c7419ab948f7d3b425bd86176bf41d4e07c6
def test_160_vector_groups_with_labels(self): 'Test to make sure group_labels returns the expected output.' ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] labels_1 = ['A', 'B', 'C', 'D', 'E'] labels_2 = ['AA', 'BB', 'CC', 'DD', 'EE'] input1 = Vector(ind_x_1, other=ind_y_1, labels=labels_1) input2 = Vector(ind_x_2, other=ind_y_2, labels=labels_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.group_labels self.assertDictEqual({1: labels_1, 2: labels_2}, {grp: l.tolist() for (grp, l) in groups.items()}) self.assertListEqual([1, 2], list(groups.keys()))
Test to make sure group_labels returns the expected output.
sci_analysis/test/test_vector.py
test_160_vector_groups_with_labels
cmmorrow/sci-analysis
17
python
def test_160_vector_groups_with_labels(self): ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] labels_1 = ['A', 'B', 'C', 'D', 'E'] labels_2 = ['AA', 'BB', 'CC', 'DD', 'EE'] input1 = Vector(ind_x_1, other=ind_y_1, labels=labels_1) input2 = Vector(ind_x_2, other=ind_y_2, labels=labels_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.group_labels self.assertDictEqual({1: labels_1, 2: labels_2}, {grp: l.tolist() for (grp, l) in groups.items()}) self.assertListEqual([1, 2], list(groups.keys()))
def test_160_vector_groups_with_labels(self): ind_x_1 = [0, 1, 2, 3, 4] ind_y_1 = [5, 6, 7, 8, 9] ind_x_2 = [10, 11, 12, 13, 14] ind_y_2 = [15, 16, 17, 18, 19] labels_1 = ['A', 'B', 'C', 'D', 'E'] labels_2 = ['AA', 'BB', 'CC', 'DD', 'EE'] input1 = Vector(ind_x_1, other=ind_y_1, labels=labels_1) input2 = Vector(ind_x_2, other=ind_y_2, labels=labels_2) new_input = input1.append(Vector(pd.Series([]))).append(input2) groups = new_input.group_labels self.assertDictEqual({1: labels_1, 2: labels_2}, {grp: l.tolist() for (grp, l) in groups.items()}) self.assertListEqual([1, 2], list(groups.keys()))<|docstring|>Test to make sure group_labels returns the expected output.<|endoftext|>
7fdce3bf1022d2ae76bb7f1624eeadfbe0710063a98ad829f5f261da107496a8
def test_161_vector_has_labels(self): 'Test to verify the logic for the has_labels property is working as expected.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype(str) self.assertTrue(Vector(input_array_1, labels=labels).has_labels) self.assertFalse(Vector(input_array_1).has_labels) labels[5] = None labels[10] = None self.assertTrue(Vector(input_array_1, labels=labels).has_labels) labels = ([None] * 100) labels[5] = 'hi' self.assertTrue(Vector(input_array_1, labels=labels).has_labels)
Test to verify the logic for the has_labels property is working as expected.
sci_analysis/test/test_vector.py
test_161_vector_has_labels
cmmorrow/sci-analysis
17
python
def test_161_vector_has_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype(str) self.assertTrue(Vector(input_array_1, labels=labels).has_labels) self.assertFalse(Vector(input_array_1).has_labels) labels[5] = None labels[10] = None self.assertTrue(Vector(input_array_1, labels=labels).has_labels) labels = ([None] * 100) labels[5] = 'hi' self.assertTrue(Vector(input_array_1, labels=labels).has_labels)
def test_161_vector_has_labels(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) labels = np.random.randint(10000, 50000, size=100).astype(str) self.assertTrue(Vector(input_array_1, labels=labels).has_labels) self.assertFalse(Vector(input_array_1).has_labels) labels[5] = None labels[10] = None self.assertTrue(Vector(input_array_1, labels=labels).has_labels) labels = ([None] * 100) labels[5] = 'hi' self.assertTrue(Vector(input_array_1, labels=labels).has_labels)<|docstring|>Test to verify the logic for the has_labels property is working as expected.<|endoftext|>
665c0a10c8e906b04dcc76ac60aca6d154b2a9526f43a969a8ba4a36b8badd73
def test_162_vector_drop_group(self): 'Test the normal use case for dropping a group from the Vector.' np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=4) vec1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1.drop_groups(4)), 300) self.assertEqual(len(vec1.drop_groups(2)), 200) self.assertListEqual([1, 3], vec1.values['grp'].cat.categories.tolist()) vec1_1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1_1.drop_groups([2, 4])), 200) self.assertListEqual([1, 3], vec1_1.values['grp'].cat.categories.tolist()) vec2 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2.drop_groups('b')), 204) self.assertEqual(len(vec2.drop_groups('d')), 200) self.assertListEqual(['a', 'c'], vec2.values['grp'].cat.categories.tolist()) vec2_1 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2_1.drop_groups(['b', 'd'])), 200) self.assertListEqual(['a', 'c'], vec2_1.values['grp'].cat.categories.tolist())
Test the normal use case for dropping a group from the Vector.
sci_analysis/test/test_vector.py
test_162_vector_drop_group
cmmorrow/sci-analysis
17
python
def test_162_vector_drop_group(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=4) vec1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1.drop_groups(4)), 300) self.assertEqual(len(vec1.drop_groups(2)), 200) self.assertListEqual([1, 3], vec1.values['grp'].cat.categories.tolist()) vec1_1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1_1.drop_groups([2, 4])), 200) self.assertListEqual([1, 3], vec1_1.values['grp'].cat.categories.tolist()) vec2 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2.drop_groups('b')), 204) self.assertEqual(len(vec2.drop_groups('d')), 200) self.assertListEqual(['a', 'c'], vec2.values['grp'].cat.categories.tolist()) vec2_1 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2_1.drop_groups(['b', 'd'])), 200) self.assertListEqual(['a', 'c'], vec2_1.values['grp'].cat.categories.tolist())
def test_162_vector_drop_group(self): np.random.seed(987654321) input_array_1 = st.norm.rvs(size=100) input_array_2 = st.norm.rvs(size=100) input_array_3 = st.norm.rvs(size=100) input_array_4 = st.norm.rvs(size=4) vec1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1.drop_groups(4)), 300) self.assertEqual(len(vec1.drop_groups(2)), 200) self.assertListEqual([1, 3], vec1.values['grp'].cat.categories.tolist()) vec1_1 = Vector(input_array_1).append(Vector(input_array_2)).append(Vector(input_array_3)).append(Vector(input_array_4)) self.assertEqual(len(vec1_1.drop_groups([2, 4])), 200) self.assertListEqual([1, 3], vec1_1.values['grp'].cat.categories.tolist()) vec2 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2.drop_groups('b')), 204) self.assertEqual(len(vec2.drop_groups('d')), 200) self.assertListEqual(['a', 'c'], vec2.values['grp'].cat.categories.tolist()) vec2_1 = Vector(input_array_1, groups=(['a'] * 100)).append(Vector(input_array_2, groups=(['b'] * 100))).append(Vector(input_array_3, groups=(['c'] * 100))).append(Vector(input_array_4, groups=(['d'] * 4))) self.assertEqual(len(vec2_1.drop_groups(['b', 'd'])), 200) self.assertListEqual(['a', 'c'], vec2_1.values['grp'].cat.categories.tolist())<|docstring|>Test the normal use case for dropping a group from the Vector.<|endoftext|>
935df23126fe82d89cc372cf94d765bc2ae9fa589fd8b11c90f8f09439db5393
def create_test_device(name): '\n Convenience method for creating a Device (e.g. for component testing).\n ' (site, _) = Site.objects.get_or_create(name='Site 1', slug='site-1') (manufacturer, _) = Manufacturer.objects.get_or_create(name='Manufacturer 1', slug='manufacturer-1') (devicetype, _) = DeviceType.objects.get_or_create(model='Device Type 1', manufacturer=manufacturer) (devicerole, _) = DeviceRole.objects.get_or_create(name='Device Role 1', slug='device-role-1') device = Device.objects.create(name=name, site=site, device_type=devicetype, device_role=devicerole) return device
Convenience method for creating a Device (e.g. for component testing).
nautobot/dcim/tests/test_views.py
create_test_device
johannwagner/nautobot
384
python
def create_test_device(name): '\n \n ' (site, _) = Site.objects.get_or_create(name='Site 1', slug='site-1') (manufacturer, _) = Manufacturer.objects.get_or_create(name='Manufacturer 1', slug='manufacturer-1') (devicetype, _) = DeviceType.objects.get_or_create(model='Device Type 1', manufacturer=manufacturer) (devicerole, _) = DeviceRole.objects.get_or_create(name='Device Role 1', slug='device-role-1') device = Device.objects.create(name=name, site=site, device_type=devicetype, device_role=devicerole) return device
def create_test_device(name): '\n \n ' (site, _) = Site.objects.get_or_create(name='Site 1', slug='site-1') (manufacturer, _) = Manufacturer.objects.get_or_create(name='Manufacturer 1', slug='manufacturer-1') (devicetype, _) = DeviceType.objects.get_or_create(model='Device Type 1', manufacturer=manufacturer) (devicerole, _) = DeviceRole.objects.get_or_create(name='Device Role 1', slug='device-role-1') device = Device.objects.create(name=name, site=site, device_type=devicetype, device_role=devicerole) return device<|docstring|>Convenience method for creating a Device (e.g. for component testing).<|endoftext|>
232de17c2eeea233f44a8308d0ec3d1afaede67e40befaf8bfc25a91ced62291
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_list_rack_elevations(self): '\n Test viewing the list of rack elevations.\n ' response = self.client.get(reverse('dcim:rack_elevation_list')) self.assertHttpStatus(response, 200)
Test viewing the list of rack elevations.
nautobot/dcim/tests/test_views.py
test_list_rack_elevations
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_list_rack_elevations(self): '\n \n ' response = self.client.get(reverse('dcim:rack_elevation_list')) self.assertHttpStatus(response, 200)
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_list_rack_elevations(self): '\n \n ' response = self.client.get(reverse('dcim:rack_elevation_list')) self.assertHttpStatus(response, 200)<|docstring|>Test viewing the list of rack elevations.<|endoftext|>
202dfda4ffd4b26a9738c4e4f5aa4ec1aac7a0e3d9d6dde500ebfc9f7963bf8b
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_import_objects(self): '\n Custom import test for YAML-based imports (versus CSV)\n ' IMPORT_DATA = '\nmanufacturer: Generic\nmodel: TEST-1000\nslug: test-1000\nu_height: 2\nsubdevice_role: parent\ncomments: test comment\nconsole-ports:\n - name: Console Port 1\n type: de-9\n - name: Console Port 2\n type: de-9\n - name: Console Port 3\n type: de-9\nconsole-server-ports:\n - name: Console Server Port 1\n type: rj-45\n - name: Console Server Port 2\n type: rj-45\n - name: Console Server Port 3\n type: rj-45\npower-ports:\n - name: Power Port 1\n type: iec-60320-c14\n - name: Power Port 2\n type: iec-60320-c14\n - name: Power Port 3\n type: iec-60320-c14\npower-outlets:\n - name: Power Outlet 1\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 2\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 3\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\ninterfaces:\n - name: Interface 1\n type: 1000base-t\n mgmt_only: true\n - name: Interface 2\n type: 1000base-t\n - name: Interface 3\n type: 1000base-t\nrear-ports:\n - name: Rear Port 1\n type: 8p8c\n - name: Rear Port 2\n type: 8p8c\n - name: Rear Port 3\n type: 8p8c\nfront-ports:\n - name: Front Port 1\n type: 8p8c\n rear_port: Rear Port 1\n - name: Front Port 2\n type: 8p8c\n rear_port: Rear Port 2\n - name: Front Port 3\n type: 8p8c\n rear_port: Rear Port 3\ndevice-bays:\n - name: Device Bay 1\n - name: Device Bay 2\n - name: Device Bay 3\n' Manufacturer.objects.create(name='Generic', slug='generic') self.add_permissions('dcim.view_devicetype', 'dcim.add_devicetype', 'dcim.add_consoleporttemplate', 'dcim.add_consoleserverporttemplate', 'dcim.add_powerporttemplate', 'dcim.add_poweroutlettemplate', 'dcim.add_interfacetemplate', 'dcim.add_frontporttemplate', 'dcim.add_rearporttemplate', 'dcim.add_devicebaytemplate') form_data = {'data': IMPORT_DATA, 'format': 'yaml'} response = self.client.post(reverse('dcim:devicetype_import'), data=form_data, follow=True) self.assertHttpStatus(response, 200) dt = DeviceType.objects.get(model='TEST-1000') self.assertEqual(dt.comments, 'test comment') self.assertEqual(dt.consoleporttemplates.count(), 3) cp1 = ConsolePortTemplate.objects.first() self.assertEqual(cp1.name, 'Console Port 1') self.assertEqual(cp1.type, ConsolePortTypeChoices.TYPE_DE9) self.assertEqual(dt.consoleserverporttemplates.count(), 3) csp1 = ConsoleServerPortTemplate.objects.first() self.assertEqual(csp1.name, 'Console Server Port 1') self.assertEqual(csp1.type, ConsolePortTypeChoices.TYPE_RJ45) self.assertEqual(dt.powerporttemplates.count(), 3) pp1 = PowerPortTemplate.objects.first() self.assertEqual(pp1.name, 'Power Port 1') self.assertEqual(pp1.type, PowerPortTypeChoices.TYPE_IEC_C14) self.assertEqual(dt.poweroutlettemplates.count(), 3) po1 = PowerOutletTemplate.objects.first() self.assertEqual(po1.name, 'Power Outlet 1') self.assertEqual(po1.type, PowerOutletTypeChoices.TYPE_IEC_C13) self.assertEqual(po1.power_port, pp1) self.assertEqual(po1.feed_leg, PowerOutletFeedLegChoices.FEED_LEG_A) self.assertEqual(dt.interfacetemplates.count(), 3) iface1 = InterfaceTemplate.objects.first() self.assertEqual(iface1.name, 'Interface 1') self.assertEqual(iface1.type, InterfaceTypeChoices.TYPE_1GE_FIXED) self.assertTrue(iface1.mgmt_only) self.assertEqual(dt.rearporttemplates.count(), 3) rp1 = RearPortTemplate.objects.first() self.assertEqual(rp1.name, 'Rear Port 1') self.assertEqual(dt.frontporttemplates.count(), 3) fp1 = FrontPortTemplate.objects.first() self.assertEqual(fp1.name, 'Front Port 1') self.assertEqual(fp1.rear_port, rp1) self.assertEqual(fp1.rear_port_position, 1) self.assertEqual(dt.devicebaytemplates.count(), 3) db1 = DeviceBayTemplate.objects.first() self.assertEqual(db1.name, 'Device Bay 1')
Custom import test for YAML-based imports (versus CSV)
nautobot/dcim/tests/test_views.py
test_import_objects
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_import_objects(self): '\n \n ' IMPORT_DATA = '\nmanufacturer: Generic\nmodel: TEST-1000\nslug: test-1000\nu_height: 2\nsubdevice_role: parent\ncomments: test comment\nconsole-ports:\n - name: Console Port 1\n type: de-9\n - name: Console Port 2\n type: de-9\n - name: Console Port 3\n type: de-9\nconsole-server-ports:\n - name: Console Server Port 1\n type: rj-45\n - name: Console Server Port 2\n type: rj-45\n - name: Console Server Port 3\n type: rj-45\npower-ports:\n - name: Power Port 1\n type: iec-60320-c14\n - name: Power Port 2\n type: iec-60320-c14\n - name: Power Port 3\n type: iec-60320-c14\npower-outlets:\n - name: Power Outlet 1\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 2\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 3\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\ninterfaces:\n - name: Interface 1\n type: 1000base-t\n mgmt_only: true\n - name: Interface 2\n type: 1000base-t\n - name: Interface 3\n type: 1000base-t\nrear-ports:\n - name: Rear Port 1\n type: 8p8c\n - name: Rear Port 2\n type: 8p8c\n - name: Rear Port 3\n type: 8p8c\nfront-ports:\n - name: Front Port 1\n type: 8p8c\n rear_port: Rear Port 1\n - name: Front Port 2\n type: 8p8c\n rear_port: Rear Port 2\n - name: Front Port 3\n type: 8p8c\n rear_port: Rear Port 3\ndevice-bays:\n - name: Device Bay 1\n - name: Device Bay 2\n - name: Device Bay 3\n' Manufacturer.objects.create(name='Generic', slug='generic') self.add_permissions('dcim.view_devicetype', 'dcim.add_devicetype', 'dcim.add_consoleporttemplate', 'dcim.add_consoleserverporttemplate', 'dcim.add_powerporttemplate', 'dcim.add_poweroutlettemplate', 'dcim.add_interfacetemplate', 'dcim.add_frontporttemplate', 'dcim.add_rearporttemplate', 'dcim.add_devicebaytemplate') form_data = {'data': IMPORT_DATA, 'format': 'yaml'} response = self.client.post(reverse('dcim:devicetype_import'), data=form_data, follow=True) self.assertHttpStatus(response, 200) dt = DeviceType.objects.get(model='TEST-1000') self.assertEqual(dt.comments, 'test comment') self.assertEqual(dt.consoleporttemplates.count(), 3) cp1 = ConsolePortTemplate.objects.first() self.assertEqual(cp1.name, 'Console Port 1') self.assertEqual(cp1.type, ConsolePortTypeChoices.TYPE_DE9) self.assertEqual(dt.consoleserverporttemplates.count(), 3) csp1 = ConsoleServerPortTemplate.objects.first() self.assertEqual(csp1.name, 'Console Server Port 1') self.assertEqual(csp1.type, ConsolePortTypeChoices.TYPE_RJ45) self.assertEqual(dt.powerporttemplates.count(), 3) pp1 = PowerPortTemplate.objects.first() self.assertEqual(pp1.name, 'Power Port 1') self.assertEqual(pp1.type, PowerPortTypeChoices.TYPE_IEC_C14) self.assertEqual(dt.poweroutlettemplates.count(), 3) po1 = PowerOutletTemplate.objects.first() self.assertEqual(po1.name, 'Power Outlet 1') self.assertEqual(po1.type, PowerOutletTypeChoices.TYPE_IEC_C13) self.assertEqual(po1.power_port, pp1) self.assertEqual(po1.feed_leg, PowerOutletFeedLegChoices.FEED_LEG_A) self.assertEqual(dt.interfacetemplates.count(), 3) iface1 = InterfaceTemplate.objects.first() self.assertEqual(iface1.name, 'Interface 1') self.assertEqual(iface1.type, InterfaceTypeChoices.TYPE_1GE_FIXED) self.assertTrue(iface1.mgmt_only) self.assertEqual(dt.rearporttemplates.count(), 3) rp1 = RearPortTemplate.objects.first() self.assertEqual(rp1.name, 'Rear Port 1') self.assertEqual(dt.frontporttemplates.count(), 3) fp1 = FrontPortTemplate.objects.first() self.assertEqual(fp1.name, 'Front Port 1') self.assertEqual(fp1.rear_port, rp1) self.assertEqual(fp1.rear_port_position, 1) self.assertEqual(dt.devicebaytemplates.count(), 3) db1 = DeviceBayTemplate.objects.first() self.assertEqual(db1.name, 'Device Bay 1')
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_import_objects(self): '\n \n ' IMPORT_DATA = '\nmanufacturer: Generic\nmodel: TEST-1000\nslug: test-1000\nu_height: 2\nsubdevice_role: parent\ncomments: test comment\nconsole-ports:\n - name: Console Port 1\n type: de-9\n - name: Console Port 2\n type: de-9\n - name: Console Port 3\n type: de-9\nconsole-server-ports:\n - name: Console Server Port 1\n type: rj-45\n - name: Console Server Port 2\n type: rj-45\n - name: Console Server Port 3\n type: rj-45\npower-ports:\n - name: Power Port 1\n type: iec-60320-c14\n - name: Power Port 2\n type: iec-60320-c14\n - name: Power Port 3\n type: iec-60320-c14\npower-outlets:\n - name: Power Outlet 1\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 2\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\n - name: Power Outlet 3\n type: iec-60320-c13\n power_port: Power Port 1\n feed_leg: A\ninterfaces:\n - name: Interface 1\n type: 1000base-t\n mgmt_only: true\n - name: Interface 2\n type: 1000base-t\n - name: Interface 3\n type: 1000base-t\nrear-ports:\n - name: Rear Port 1\n type: 8p8c\n - name: Rear Port 2\n type: 8p8c\n - name: Rear Port 3\n type: 8p8c\nfront-ports:\n - name: Front Port 1\n type: 8p8c\n rear_port: Rear Port 1\n - name: Front Port 2\n type: 8p8c\n rear_port: Rear Port 2\n - name: Front Port 3\n type: 8p8c\n rear_port: Rear Port 3\ndevice-bays:\n - name: Device Bay 1\n - name: Device Bay 2\n - name: Device Bay 3\n' Manufacturer.objects.create(name='Generic', slug='generic') self.add_permissions('dcim.view_devicetype', 'dcim.add_devicetype', 'dcim.add_consoleporttemplate', 'dcim.add_consoleserverporttemplate', 'dcim.add_powerporttemplate', 'dcim.add_poweroutlettemplate', 'dcim.add_interfacetemplate', 'dcim.add_frontporttemplate', 'dcim.add_rearporttemplate', 'dcim.add_devicebaytemplate') form_data = {'data': IMPORT_DATA, 'format': 'yaml'} response = self.client.post(reverse('dcim:devicetype_import'), data=form_data, follow=True) self.assertHttpStatus(response, 200) dt = DeviceType.objects.get(model='TEST-1000') self.assertEqual(dt.comments, 'test comment') self.assertEqual(dt.consoleporttemplates.count(), 3) cp1 = ConsolePortTemplate.objects.first() self.assertEqual(cp1.name, 'Console Port 1') self.assertEqual(cp1.type, ConsolePortTypeChoices.TYPE_DE9) self.assertEqual(dt.consoleserverporttemplates.count(), 3) csp1 = ConsoleServerPortTemplate.objects.first() self.assertEqual(csp1.name, 'Console Server Port 1') self.assertEqual(csp1.type, ConsolePortTypeChoices.TYPE_RJ45) self.assertEqual(dt.powerporttemplates.count(), 3) pp1 = PowerPortTemplate.objects.first() self.assertEqual(pp1.name, 'Power Port 1') self.assertEqual(pp1.type, PowerPortTypeChoices.TYPE_IEC_C14) self.assertEqual(dt.poweroutlettemplates.count(), 3) po1 = PowerOutletTemplate.objects.first() self.assertEqual(po1.name, 'Power Outlet 1') self.assertEqual(po1.type, PowerOutletTypeChoices.TYPE_IEC_C13) self.assertEqual(po1.power_port, pp1) self.assertEqual(po1.feed_leg, PowerOutletFeedLegChoices.FEED_LEG_A) self.assertEqual(dt.interfacetemplates.count(), 3) iface1 = InterfaceTemplate.objects.first() self.assertEqual(iface1.name, 'Interface 1') self.assertEqual(iface1.type, InterfaceTypeChoices.TYPE_1GE_FIXED) self.assertTrue(iface1.mgmt_only) self.assertEqual(dt.rearporttemplates.count(), 3) rp1 = RearPortTemplate.objects.first() self.assertEqual(rp1.name, 'Rear Port 1') self.assertEqual(dt.frontporttemplates.count(), 3) fp1 = FrontPortTemplate.objects.first() self.assertEqual(fp1.name, 'Front Port 1') self.assertEqual(fp1.rear_port, rp1) self.assertEqual(fp1.rear_port_position, 1) self.assertEqual(dt.devicebaytemplates.count(), 3) db1 = DeviceBayTemplate.objects.first() self.assertEqual(db1.name, 'Device Bay 1')<|docstring|>Custom import test for YAML-based imports (versus CSV)<|endoftext|>
e232368957413b465d26a8f5bc6a6cd013be08f02c95e7a6d4dc267507412b90
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_device_primary_ips(self): 'Test assigning a primary IP to a device.' self.add_permissions('dcim.change_device') device = Device.objects.first() interface = Interface.objects.create(device=device, name='Interface 1') ip_address = IPAddress.objects.create(address='1.2.3.4/32') interface.ip_addresses.add(ip_address) form_data = self.form_data.copy() form_data['primary_ip4'] = ip_address.pk request = {'path': self._get_url('edit', device), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertInstanceEqual(self._get_queryset().order_by('last_updated').last(), form_data)
Test assigning a primary IP to a device.
nautobot/dcim/tests/test_views.py
test_device_primary_ips
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_device_primary_ips(self): self.add_permissions('dcim.change_device') device = Device.objects.first() interface = Interface.objects.create(device=device, name='Interface 1') ip_address = IPAddress.objects.create(address='1.2.3.4/32') interface.ip_addresses.add(ip_address) form_data = self.form_data.copy() form_data['primary_ip4'] = ip_address.pk request = {'path': self._get_url('edit', device), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertInstanceEqual(self._get_queryset().order_by('last_updated').last(), form_data)
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_device_primary_ips(self): self.add_permissions('dcim.change_device') device = Device.objects.first() interface = Interface.objects.create(device=device, name='Interface 1') ip_address = IPAddress.objects.create(address='1.2.3.4/32') interface.ip_addresses.add(ip_address) form_data = self.form_data.copy() form_data['primary_ip4'] = ip_address.pk request = {'path': self._get_url('edit', device), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertInstanceEqual(self._get_queryset().order_by('last_updated').last(), form_data)<|docstring|>Test assigning a primary IP to a device.<|endoftext|>
52362a8ab5eb0c6934e1e4139327570c0c2733d8bb6fa2953d056ace0d32876f
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_pass(self): '\n Given a config context schema\n And a device with local context that conforms to that schema\n Assert that the local context passes schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'string'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertEqual(self._get_queryset().get(name='Device X').local_context_schema.pk, schema.pk)
Given a config context schema And a device with local context that conforms to that schema Assert that the local context passes schema validation via full_clean()
nautobot/dcim/tests/test_views.py
test_local_context_schema_validation_pass
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_pass(self): '\n Given a config context schema\n And a device with local context that conforms to that schema\n Assert that the local context passes schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'string'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertEqual(self._get_queryset().get(name='Device X').local_context_schema.pk, schema.pk)
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_pass(self): '\n Given a config context schema\n And a device with local context that conforms to that schema\n Assert that the local context passes schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'string'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 302) self.assertEqual(self._get_queryset().get(name='Device X').local_context_schema.pk, schema.pk)<|docstring|>Given a config context schema And a device with local context that conforms to that schema Assert that the local context passes schema validation via full_clean()<|endoftext|>
f34519d547fd9878dae6857091dc03d30b5acb1ddcd869758270106b2fdb788b
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_fails(self): '\n Given a config context schema\n And a device with local context that *does not* conform to that schema\n Assert that the local context fails schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'integer'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 200) self.assertEqual(self._get_queryset().filter(name='Device X').count(), 0)
Given a config context schema And a device with local context that *does not* conform to that schema Assert that the local context fails schema validation via full_clean()
nautobot/dcim/tests/test_views.py
test_local_context_schema_validation_fails
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_fails(self): '\n Given a config context schema\n And a device with local context that *does not* conform to that schema\n Assert that the local context fails schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'integer'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 200) self.assertEqual(self._get_queryset().filter(name='Device X').count(), 0)
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_local_context_schema_validation_fails(self): '\n Given a config context schema\n And a device with local context that *does not* conform to that schema\n Assert that the local context fails schema validation via full_clean()\n ' schema = ConfigContextSchema.objects.create(name='Schema 1', slug='schema-1', data_schema={'type': 'object', 'properties': {'foo': {'type': 'integer'}}}) self.add_permissions('dcim.add_device') form_data = self.form_data.copy() form_data['local_context_schema'] = schema.pk form_data['local_context_data'] = '{"foo": "bar"}' request = {'path': self._get_url('add'), 'data': post_data(form_data)} self.assertHttpStatus(self.client.post(**request), 200) self.assertEqual(self._get_queryset().filter(name='Device X').count(), 0)<|docstring|>Given a config context schema And a device with local context that *does not* conform to that schema Assert that the local context fails schema validation via full_clean()<|endoftext|>
7b44351ddfd88232917840d522b6946c6470fdd4cc81b45e1807c9d641b219d5
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): 'This view has a custom queryset_to_csv() implementation.' response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,console_port,console_server,port,reachable\nDevice 1,Console Port 1,Device 2,Console Server Port 1,True\nDevice 1,Console Port 2,Device 2,Console Server Port 2,True\nDevice 1,Console Port 3,,,False', response.content.decode(response.charset))
This view has a custom queryset_to_csv() implementation.
nautobot/dcim/tests/test_views.py
test_queryset_to_csv
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,console_port,console_server,port,reachable\nDevice 1,Console Port 1,Device 2,Console Server Port 1,True\nDevice 1,Console Port 2,Device 2,Console Server Port 2,True\nDevice 1,Console Port 3,,,False', response.content.decode(response.charset))
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,console_port,console_server,port,reachable\nDevice 1,Console Port 1,Device 2,Console Server Port 1,True\nDevice 1,Console Port 2,Device 2,Console Server Port 2,True\nDevice 1,Console Port 3,,,False', response.content.decode(response.charset))<|docstring|>This view has a custom queryset_to_csv() implementation.<|endoftext|>
9b0f19497ceb87f84ba362e6c8db148c80cbdbe24d0231a8249705e80240751f
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): 'This view has a custom queryset_to_csv() implementation.' response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,power_port,pdu,outlet,reachable\nDevice 1,Power Port 1,Device 2,Power Outlet 1,True\nDevice 1,Power Port 2,Device 2,Power Outlet 2,True\nDevice 1,Power Port 3,,Power Feed 1,True', response.content.decode(response.charset))
This view has a custom queryset_to_csv() implementation.
nautobot/dcim/tests/test_views.py
test_queryset_to_csv
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,power_port,pdu,outlet,reachable\nDevice 1,Power Port 1,Device 2,Power Outlet 1,True\nDevice 1,Power Port 2,Device 2,Power Outlet 2,True\nDevice 1,Power Port 3,,Power Feed 1,True', response.content.decode(response.charset))
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device,power_port,pdu,outlet,reachable\nDevice 1,Power Port 1,Device 2,Power Outlet 1,True\nDevice 1,Power Port 2,Device 2,Power Outlet 2,True\nDevice 1,Power Port 3,,Power Feed 1,True', response.content.decode(response.charset))<|docstring|>This view has a custom queryset_to_csv() implementation.<|endoftext|>
5b4757b6e5b9e1a8ffd4b986e7303a725b81cfd13f700eebae3f44aaa44fbace
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): 'This view has a custom queryset_to_csv() implementation.' response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device_a,interface_a,device_b,interface_b,reachable\nDevice 1,Interface 1,Device 2,Interface 1,True\nDevice 1,Interface 2,,,True\nDevice 1,Interface 3,,,False', response.content.decode(response.charset))
This view has a custom queryset_to_csv() implementation.
nautobot/dcim/tests/test_views.py
test_queryset_to_csv
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device_a,interface_a,device_b,interface_b,reachable\nDevice 1,Interface 1,Device 2,Interface 1,True\nDevice 1,Interface 2,,,True\nDevice 1,Interface 3,,,False', response.content.decode(response.charset))
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*']) def test_queryset_to_csv(self): response = self.client.get('{}?export'.format(self._get_url('list'))) self.assertHttpStatus(response, 200) self.assertEqual(response.get('Content-Type'), 'text/csv') self.assertEqual('device_a,interface_a,device_b,interface_b,reachable\nDevice 1,Interface 1,Device 2,Interface 1,True\nDevice 1,Interface 2,,,True\nDevice 1,Interface 3,,,False', response.content.decode(response.charset))<|docstring|>This view has a custom queryset_to_csv() implementation.<|endoftext|>
864ee9359f00876083c4b4a978daefef1c91b8bed6f5d689f0a017dea7164f15
@override_settings(EXEMPT_VIEW_PERMISSIONS=[]) def test_list_objects_with_constrained_permission(self): '\n Extend base GetObjectViewTestCase to have correct permissions for *both ends* of a connection.\n ' instance1 = self._get_queryset().all()[0] endpoint = instance1.connected_endpoint obj_perm = ObjectPermission(name='Endpoint test permission', constraints={'pk': endpoint.pk}, actions=['view']) obj_perm.save() obj_perm.users.add(self.user) obj_perm.object_types.add(ContentType.objects.get_for_model(endpoint)) super().test_list_objects_with_constrained_permission()
Extend base GetObjectViewTestCase to have correct permissions for *both ends* of a connection.
nautobot/dcim/tests/test_views.py
test_list_objects_with_constrained_permission
johannwagner/nautobot
384
python
@override_settings(EXEMPT_VIEW_PERMISSIONS=[]) def test_list_objects_with_constrained_permission(self): '\n \n ' instance1 = self._get_queryset().all()[0] endpoint = instance1.connected_endpoint obj_perm = ObjectPermission(name='Endpoint test permission', constraints={'pk': endpoint.pk}, actions=['view']) obj_perm.save() obj_perm.users.add(self.user) obj_perm.object_types.add(ContentType.objects.get_for_model(endpoint)) super().test_list_objects_with_constrained_permission()
@override_settings(EXEMPT_VIEW_PERMISSIONS=[]) def test_list_objects_with_constrained_permission(self): '\n \n ' instance1 = self._get_queryset().all()[0] endpoint = instance1.connected_endpoint obj_perm = ObjectPermission(name='Endpoint test permission', constraints={'pk': endpoint.pk}, actions=['view']) obj_perm.save() obj_perm.users.add(self.user) obj_perm.object_types.add(ContentType.objects.get_for_model(endpoint)) super().test_list_objects_with_constrained_permission()<|docstring|>Extend base GetObjectViewTestCase to have correct permissions for *both ends* of a connection.<|endoftext|>
4c21c2b08e99c0bb2635dee25d0a17f2203ba9c7d57f255e15a11f65afbba960
def assay(self, dataset): ' Assay based on a given dataset.\n\n Parameters\n ----------\n dataset : Dataset\n Dataset to assay.\n\n Returns\n -------\n dataset : Dataset\n Assayed dataset, with all `y` annotated.\n\n ' for point in dataset: assert (point in self.dataset) point.y = self.dataset[point].y point.extra = self.dataset[point].extra return dataset
Assay based on a given dataset. Parameters ---------- dataset : Dataset Dataset to assay. Returns ------- dataset : Dataset Assayed dataset, with all `y` annotated.
malt/agents/assayer.py
assay
choderalab/malt
2
python
def assay(self, dataset): ' Assay based on a given dataset.\n\n Parameters\n ----------\n dataset : Dataset\n Dataset to assay.\n\n Returns\n -------\n dataset : Dataset\n Assayed dataset, with all `y` annotated.\n\n ' for point in dataset: assert (point in self.dataset) point.y = self.dataset[point].y point.extra = self.dataset[point].extra return dataset
def assay(self, dataset): ' Assay based on a given dataset.\n\n Parameters\n ----------\n dataset : Dataset\n Dataset to assay.\n\n Returns\n -------\n dataset : Dataset\n Assayed dataset, with all `y` annotated.\n\n ' for point in dataset: assert (point in self.dataset) point.y = self.dataset[point].y point.extra = self.dataset[point].extra return dataset<|docstring|>Assay based on a given dataset. Parameters ---------- dataset : Dataset Dataset to assay. Returns ------- dataset : Dataset Assayed dataset, with all `y` annotated.<|endoftext|>
a280d3c2fbabb25c489320b6931e2eac5e2e947ecb71c4dd6f28b0b7881165b9
def test_get_person_from_tax_id_ok(self): 'Test get_person_from_tax_id returns the correct result\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.get_person_from_tax_id(tax_id) self.assertEqual(atoka_p['base']['taxId'], tax_id)
Test get_person_from_tax_id returns the correct result
tests/test_atokaconn.py
test_get_person_from_tax_id_ok
openpolis/atokaconn
1
python
def test_get_person_from_tax_id_ok(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.get_person_from_tax_id(tax_id) self.assertEqual(atoka_p['base']['taxId'], tax_id)
def test_get_person_from_tax_id_ok(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.get_person_from_tax_id(tax_id) self.assertEqual(atoka_p['base']['taxId'], tax_id)<|docstring|>Test get_person_from_tax_id returns the correct result<|endoftext|>
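The AtokaConn tests in this group all lean on helpers defined elsewhere in the test module (`MockResponse`, `get_person_ok`, and the patched `self.mock_get`). The shapes below are guesses at what those helpers minimally need to provide for the assertions above to hold; the real implementations in the atokaconn test suite may differ:

class MockResponse:
    # Stands in for requests.Response: the client only needs ok, status_code,
    # reason, and json().
    def __init__(self, json_data, status_code=200, ok=True, reason=""):
        self._json_data = json_data
        self.status_code = status_code
        self.ok = ok
        self.reason = reason

    def json(self):
        return self._json_data

def get_person_ok(tax_id, search_params=None):
    # Returns an ATOKA-like payload with exactly one matching person, echoing
    # any search parameters back so name assertions can succeed.
    search_params = search_params or {}
    name = "{} {}".format(
        search_params.get("given_name", "Mario"),
        search_params.get("family_name", "Rossi"),
    )
    return {"items": [{"name": name, "base": {"taxId": tax_id}}], "meta": {"count": 1}}

response = MockResponse(get_person_ok("RSSMRA70A01H501S"), status_code=200, ok=True)
print(response.json()["items"][0]["base"]["taxId"])  # -> RSSMRA70A01H501S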
5cbb3f7d658a924bbf04b92715679df6fdbafa5cc5180ff9cae1606b92f23147
def test_get_person_from_tax_id_broken_json_failure(self): 'Test get_person_from_tax_id fails when json is broken\n ' tax_id = faker.ssn() self.mock_get.return_value = MockBrokenJsonResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(Exception): _ = atoka_conn.get_person_from_tax_id(tax_id)
Test get_person_from_tax_id fails when json is broken
tests/test_atokaconn.py
test_get_person_from_tax_id_broken_json_failure
openpolis/atokaconn
1
python
def test_get_person_from_tax_id_broken_json_failure(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockBrokenJsonResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(Exception): _ = atoka_conn.get_person_from_tax_id(tax_id)
def test_get_person_from_tax_id_broken_json_failure(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockBrokenJsonResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(Exception): _ = atoka_conn.get_person_from_tax_id(tax_id)<|docstring|>Test get_person_from_tax_id fails when json is broken<|endoftext|>
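By analogy, the broken-JSON case presumably swaps in a response double whose `json()` raises. A hypothetical minimal version, again not the project's actual helper:

import json

class MockBrokenJsonResponse:
    # Same surface as MockResponse, but json() raises, so the client's JSON
    # decoding path fails even though the HTTP layer reports success.
    def __init__(self, json_data, status_code=200, ok=True, reason=""):
        self.status_code = status_code
        self.ok = ok
        self.reason = reason

    def json(self):
        raise json.JSONDecodeError("broken payload", "", 0)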
fc88b21020b682c8d3605a000e73fd9581a17d73dc6da3674ff3f141658cbec5
def test_get_person_from_tax_id_timeout_failure(self): 'Test get_person_from_tax_id fails when connection fails\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.get_person_from_tax_id(tax_id) self.mock_get.side_effect = None
Test get_person_from_tax_id fails when connection fails
tests/test_atokaconn.py
test_get_person_from_tax_id_timeout_failure
openpolis/atokaconn
1
python
def test_get_person_from_tax_id_timeout_failure(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.get_person_from_tax_id(tax_id) self.mock_get.side_effect = None
def test_get_person_from_tax_id_timeout_failure(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.get_person_from_tax_id(tax_id) self.mock_get.side_effect = None<|docstring|>Test get_person_from_tax_id fails when connection fails<|endoftext|>
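The timeout tests rely on standard `unittest.mock` behaviour: when `side_effect` is set to an exception instance, calling the mock raises that exception instead of returning `return_value`, and resetting it to `None` restores the normal return. A small stand-alone demonstration (assuming `Timeout` here is `requests.exceptions.Timeout`, as the `AtokaTimeoutException` assertion suggests):

from unittest.mock import MagicMock

from requests.exceptions import Timeout

fake_get = MagicMock(return_value="never returned")
fake_get.side_effect = Timeout()

try:
    fake_get("https://api.example.test/people")
except Timeout:
    print("Timeout raised by the mock, as in the tests above")

fake_get.side_effect = None  # restore normal behaviour, mirroring the tests
print(fake_get("again"))  # -> never returned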
bffa3a34e21baee2223a057c2b6eb43ff2d7b2a1ecb418b616608a9288d17b84
def test_search_person_ok(self): 'Test search_person returns the correct result\n        ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.search_person(person) self.assertEqual(atoka_p['name'], person.name) self.assertEqual(atoka_p['base']['taxId'], tax_id)
Test search_person returns the correct result
tests/test_atokaconn.py
test_search_person_ok
openpolis/atokaconn
1
python
def test_search_person_ok(self): '\n ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.search_person(person) self.assertEqual(atoka_p['name'], person.name) self.assertEqual(atoka_p['base']['taxId'], tax_id)
def test_search_person_ok(self): '\n        ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') atoka_p = atoka_conn.search_person(person) self.assertEqual(atoka_p['name'], person.name) self.assertEqual(atoka_p['base']['taxId'], tax_id)<|docstring|>Test search_person returns the correct result<|endoftext|>
f7badefa771ae61819e7f54a83878b554c8b4b4671897d0fe2aea0b7f579e43e
def test_search_person_timeout_failure(self): 'Test timeout during search_person invocations\n ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.search_person(person) self.mock_get.side_effect = None
Test timeout during search_person invocations
tests/test_atokaconn.py
test_search_person_timeout_failure
openpolis/atokaconn
1
python
def test_search_person_timeout_failure(self): '\n ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.search_person(person) self.mock_get.side_effect = None
def test_search_person_timeout_failure(self): '\n ' parent_area = AreaFactory(name='Lazio') area = AreaFactory(name='Roma', parent=parent_area) person = PersonFactory.create(family_name=faker.last_name_male(), given_name=faker.first_name_male(), birth_date=faker.date(pattern='%Y-%m-%d', end_datetime='-47y'), birth_location_area=area) tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_person_ok(tax_id=tax_id, search_params={'family_name': person.family_name, 'given_name': person.given_name, 'birth_date': person.birth_date}), status_code=200, ok=True) self.mock_get.side_effect = Timeout() atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaTimeoutException): _ = atoka_conn.search_person(person) self.mock_get.side_effect = None<|docstring|>Test timeout during search_person invocations<|endoftext|>
7706a32770bb50311eed051a60558e797d59accf950fd4a609670e291970853f
def test_get_person_from_tax_id_fails_doesnotexist(self): 'Test get_person_from_tax_id returns void result\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaObjectDoesNotExist): atoka_conn.get_person_from_tax_id(tax_id)
Test get_person_from_tax_id returns void result
tests/test_atokaconn.py
test_get_person_from_tax_id_fails_doesnotexist
openpolis/atokaconn
1
python
def test_get_person_from_tax_id_fails_doesnotexist(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaObjectDoesNotExist): atoka_conn.get_person_from_tax_id(tax_id)
def test_get_person_from_tax_id_fails_doesnotexist(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=200, ok=True) atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaObjectDoesNotExist): atoka_conn.get_person_from_tax_id(tax_id)<|docstring|>Test get_person_from_tax_id returns void result<|endoftext|>
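For the does-not-exist case, the `get_void_response` helper presumably returns an ATOKA-style payload with zero items; a guessed minimal form:

def get_void_response():
    # Zero items: the client should treat the lookup as "object does not exist".
    return {"items": [], "meta": {"count": 0}}

print(len(get_void_response()["items"]))  # -> 0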
0ffca5b69f64518a36c843f9609ab93e070f1ec047cdf9e30728d5502b8e4199
def test_get_person_from_tax_id_fails_notok(self): 'Test get_person_from_tax_id returns not ok\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=404, ok=False, reason='Requested URI was not found here') atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaResponseError): atoka_conn.get_person_from_tax_id(tax_id)
Test get_person_from_tax_id returns not ok
tests/test_atokaconn.py
test_get_person_from_tax_id_fails_notok
openpolis/atokaconn
1
python
def test_get_person_from_tax_id_fails_notok(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=404, ok=False, reason='Requested URI was not found here') atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaResponseError): atoka_conn.get_person_from_tax_id(tax_id)
def test_get_person_from_tax_id_fails_notok(self): '\n ' tax_id = faker.ssn() self.mock_get.return_value = MockResponse(get_void_response(), status_code=404, ok=False, reason='Requested URI was not found here') atoka_conn = AtokaConn(key='testing') with self.assertRaises(AtokaResponseError): atoka_conn.get_person_from_tax_id(tax_id)<|docstring|>Test get_person_from_tax_id returns not ok<|endoftext|>
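Taken together, these failure tests imply a mapping from response state to exception type. The sketch below illustrates that mapping only, with locally defined exception classes and an invented `extract_single_item` helper; it is not the actual AtokaConn implementation, which lives in the atokaconn package and may differ:

class AtokaResponseError(Exception):
    pass

class AtokaObjectDoesNotExist(Exception):
    pass

def extract_single_item(response):
    # A non-OK HTTP response becomes AtokaResponseError, carrying the reason.
    if not response.ok:
        raise AtokaResponseError(response.reason)
    items = response.json().get("items", [])
    # An OK response with no matching items maps to "does not exist".
    if not items:
        raise AtokaObjectDoesNotExist()
    return items[0]

class _NotFoundStub:
    ok = False
    reason = "Requested URI was not found here"

    def json(self):
        return {}

try:
    extract_single_item(_NotFoundStub())
except AtokaResponseError as exc:
    print(exc)  # -> Requested URI was not found here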